diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index fe9948b5a1df..d1cca1c162b4 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,7 +3,7 @@ "features": { "ghcr.io/devcontainers/features/node:1": {}, "ghcr.io/devcontainers/features/dotnet:1": { - "version": "7" + "version": "8" }, "ghcr.io/jlaundry/devcontainer-features/azure-functions-core-tools:1": {} }, diff --git a/.editorconfig b/.editorconfig index e695f4b51104..487b3056ebe7 100644 --- a/.editorconfig +++ b/.editorconfig @@ -150,7 +150,6 @@ dotnet_diagnostic.IDE0071.severity = warning # Simplify interpolation dotnet_diagnostic.IDE0073.severity = warning # Require file header dotnet_diagnostic.IDE0082.severity = warning # Convert typeof to nameof dotnet_diagnostic.IDE0090.severity = warning # Simplify new expression -dotnet_diagnostic.IDE0130.severity = warning # Namespace does not match folder structure dotnet_diagnostic.IDE0161.severity = warning # Use file-scoped namespace # Suppressed diagnostics @@ -159,6 +158,7 @@ dotnet_diagnostic.CA1032.severity = none # We're using RCS1194 which seems to co dotnet_diagnostic.CA1034.severity = none # Do not nest type. Alternatively, change its accessibility so that it is not externally visible dotnet_diagnostic.CA1062.severity = none # Disable null check, C# already does it for us dotnet_diagnostic.CA1303.severity = none # Do not pass literals as localized parameters +dotnet_diagnostic.CA1508.severity = none # Avoid dead conditional code. Too many false positives. dotnet_diagnostic.CA1510.severity = none dotnet_diagnostic.CA1805.severity = none # Member is explicitly initialized to its default value dotnet_diagnostic.CA1822.severity = none # Member does not access instance data and can be marked as static @@ -220,6 +220,7 @@ dotnet_diagnostic.IDE0079.severity = none # Remove unnecessary suppression. dotnet_diagnostic.IDE0080.severity = none # Remove unnecessary suppression operator. 
dotnet_diagnostic.IDE0100.severity = none # Remove unnecessary equality operator dotnet_diagnostic.IDE0110.severity = none # Remove unnecessary discards +dotnet_diagnostic.IDE0130.severity = none # Namespace does not match folder structure dotnet_diagnostic.IDE0032.severity = none # Use auto property dotnet_diagnostic.IDE0160.severity = none # Use block-scoped namespace dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/.github/_typos.toml b/.github/_typos.toml index 08d93b234795..6e3594ae70fa 100644 --- a/.github/_typos.toml +++ b/.github/_typos.toml @@ -18,6 +18,7 @@ extend-exclude = [ [default.extend-words] ACI = "ACI" # Azure Container Instance +exercize = "exercize" #test typos [default.extend-identifiers] ags = "ags" # Azure Graph Service diff --git a/.github/workflows/check-coverage.ps1 b/.github/workflows/check-coverage.ps1 new file mode 100644 index 000000000000..39fec480cb40 --- /dev/null +++ b/.github/workflows/check-coverage.ps1 @@ -0,0 +1,54 @@ +param ( + [string]$JsonReportPath, + [double]$CoverageThreshold +) + +$jsonContent = Get-Content $JsonReportPath -Raw | ConvertFrom-Json +$coverageBelowThreshold = $false + +function Get-FormattedValue($number) { + $formattedNumber = "{0:N1}" -f $number + $icon = if ($number -ge $CoverageThreshold) { '✅' } else { '❌' } + + return "$formattedNumber% $icon" +} + +$lineCoverage = $jsonContent.summary.linecoverage +$branchCoverage = $jsonContent.summary.branchcoverage + +if ($lineCoverage -lt $CoverageThreshold -or $branchCoverage -lt $CoverageThreshold) { + $coverageBelowThreshold = $true +} + +$totalTableData = [PSCustomObject]@{ + 'Metric' = 'Total Coverage' + 'Line Coverage' = Get-FormattedValue $lineCoverage + 'Branch Coverage' = Get-FormattedValue $branchCoverage +} + +$totalTableData | Format-Table -AutoSize + +$assemblyTableData = @() + +foreach ($assembly in $jsonContent.coverage.assemblies) { + $assemblyName = $assembly.name + $assemblyLineCoverage = $assembly.coverage + $assemblyBranchCoverage = $assembly.branchcoverage + + if ($assemblyLineCoverage -lt $CoverageThreshold -or $assemblyBranchCoverage -lt $CoverageThreshold) { + $coverageBelowThreshold = $true + } + + $assemblyTableData += [PSCustomObject]@{ + 'Assembly Name' = $assemblyName + 'Line' = Get-FormattedValue $assemblyLineCoverage + 'Branch' = Get-FormattedValue $assemblyBranchCoverage + } +} + +$assemblyTableData | Format-Table -AutoSize + +if ($coverageBelowThreshold) { + Write-Host "Code coverage is lower than defined threshold: $CoverageThreshold. Stopping the task." 
+ exit 1 +} diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index 777222ab5911..755aef1f7d20 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -12,6 +12,9 @@ on: merge_group: branches: ["main"] +env: + COVERAGE_THRESHOLD: 80 + concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true @@ -51,11 +54,7 @@ jobs: include: - { dotnet: "6.0-jammy", os: "ubuntu", configuration: Debug } - { dotnet: "7.0-jammy", os: "ubuntu", configuration: Release } - - { - dotnet: "8.0-preview-jammy", - os: "ubuntu", - configuration: Release, - } + - { dotnet: "8.0-jammy", os: "ubuntu", configuration: Release } - { dotnet: "6.0", os: "windows", configuration: Release } - { dotnet: "7.0", @@ -63,7 +62,7 @@ jobs: configuration: Debug, integration-tests: true, } - - { dotnet: "8.0-preview", os: "windows", configuration: Release } + - { dotnet: "8.0", os: "windows", configuration: Release } runs-on: ubuntu-latest container: @@ -84,15 +83,15 @@ jobs: - name: Run Unit Tests run: | - export UT_PROJECTS=$(find ./dotnet -type f -name "*.UnitTests.csproj" | tr '\n' ' ') + export UT_PROJECTS=$(find ./dotnet -type f -name "*.UnitTests.csproj" | grep -v -E "(Planners.Core.UnitTests.csproj|Experimental.Orchestration.Flow.UnitTests.csproj|Experimental.Assistants.UnitTests.csproj)" | tr '\n' ' ') for project in $UT_PROJECTS; do - dotnet test -c ${{ matrix.configuration }} $project --no-build -v Normal --logger trx + dotnet test -c ${{ matrix.configuration }} $project --no-build -v Normal --logger trx --collect:"XPlat Code Coverage" --results-directory:"TestResults/Coverage/" done - name: Run Integration Tests if: github.event_name != 'pull_request' && matrix.integration-tests run: | - export INTEGRATION_TEST_PROJECTS=$(find ./dotnet -type f -name "*IntegrationTests.csproj" | tr '\n' ' ') + export INTEGRATION_TEST_PROJECTS=$(find ./dotnet -type f -name "*IntegrationTests.csproj" | grep -v "Experimental.Orchestration.Flow.IntegrationTests.csproj" | tr '\n' ' ') for project in $INTEGRATION_TEST_PROJECTS; do dotnet test -c ${{ matrix.configuration }} $project --no-build -v Normal --logger trx done @@ -105,9 +104,28 @@ jobs: AzureOpenAIEmbeddings__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }} AzureOpenAI__ApiKey: ${{ secrets.AZUREOPENAI__APIKEY }} AzureOpenAIEmbeddings__ApiKey: ${{ secrets.AZUREOPENAI__APIKEY }} + Planners__AzureOpenAI__ApiKey: ${{ secrets.PLANNERS__AZUREOPENAI__APIKEY }} + Planners__AzureOpenAI__Endpoint: ${{ secrets.PLANNERS__AZUREOPENAI__ENDPOINT }} + Planners__AzureOpenAI__DeploymentName: ${{ vars.PLANNERS__AZUREOPENAI__DEPLOYMENTNAME }} + Planners__OpenAI__ApiKey: ${{ secrets.PLANNERS__OPENAI__APIKEY }} + Planners__OpenAI__ModelId: ${{ vars.PLANNERS__OPENAI__MODELID }} Bing__ApiKey: ${{ secrets.BING__APIKEY }} OpenAI__ApiKey: ${{ secrets.OPENAI__APIKEY }} + # Generate test reports and check coverage + - name: Generate test reports + uses: danielpalme/ReportGenerator-GitHub-Action@5.2.0 + with: + reports: "./TestResults/Coverage/**/coverage.cobertura.xml" + targetdir: "./TestResults/Reports" + reporttypes: "JsonSummary" + # Report for production packages only + assemblyfilters: "+Microsoft.SemanticKernel.Abstractions;+Microsoft.SemanticKernel.Core;+Microsoft.SemanticKernel.PromptTemplates.Handlebars;+Microsoft.SemanticKernel.Connectors.OpenAI;+Microsoft.SemanticKernel.Yaml;" + + - name: Check coverage + shell: pwsh + run: 
.github/workflows/check-coverage.ps1 -JsonReportPath "TestResults/Reports/Summary.json" -CoverageThreshold $env:COVERAGE_THRESHOLD + # This final job is required to satisfy the merge queue. It must only run (or succeed) if no tests failed dotnet-build-and-test-check: if: always() diff --git a/.github/workflows/dotnet-ci.yml b/.github/workflows/dotnet-ci.yml index 68b5c6a8b862..0ed0631b9c01 100644 --- a/.github/workflows/dotnet-ci.yml +++ b/.github/workflows/dotnet-ci.yml @@ -22,14 +22,14 @@ jobs: - { os: ubuntu-latest, dotnet: '6.0', configuration: Debug } - { os: ubuntu-latest, dotnet: '6.0', configuration: Release } - { os: ubuntu-latest, dotnet: '7.0', configuration: Release } - - { os: ubuntu-latest, dotnet: '8.0-preview', configuration: Release } - + - { os: ubuntu-latest, dotnet: '8.0', configuration: Release } + runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 with: clean: true - + - name: Find solutions shell: bash run: echo "solutions=$(find ./ -type f -name "*.sln" | tr '\n' ' ')" >> $GITHUB_ENV @@ -43,7 +43,7 @@ jobs: for solution in ${{ env.solutions }}; do docker run --rm -v $(pwd):/app -w /app -e GITHUB_ACTIONS='true' mcr.microsoft.com/dotnet/sdk:${{ matrix.dotnet }} /bin/sh -c "dotnet build -c ${{ matrix.configuration }} /app/$solution" done - + - name: Find test projects shell: bash run: echo "testprojects=$(find ./dotnet -type f -name "*UnitTests.csproj" | tr '\n' ' ')" >> $GITHUB_ENV diff --git a/.github/workflows/dotnet-format.yml b/.github/workflows/dotnet-format.yml index cfa1c7feebb5..3c8c341b6884 100644 --- a/.github/workflows/dotnet-format.yml +++ b/.github/workflows/dotnet-format.yml @@ -26,19 +26,19 @@ jobs: matrix: include: #- { dotnet: '6.0', configuration: Release, os: ubuntu-latest } - - { dotnet: '7.0', configuration: Release, os: ubuntu-latest } - #- { dotnet: '8.0-preview', configuration: Release, os: ubuntu-latest } - + #- { dotnet: '7.0', configuration: Release, os: ubuntu-latest } + - { dotnet: '8.0', configuration: Release, os: ubuntu-latest } + runs-on: ${{ matrix.os }} env: NUGET_CERT_REVOCATION_MODE: offline - + steps: - name: Check out code uses: actions/checkout@v4 with: fetch-depth: 0 - + - name: Get changed files id: changed-files if: github.event_name == 'pull_request' @@ -56,16 +56,20 @@ jobs: if: github.event_name != 'pull_request' || steps.changed-files.outputs.added_modified != '' || steps.changed-files.outcome == 'failure' run: | csproj_files=() + exclude_files=("Planners.Core.csproj" "Planners.Core.UnitTests.csproj" "Experimental.Orchestration.Flow.csproj" "Experimental.Orchestration.Flow.UnitTests.csproj" "Experimental.Orchestration.Flow.IntegrationTests.csproj") if [[ ${{ steps.changed-files.outcome }} == 'success' ]]; then for file in ${{ steps.changed-files.outputs.added_modified }}; do echo "$file was changed" dir="./$file" while [[ $dir != "." && $dir != "/" && $dir != $GITHUB_WORKSPACE ]]; do if find "$dir" -maxdepth 1 -name "*.csproj" -print -quit | grep -q .; then - csproj_files+=("$(find "$dir" -maxdepth 1 -name "*.csproj" -print -quit)") + csproj_path="$(find "$dir" -maxdepth 1 -name "*.csproj" -print -quit)" + if [[ ! 
"${exclude_files[@]}" =~ "${csproj_path##*/}" ]]; then + csproj_files+=("$csproj_path") + fi break fi - + dir=$(echo ${dir%/*}) done done diff --git a/.github/workflows/dotnet-integration-tests.yml b/.github/workflows/dotnet-integration-tests.yml index 6bd5b461d019..87f07a09d6ed 100644 --- a/.github/workflows/dotnet-integration-tests.yml +++ b/.github/workflows/dotnet-integration-tests.yml @@ -21,7 +21,7 @@ jobs: os: [ubuntu-latest] configuration: [Debug] runs-on: ${{ matrix.os }} - steps: + steps: - uses: actions/checkout@v4 if: ${{ github.event_name != 'pull_request' }} with: diff --git a/.github/workflows/java-format.yml b/.github/workflows/java-format.yml deleted file mode 100644 index ea1e90d31a2f..000000000000 --- a/.github/workflows/java-format.yml +++ /dev/null @@ -1,88 +0,0 @@ -name: Java format automation -on: - issue_comment: - types: [ created ] - -jobs: - comment-driven-automation: - if: | - github.event.issue.pull_request && - ( - startsWith(github.event.comment.body, '/spotless') || - startsWith(github.event.comment.body, '/help') - ) - - runs-on: ubuntu-latest - - permissions: - issues: write - pull-requests: write - - steps: - - name: Check for command - id: command - uses: xt0rted/slash-command-action@v2 - continue-on-error: true - with: - command: spotless - reaction-type: "eyes" - - - name: Get command - env: - BODY: ${{ github.event.comment.body }} - run: | - # intentionally only looking at the first line of the body - command=$(echo "$BODY" | head -1 | sed "s;^/;;") - echo "COMMAND=$command" >> $GITHUB_ENV - - - uses: actions/checkout@v4 - - - name: Check out PR branch - env: - NUMBER: ${{ github.event.issue.number }} - GH_TOKEN: ${{ github.token }} - run: | - gh pr checkout $NUMBER - if: env.COMMAND == 'spotless' - - - name: Set up Maven cache - uses: actions/setup-java@v3 - with: - java-version: 17 - distribution: microsoft - cache: maven - if: env.COMMAND == 'spotless' - - - name: Set git user - run: | - git config user.name github-actions[bot] - git config user.email github-action[bot]@users.noreply.github.com - if: env.COMMAND == 'spotless' - - - name: Run command - env: - NUMBER: ${{ github.event.issue.number }} - GH_TOKEN: ${{ github.token }} - run: | - available_commands="Available commands: - * \`/spotless\` - runs \`./mvnw process-sources -DskipTests -Pbug-check\` - * \`/help\` - displays available commands - " - if [[ "$COMMAND" == "spotless" ]]; then - ./mvnw process-sources -DskipTests -Pbug-check - if git diff --quiet; then - gh pr comment $NUMBER --body "Already up-to-date" - exit 0 # success - fi - git commit -a -m "./mvnw process-sources -DskipTests -Pbug-check" - git push - elif [[ "$COMMAND" == "help" ]]; then - gh pr comment $NUMBER --body "$available_commands" - else - body="Unknown command: \`$COMMAND\` - - $available_commands - " - gh pr comment $NUMBER --body "$body" - fi - working-directory: java diff --git a/.github/workflows/markdown-link-check.yml b/.github/workflows/markdown-link-check.yml index 4e02c7256673..2cea71de5383 100644 --- a/.github/workflows/markdown-link-check.yml +++ b/.github/workflows/markdown-link-check.yml @@ -3,8 +3,8 @@ name: Check .md links on: workflow_dispatch: pull_request: - branches: [ "main" ] - + branches: ["main", "java-development"] + permissions: contents: read @@ -13,11 +13,11 @@ jobs: runs-on: ubuntu-latest # check out the latest version of the code steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v4 - # Checks the status of hyperlinks in .md files in verbose mode - - name: Check links - uses: 
gaurav-nelson/github-action-markdown-link-check@v1 - with: - use-verbose-mode: 'yes' - config-file: ".github/workflows/markdown-link-check-config.json" + # Checks the status of hyperlinks in .md files in verbose mode + - name: Check links + uses: gaurav-nelson/github-action-markdown-link-check@v1 + with: + use-verbose-mode: "yes" + config-file: ".github/workflows/markdown-link-check-config.json" diff --git a/.github/workflows/node-pr.yml b/.github/workflows/node-pr.yml deleted file mode 100644 index dda61b748988..000000000000 --- a/.github/workflows/node-pr.yml +++ /dev/null @@ -1,72 +0,0 @@ -# This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node -# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs - -name: node-pr - -on: - workflow_dispatch: - pull_request: - branches: ["main"] - paths: - - "samples/apps/**" - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -jobs: - find-yarn-projects: - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-yarn-folders.outputs.matrix }} - - steps: - - uses: actions/checkout@v4 - - - name: Find yarn projects - id: set-yarn-folders - # This step uses a bash script to find all subfolders of /samples that contain a yarn.lock file - run: | - #!/bin/bash - set -e # exit with nonzero exit code if anything fails - shopt -s globstar # enable globstar option to use ** for recursive matching - yarndirs=() - for lockfile in samples/apps/**/yarn.lock; do # loop over all yarn.lock files - dir=$(dirname "$lockfile") # get the directory of the lock file - echo "Found yarn project in $dir" - yarndirs+=("$dir") # add the directory to the yarndirs array - done - - echo "All yarn projects found: '${yarndirs[*]}'" - yarndirs_json=$(echo -n "${yarndirs[*]%\n}" | jq -R -s -j --compact-output 'split(" ")') - matrix_json="{\"node_version\":[18], \"yarn_folder\":$yarndirs_json}" - echo "Setting output matrix to $matrix_json" - echo "matrix=$matrix_json" >> $GITHUB_OUTPUT - - build: - runs-on: ubuntu-latest - needs: find-yarn-projects - - strategy: - matrix: ${{ fromJson(needs.find-yarn-projects.outputs.matrix) }} - - steps: - - uses: actions/checkout@v4 - - name: Use Node.js ${{ matrix.node_version }} - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node_version }} - cache: "yarn" - cache-dependency-path: "samples/apps/**/yarn.lock" - - - name: Run yarn install & yarn build - # This step runs yarn install and yarn build for each project. - # The --frozen-lockfile option ensures that the dependencies are installed exactly as specified in the lock file. - # The -cwd option sets the current working directory to the folder where the yarn.lock file is located. 
- run: | - #!/bin/bash - set -e # exit with nonzero exit code if anything fails - dir=${{ matrix.yarn_folder }} # get the directory of the lock file - echo "Running yarn install and yarn build for $dir" - yarn --cwd "$dir" install --frozen-lockfile # install dependencies - yarn --cwd "$dir" build # run build script diff --git a/.github/workflows/python-unit-tests.yml b/.github/workflows/python-unit-tests.yml index ba58264325b8..39f722e5ba04 100644 --- a/.github/workflows/python-unit-tests.yml +++ b/.github/workflows/python-unit-tests.yml @@ -26,7 +26,7 @@ jobs: run: | python -m pip install poetry pytest cd python - poetry install --without chromadb --without hugging_face --without azure_cognitive_search --without weaviate --without pinecone --without postgres --without qdrant --without redis + poetry install --without chromadb --without azure_cognitive_search --without weaviate --without pinecone --without postgres --without qdrant --without redis - name: Test with pytest run: | cd python && poetry run pytest ./tests/unit diff --git a/.github/workflows/typos.yaml b/.github/workflows/typos.yaml index 94931d48b5d9..e19b7cd61fd8 100644 --- a/.github/workflows/typos.yaml +++ b/.github/workflows/typos.yaml @@ -12,18 +12,18 @@ name: Spell Check on: workflow_dispatch: pull_request: - branches: [ "main", "feature*" ] + branches: ["main", "java-development", "feature*"] jobs: run: name: Spell Check with Typos runs-on: ubuntu-latest steps: - - name: Check out code - uses: actions/checkout@v4 + - name: Check out code + uses: actions/checkout@v4 - - name: Use custom config file - uses: crate-ci/typos@master - with: - config: .github/_typos.toml - write_changes: false + - name: Use custom config file + uses: crate-ci/typos@master + with: + config: .github/_typos.toml + write_changes: false diff --git a/.gitignore b/.gitignore index eb12bfcd2430..d37a856dbc26 100644 --- a/.gitignore +++ b/.gitignore @@ -409,15 +409,19 @@ FodyWeavers.xsd *.key *.pem +# JetBrains IntelliJ +.idea +*.ipr +*.iml +*.iws + .env certs/ launchSettings.json -!samples/dotnet/MsGraphPluginsExample/Properties/launchSettings.json config.development.yaml *.development.config *.development.json .DS_Store -.idea/ node_modules/ obj/ bin/ diff --git a/.vscode/settings.json b/.vscode/settings.json index dae66ae37dc7..e06fc6829f30 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -71,7 +71,8 @@ "**/Thumbs.db": true }, "cSpell.words": [ - "Partitioner" + "Partitioner", + "SKEXP" ], "[java]": { "editor.formatOnSave": false, @@ -90,5 +91,10 @@ }, "java.debug.settings.onBuildFailureProceed": true, "java.compile.nullAnalysis.mode": "disabled", - "dotnet.defaultSolution": "dotnet\\SK-dotnet.sln" + "dotnet.defaultSolution": "dotnet\\SK-dotnet.sln", + "python.testing.pytestArgs": [ + "python/tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true } \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json index ec84e7621884..7993d689209a 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -371,70 +371,6 @@ "group": "PR-Validate" } }, - // KernelHttpServer - { - "label": "run (KernelHttpServer)", - "command": "func", - "type": "shell", - "args": ["start", "--csharp"], - "group": "test", - "options": { - "cwd": "${workspaceFolder}/samples/dotnet/KernelHttpServer" - }, - "presentation": { - "reveal": "always", - "panel": "shared", - "group": "Run-Samples" - } - }, - // chat-summary-webapp-react - { - "label": "run (Starter Chat WebApp React)", - "command": "yarn", - 
"type": "shell", - "args": ["start"], - "group": "test", - "options": { - "cwd": "${workspaceFolder}/samples/apps/chat-summary-webapp-react" - }, - "presentation": { - "reveal": "always", - "panel": "shared", - "group": "Run-Samples" - } - }, - // book-creator-webapp-react - { - "label": "run (Book Creator WebApp React)", - "command": "yarn", - "type": "shell", - "args": ["start"], - "group": "test", - "options": { - "cwd": "${workspaceFolder}/samples/apps/book-creator-webapp-react" - }, - "presentation": { - "reveal": "always", - "panel": "shared", - "group": "Run-Samples" - } - }, - // auth-api-webapp-react - { - "label": "run (Auth API WebApp React)", - "command": "yarn", - "type": "shell", - "args": ["start"], - "group": "test", - "options": { - "cwd": "${workspaceFolder}/samples/apps/auth-api-webapp-react" - }, - "presentation": { - "reveal": "always", - "panel": "shared", - "group": "Run-Samples" - } - } ], "inputs": [ { diff --git a/COMMUNITY.md b/COMMUNITY.md index 829e673c01d5..bf6ab05289fd 100644 --- a/COMMUNITY.md +++ b/COMMUNITY.md @@ -15,8 +15,7 @@ Add Semantic Kernel events to your calendar - we're running two community calls * Americas timezone: download the [calendar.ics](https://aka.ms/sk-community-calendar) file. * Asia Pacific timezone: download the [calendar-APAC.ics](https://aka.ms/sk-community-calendar-apac) file. -To keep topics organized, please submit what you'd like us to cover here: -[https://forms.office.com/r/BbXFzmmFys](https://forms.office.com/r/BbXFzmmFys) +If you have any questions or if you would like to showcase your project(s), please email what you'd like us to cover here: skofficehours[at]microsoft.com. If you are unable to make it live, all meetings will be recorded and posted online. diff --git a/README.md b/README.md index ddab6c95ceef..9a0f0f37413b 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,7 @@ feature parity between our currently supported languages. Java logo
- Using Semantic Kernel in Java + Using Semantic Kernel in Java
@@ -65,7 +65,7 @@ from either OpenAI or Azure OpenAI and to run one of the C#, Python, and Java co ### For C#: 1. Create a new console app. -2. Add the semantic kernel nuget `Microsoft.SemanticKernel`. +2. Add the semantic kernel nuget [Microsoft.SemanticKernel](https://www.nuget.org/packages/Microsoft.SemanticKernel/). 3. Copy the code from [here](dotnet/README.md) into the app `Program.cs` file. 4. Replace the configuration placeholders for API key and other params with your key and settings. 5. Run with `F5` or `dotnet run` @@ -80,8 +80,9 @@ from either OpenAI or Azure OpenAI and to run one of the C#, Python, and Java co ### For Java: -1. Clone and checkout the experimental Java branch: `git clone -b experimental-java https://github.com/microsoft/semantic-kernel.git` -2. Follow the instructions [here](https://github.com/microsoft/semantic-kernel/blob/experimental-java/java/samples/sample-code/README.md) +1. Clone the repository: `git clone https://github.com/microsoft/semantic-kernel.git` + 1. To access the latest Java code, clone and checkout the Java development branch: `git clone -b java-development https://github.com/microsoft/semantic-kernel.git` +2. Follow the instructions [here](https://github.com/microsoft/semantic-kernel/blob/main/java/samples/sample-code/README.md) ## Learning how to use Semantic Kernel @@ -164,6 +165,10 @@ To learn more and get started: - Attend [regular office hours and SK community events](COMMUNITY.md) - Follow the team on our [blog](https://aka.ms/sk/blog) +## Contributor Wall of Fame + +[![semantic-kernel contributors](https://contrib.rocks/image?repo=microsoft/semantic-kernel)](https://github.com/microsoft/semantic-kernel/graphs/contributors) + ## Code of Conduct This project has adopted the diff --git a/SECURITY.md b/SECURITY.md index e138ec5d6a77..eed215e185a7 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -4,7 +4,7 @@ Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). -If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. +If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://www.microsoft.com/en-us/msrc/definition-of-a-security-vulnerability?rtc=1), please report it to us as described below. ## Reporting Security Issues @@ -12,9 +12,9 @@ If you believe you have found a security vulnerability in any Microsoft-owned re Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). -If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). +If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). 
If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc?rtc=2). -You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). +You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/en-us/msrc?rtc=2). Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: @@ -28,7 +28,7 @@ Please include the requested information listed below (as much as you can provid This information will help us triage your report more quickly. -If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. +If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://www.microsoft.com/en-us/msrc/bounty?rtc=2) page for more details about our active programs. ## Preferred Languages @@ -36,6 +36,6 @@ We prefer all communications to be in English. ## Policy -Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). +Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd?rtc=2). diff --git a/docs/FAQS.md b/docs/FAQS.md new file mode 100644 index 000000000000..1e3f07386e1b --- /dev/null +++ b/docs/FAQS.md @@ -0,0 +1,55 @@ +# Frequently Asked Questions + +### How do I get access to nightly builds? + +Nightly builds of the Semantic Kernel are available [here](https://github.com/orgs/microsoft/packages?repo_name=semantic-kernel). + +To download nightly builds, follow these steps: + +1. You will need a GitHub account to complete these steps. +1. Create a GitHub Personal Access Token with the `read:packages` scope using these [instructions](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic). +1. If your account is part of the Microsoft organization, then you must authorize the `Microsoft` organization as a single sign-on organization. +    1. Click "Configure SSO" next to the Personal Access Token you just created and then authorize `Microsoft`. +1. Use the following command to add the Microsoft GitHub Packages source to your NuGet configuration: + +    ```powershell +    dotnet nuget add source --username GITHUBUSERNAME --password GITHUBPERSONALACCESSTOKEN --store-password-in-clear-text --name GitHubMicrosoft "https://nuget.pkg.github.com/microsoft/index.json" +    ``` + +1. Or you can manually create a `NuGet.Config` file. + +    ```xml +    <?xml version="1.0" encoding="utf-8"?> +    <!-- Example configuration equivalent to the dotnet nuget add source command above --> +    <configuration> +      <packageSources> +        <add key="GitHubMicrosoft" value="https://nuget.pkg.github.com/microsoft/index.json" /> +      </packageSources> +      <packageSourceCredentials> +        <GitHubMicrosoft> +          <add key="Username" value="GITHUBUSERNAME" /> +          <add key="ClearTextPassword" value="GITHUBPERSONALACCESSTOKEN" /> +        </GitHubMicrosoft> +      </packageSourceCredentials> +    </configuration> +    ``` + +    * If you place this file in your project folder make sure to have Git (or whatever source control you use) ignore it. +    * For more information on where to store this file go [here](https://learn.microsoft.com/en-us/nuget/reference/nuget-config-file). 
+    * Alternatively, the Microsoft GitHub Packages source can be added to NuGet more easily with the `dotnet nuget add source` command shown above. +1. You can now add packages from the nightly build to your project. +    * E.g. use this command `dotnet add package Microsoft.SemanticKernel.Core --version 0.26.231003.1-nightly` +1. And the latest package release can be referenced in the project like this: +    * `<PackageReference Include="Microsoft.SemanticKernel.Core" Version="*-*" />` + +For more information see: diff --git a/docs/decisions/0004-error-handling.md b/docs/decisions/0004-error-handling.md index 74be52a04227..3c3f61c648d7 100644 --- a/docs/decisions/0004-error-handling.md +++ b/docs/decisions/0004-error-handling.md @@ -1,15 +1,17 @@ --- # These are optional elements. Feel free to remove any of them. status: accepted -contact: semenshi +contact: SergeyMenshykh date: 2023-06-23 deciders: shawncal consulted: stephentoub informed: --- + # Error handling improvements ## Disclaimer + This ADR describes problems and their solutions for improving the error handling aspect of SK. It does not address logging, resiliency, or observability aspects. ## Context and Problem Statement @@ -41,4 +43,4 @@ Currently, there are several aspects of error handling in SK that can be enhance - Identify all cases where the original exception is not preserved as an inner exception of the rethrown SK exception, and address them. - Create a new exception HttpOperationException, which includes a StatusCode property, and implement the necessary logic to map the exception from HttpStatusCode, HttpRequestException, or Azure.RequestFailedException. Update existing SK code that interacts with the HTTP stack to throw HttpOperationException in case of a failed HTTP request and assign the original exception as its inner exception. - Modify all SK components that currently store exceptions to SK context to rethrow them instead. -- Simplify the SK critical exception handling functionality by modifying the IsCriticalException extension method to exclude handling of StackOverflowException and OutOfMemoryException exceptions. This is because the former exception is not thrown, so the calling code won't be executed, while the latter exception doesn't necessarily prevent the execution of recovery code. \ No newline at end of file +- Simplify the SK critical exception handling functionality by modifying the IsCriticalException extension method to exclude handling of StackOverflowException and OutOfMemoryException exceptions. This is because the former exception is not thrown, so the calling code won't be executed, while the latter exception doesn't necessarily prevent the execution of recovery code. diff --git a/docs/decisions/0006-open-api-dynamic-payload-and-namespaces.md b/docs/decisions/0006-open-api-dynamic-payload-and-namespaces.md index f7faf5f2125f..5935d4ce3c8c 100644 --- a/docs/decisions/0006-open-api-dynamic-payload-and-namespaces.md +++ b/docs/decisions/0006-open-api-dynamic-payload-and-namespaces.md @@ -1,32 +1,39 @@ --- status: accepted -contact: semenshi +contact: SergeyMenshykh date: 2023-08-15 deciders: shawncal consulted: informed: --- + # Dynamic payload building for PUT and POST RestAPI operations and parameter namespacing ## Context and Problem Statement + Currently, the SK OpenAPI does not allow the dynamic creation of payload/body for PUT and POST RestAPI operations, even though all the required metadata is available. 
One of the reasons the functionality was not fully developed originally, and eventually removed is that JSON payload/body content of PUT and POST RestAPI operations might contain properties with identical names at various levels. It was not clear how to unambiguously resolve their values from the flat list of context variables. Another reason the functionality has not been added yet is that the 'payload' context variable, along with RestAPI operation data contract schema(OpenAPI, JSON schema, Typings?) should have been sufficient for LLM to provide fully fleshed-out JSON payload/body content without the need to build it dynamically. + ## Decision Drivers -* Create a mechanism that enables the dynamic construction of the payload/body for PUT and POST RestAPI operations. -* Develop a mechanism(namespacing) that allows differentiation of payload properties with identical names at various levels for PUT and POST RestAPI operations. -* Aim to minimize breaking changes and maintain backward compatibility of the code as much as possible. + +- Create a mechanism that enables the dynamic construction of the payload/body for PUT and POST RestAPI operations. +- Develop a mechanism(namespacing) that allows differentiation of payload properties with identical names at various levels for PUT and POST RestAPI operations. +- Aim to minimize breaking changes and maintain backward compatibility of the code as much as possible. ## Considered Options -* Enable the dynamic creation of payload and/or namespacing by default. -* Enable the dynamic creation of payload and/or namespacing based on configuration. + +- Enable the dynamic creation of payload and/or namespacing by default. +- Enable the dynamic creation of payload and/or namespacing based on configuration. ## Decision Outcome + Chosen option: "Enable the dynamic creation of payload and/or namespacing based on configuration". This option keeps things compatible, so the change won't affect any SK consumer code. Additionally, it lets SK consumer code easily control both mechanisms, turning them on or off based on the scenario. ## Additional details ### Enabling dynamic creation of payload + In order to enable the dynamic creation of payloads/bodies for PUT and POST RestAPI operations, please set the `EnableDynamicPayload` property of the `OpenApiSkillExecutionParameters` execution parameters to `true` when importing the AI plugin: ```csharp @@ -34,12 +41,13 @@ var plugin = await kernel.ImportPluginFunctionsAsync("", new Uri("", new Uri(""), new OpenApiSkillExecutionParameters(httpClient) { EnablePayloadNamespacing = true }); ``` + Remember that the namespacing mechanism depends on prefixing parameter names with their parent parameter name, separated by dots. So, use the 'namespaced' parameter names when adding arguments for them to the context variables. Let's consider this JSON: ```json -{ - "upn": "", +{ + "upn": "", "receiver": { "upn": "" }, @@ -70,7 +80,9 @@ Remember that the namespacing mechanism depends on prefixing parameter names wit } } ``` + It contains `upn` properties at different levels. 
The the argument registration for the parameters(property values) will look like: + ```csharp var contextVariables = new ContextVariables(); contextVariables.Set("upn", ""); diff --git a/docs/decisions/0008-support-generic-llm-request-settings.md b/docs/decisions/0008-support-generic-llm-request-settings.md index 43dcafe1f1e6..6fae2fdf3ef5 100644 --- a/docs/decisions/0008-support-generic-llm-request-settings.md +++ b/docs/decisions/0008-support-generic-llm-request-settings.md @@ -2,11 +2,12 @@ # These are optional elements. Feel free to remove any of them. status: accepted contact: markwallace-microsoft -date: 2023-=9-15 +date: 2023-9-15 deciders: shawncal -consulted: stoub, lemiller, dmytrostruk -informed: +consulted: stephentoub, lemillermicrosoft, dmytrostruk +informed: --- + # Refactor to support generic LLM request settings ## Context and Problem Statement @@ -25,18 +26,18 @@ Link to issue raised by the implementer of the Oobabooga AI service: -* Good, SK abstractions contain no references to OpenAI specific request settings -* Neutral, because anonymous types can be used which allows a developer to pass in properties that may be supported by multiple AI services e.g., `temperature` or combine properties for different AI services e.g., `max_tokens` (OpenAI) and `max_new_tokens` (Oobabooga). -* Bad, because it's not clear to developers what they should pass when creating a semantic function -* Bad, because it's not clear to implementors of a chat/text completion service what they should accept or how to add service specific properties. -* Bad, there is no compiler type checking for code paths where the dynamic argument has not been resolved which will impact code quality. Type issues manifest as `RuntimeBinderException`'s and may be difficult to troubleshoot. Special care needs to be taken with return types e.g., may be necessary to specify an explicit type rather than just `var` again to avoid errors such as `Microsoft.CSharp.RuntimeBinder.RuntimeBinderException : Cannot apply indexing with [] to an expression of type 'object'` +- Good, SK abstractions contain no references to OpenAI specific request settings +- Neutral, because anonymous types can be used which allows a developer to pass in properties that may be supported by multiple AI services e.g., `temperature` or combine properties for different AI services e.g., `max_tokens` (OpenAI) and `max_new_tokens` (Oobabooga). +- Bad, because it's not clear to developers what they should pass when creating a semantic function +- Bad, because it's not clear to implementors of a chat/text completion service what they should accept or how to add service specific properties. +- Bad, there is no compiler type checking for code paths where the dynamic argument has not been resolved which will impact code quality. Type issues manifest as `RuntimeBinderException`'s and may be difficult to troubleshoot. Special care needs to be taken with return types e.g., may be necessary to specify an explicit type rather than just `var` again to avoid errors such as `Microsoft.CSharp.RuntimeBinder.RuntimeBinderException : Cannot apply indexing with [] to an expression of type 'object'` ### Use `object` to pass request settings @@ -127,11 +128,11 @@ The calling pattern is the same as for the `dynamic` case i.e. 
use either an ano PR: -* Good, SK abstractions contain no references to OpenAI specific request settings -* Neutral, because anonymous types can be used which allows a developer to pass in properties that may be supported by multiple AI services e.g., `temperature` or combine properties for different AI services e.g., `max_tokens` (OpenAI) and `max_new_tokens` (Oobabooga). -* Bad, because it's not clear to developers what they should pass when creating a semantic function -* Bad, because it's not clear to implementors of a chat/text completion service what they should accept or how to add service specific properties. -* Bad, code is needed to perform type checks and explicit casts. The situation is slightly better than for the `dynamic` case. +- Good, SK abstractions contain no references to OpenAI specific request settings +- Neutral, because anonymous types can be used which allows a developer to pass in properties that may be supported by multiple AI services e.g., `temperature` or combine properties for different AI services e.g., `max_tokens` (OpenAI) and `max_new_tokens` (Oobabooga). +- Bad, because it's not clear to developers what they should pass when creating a semantic function +- Bad, because it's not clear to implementors of a chat/text completion service what they should accept or how to add service specific properties. +- Bad, code is needed to perform type checks and explicit casts. The situation is slightly better than for the `dynamic` case. ### Define a base class for AI request settings which all implementations must extend @@ -221,12 +222,12 @@ this._summarizeConversationFunction = kernel.CreateSemanticFunction( The caveat with this pattern is, assuming a more specific implementation of `AIRequestSettings` uses JSON serialization/deserialization to hydrate an instance from the base `AIRequestSettings`, this will only work if all properties are supported by the default JsonConverter e.g., -* If we have `MyAIRequestSettings` which includes a `Uri` property. The implementation of `MyAIRequestSettings` would make sure to load a URI converter so that it can serialize/deserialize the settings correctly. -* If the settings for `MyAIRequestSettings` are sent to an AI service which relies on the default JsonConverter then a `NotSupportedException` exception will be thrown. +- If we have `MyAIRequestSettings` which includes a `Uri` property. The implementation of `MyAIRequestSettings` would make sure to load a URI converter so that it can serialize/deserialize the settings correctly. +- If the settings for `MyAIRequestSettings` are sent to an AI service which relies on the default JsonConverter then a `NotSupportedException` exception will be thrown. PR: -* Good, SK abstractions contain no references to OpenAI specific request settings -* Good, because it is clear to developers what they should pass when creating a semantic function and it is easy to discover what service specific request setting implementations exist. -* Good, because it is clear to implementors of a chat/text completion service what they should accept and how to extend the base abstraction to add service specific properties. -* Neutral, because `ExtensionData` can be used which allows a developer to pass in properties that may be supported by multiple AI services e.g., `temperature` or combine properties for different AI services e.g., `max_tokens` (OpenAI) and `max_new_tokens` (Oobabooga). 
+- Good, SK abstractions contain no references to OpenAI specific request settings +- Good, because it is clear to developers what they should pass when creating a semantic function and it is easy to discover what service specific request setting implementations exist. +- Good, because it is clear to implementors of a chat/text completion service what they should accept and how to extend the base abstraction to add service specific properties. +- Neutral, because `ExtensionData` can be used which allows a developer to pass in properties that may be supported by multiple AI services e.g., `temperature` or combine properties for different AI services e.g., `max_tokens` (OpenAI) and `max_new_tokens` (Oobabooga). diff --git a/docs/decisions/0009-support-multiple-named-args-in-template-function-calls.md b/docs/decisions/0009-support-multiple-named-args-in-template-function-calls.md index 40251ece1aca..fbf533aa4d77 100644 --- a/docs/decisions/0009-support-multiple-named-args-in-template-function-calls.md +++ b/docs/decisions/0009-support-multiple-named-args-in-template-function-calls.md @@ -4,9 +4,10 @@ status: accepted contact: dmytrostruk date: 2013-06-16 deciders: shawncal, hario90 -consulted: dmytrostruk, matthewbolanos +consulted: dmytrostruk, matthewbolanos informed: lemillermicrosoft --- + # Add support for multiple named arguments in template function calls ## Context and Problem Statement @@ -15,30 +16,30 @@ Native functions now support multiple parameters, populated from context values ## Decision Drivers -* Parity with Guidance -* Readability -* Similarity to languages familiar to SK developers -* YAML compatibility +- Parity with Guidance +- Readability +- Similarity to languages familiar to SK developers +- YAML compatibility ## Considered Options ### Syntax idea 1: Using commas - + ```handlebars {{Skill.MyFunction street: "123 Main St", zip: "98123", city:"Seattle", age: 25}} ``` Pros: -* Commas could make longer function calls easier to read, especially if spaces before and after the arg separator (a colon in this case) are allowed. +- Commas could make longer function calls easier to read, especially if spaces before and after the arg separator (a colon in this case) are allowed. Cons: -* Guidance doesn't use commas -* Spaces are already used as delimiters elsewhere so the added complexity of supporting commas isn't necessary +- Guidance doesn't use commas +- Spaces are already used as delimiters elsewhere so the added complexity of supporting commas isn't necessary ### Syntax idea 2: JavaScript/C#-Style delimiter (colon) - + ```handlebars {{MyFunction street:"123 Main St" zip:"98123" city:"Seattle" age: "25"}} @@ -47,12 +48,12 @@ Cons: Pros: -* Resembles JavaScript Object syntax and C# named argument syntax +- Resembles JavaScript Object syntax and C# named argument syntax Cons: -* Doesn't align with Guidance syntax which uses equal signs as arg part delimiters -* Too similar to YAML key/value pairs if we support YAML prompts in the future. It's likely possible to support colons as delimiters but would be better to have a separator that is distinct from normal YAML syntax. +- Doesn't align with Guidance syntax which uses equal signs as arg part delimiters +- Too similar to YAML key/value pairs if we support YAML prompts in the future. It's likely possible to support colons as delimiters but would be better to have a separator that is distinct from normal YAML syntax. 
### Syntax idea 3: Python/Guidance-Style delimiter @@ -62,29 +63,29 @@ Cons: Pros: -* Resembles Python's keyword argument syntax -* Resembles Guidance's named argument syntax -* Not too similar to YAML key/value pairs if we support YAML prompts in the future. +- Resembles Python's keyword argument syntax +- Resembles Guidance's named argument syntax +- Not too similar to YAML key/value pairs if we support YAML prompts in the future. Cons: -* Doesn't align with C# syntax +- Doesn't align with C# syntax ### Syntax idea 4: Allow whitespace between arg name/value delimiter ```handlebars -{{MyFunction street = "123 Main St" zip = "98123" city = "Seattle"}} +{{MyFunction street="123 Main St" zip="98123" city="Seattle"}} ``` Pros: -* Follows the convention followed by many programming languages of whitespace flexibility where spaces, tabs, and newlines within code don't impact a program's functionality +- Follows the convention followed by many programming languages of whitespace flexibility where spaces, tabs, and newlines within code don't impact a program's functionality Cons: -* Promotes code that is harder to read unless commas can be used (see [Using Commas](#syntax-idea-1-using-commas)) -* More complexity to support -* Doesn't align with Guidance which doesn't support spaces before and after the = sign. +- Promotes code that is harder to read unless commas can be used (see [Using Commas](#syntax-idea-1-using-commas)) +- More complexity to support +- Doesn't align with Guidance which doesn't support spaces before and after the = sign. ## Decision Outcome @@ -92,20 +93,18 @@ Chosen options: "Syntax idea 3: Python/Guidance-Style keyword arguments", becaus Additional decisions: -* Continue supporting up to 1 positional argument for backward compatibility. Currently, the argument passed to a function is assumed to be the `$input` context variable. +- Continue supporting up to 1 positional argument for backward compatibility. Currently, the argument passed to a function is assumed to be the `$input` context variable. Example ```handlebars - {{MyFunction "inputVal" street="123 Main St" zip="98123" city="Seattle"}} - ``` -* Allow arg values to be defined as strings or variables ONLY, e.g. - +- Allow arg values to be defined as strings or variables ONLY, e.g. + ```handlebars -{{MyFunction street=$street zip="98123" city='Seattle'}} +{{MyFunction street=$street zip="98123" city="Seattle"}} ``` If function expects a value other than a string for an argument, the SDK will use the corresponding TypeConverter to parse the string provided when evaluating the expression. diff --git a/docs/decisions/0010-dotnet-project-structure.md b/docs/decisions/0010-dotnet-project-structure.md index 21ff81f6e962..7b21e0711647 100644 --- a/docs/decisions/0010-dotnet-project-structure.md +++ b/docs/decisions/0010-dotnet-project-structure.md @@ -1,13 +1,15 @@ --- - # These are optional elements. 
Feel free to remove any of them status: accepted contact: markwallace-microsoft date: 2023-09-29 -deciders: semenshi, dmytrostruk, rbarreto -consulted: shawncal, stoub, lemiller -informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication} +deciders: SergeyMenshykh, dmytrostruk, RogerBarreto +consulted: shawncal, stephentoub, lemillermicrosoft +informed: + { + list everyone who is kept up-to-date on progress; and with whom there is a one-way communication, + } --- # DotNet Project Structure for 1.0 Release @@ -49,13 +51,13 @@ Chosen option: Option #2: Folder naming matches assembly name, because: Main categories for the projects will be: -1. `Connectors`: ***A connector project allows the Semantic Kernel to connect to AI and Memory services***. Some of the existing connector projects may move to other repositories. -1. `Planners`: ***A planner project provides one or more planner implementations which take an ask and convert it into an executable plan to achieve that ask***. This category will include the current action, sequential and stepwise planners (these could be merged into a single project). Additional planning implementations e.g., planners that generate Powershell or Python code can be added as separate projects. -1. `Functions`: ***A function project that enables the Semantic Kernel to access the functions it will orchestrate***. This category will include: - 1. Semantic functions i.e., prompts executed against an LLM - 1. GRPC remote procedures i.e., procedures executed remotely using the GRPC framework - 1. Open API endpoints i.e., REST endpoints that have Open API definitions executed remotely using the HTTP protocol -1. `Plugins`: ***A plugin project contains the implementation(s) of a Semantic Kernel plugin***. A Semantic Kernel plugin is contains a concrete implementation of a function e.g., a plugin may include code for basic text operations. +1. `Connectors`: **_A connector project allows the Semantic Kernel to connect to AI and Memory services_**. Some of the existing connector projects may move to other repositories. +1. `Planners`: **_A planner project provides one or more planner implementations which take an ask and convert it into an executable plan to achieve that ask_**. This category will include the current action, sequential and stepwise planners (these could be merged into a single project). Additional planning implementations e.g., planners that generate Powershell or Python code can be added as separate projects. +1. `Functions`: **_A function project that enables the Semantic Kernel to access the functions it will orchestrate_**. This category will include: + 1. Semantic functions i.e., prompts executed against an LLM + 1. GRPC remote procedures i.e., procedures executed remotely using the GRPC framework + 1. Open API endpoints i.e., REST endpoints that have Open API definitions executed remotely using the HTTP protocol +1. `Plugins`: **_A plugin project contains the implementation(s) of a Semantic Kernel plugin_**. A Semantic Kernel plugin is contains a concrete implementation of a function e.g., a plugin may include code for basic text operations. ### Option #1: New `planning`, `functions` and `plugins` project areas @@ -97,17 +99,17 @@ SK-dotnet ### Changes -| Project | Description | -|-------------------------------------|-------------| -| `Functions.Native` | Extract native functions from Semantic Kernel core and abstractions. 
| -| `Functions.Semantic` | Extract semantic functions from Semantic Kernel core and abstractions. Include the prompt template engine. | -| `Functions.Planning` | Extract planning from Semantic Kernel core and abstractions. | -| `Functions.Grpc` | Old `Skills.Grpc` project | -| `Functions.OpenAPI` | Old `Skills.OpenAPI` project | -| `Plugins.Core` | Old `Skills.Core` project | -| `Plugins.Document` | Old `Skills.Document` project | -| `Plugins.MsGraph` | Old `Skills.MsGraph` project | -| `Plugins.WebSearch` | Old `Skills.WebSearch` project | +| Project | Description | +| -------------------- | ---------------------------------------------------------------------------------------------------------- | +| `Functions.Native` | Extract native functions from Semantic Kernel core and abstractions. | +| `Functions.Semantic` | Extract semantic functions from Semantic Kernel core and abstractions. Include the prompt template engine. | +| `Functions.Planning` | Extract planning from Semantic Kernel core and abstractions. | +| `Functions.Grpc` | Old `Skills.Grpc` project | +| `Functions.OpenAPI` | Old `Skills.OpenAPI` project | +| `Plugins.Core` | Old `Skills.Core` project | +| `Plugins.Document` | Old `Skills.Document` project | +| `Plugins.MsGraph` | Old `Skills.MsGraph` project | +| `Plugins.WebSearch` | Old `Skills.WebSearch` project | ### Semantic Kernel Skills and Functions @@ -125,19 +127,19 @@ SK-dotnet │ ├── Microsoft.SemanticKernel.Connectors.AI.OpenAI* │ ├── src - │ └── tests + │ └── tests │ (Not shown but all projects will have src and tests subfolders) ├── Microsoft.SemanticKernel.Connectors.AI.HuggingFace ├── Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch ├── Microsoft.SemanticKernel.Connectors.Memory.Qdrant │ ├── Microsoft.SemanticKernel.Planners* - │ + │ ├── Microsoft.SemanticKernel.Reliability.Basic* ├── Microsoft.SemanticKernel.Reliability.Polly - │ + │ ├── Microsoft.SemanticKernel.TemplateEngines.Basic* - │ + │ ├── Microsoft.SemanticKernel.Functions.Semantic* ├── Microsoft.SemanticKernel.Functions.Grpc ├── Microsoft.SemanticKernel.Functions.OpenAPI @@ -156,7 +158,7 @@ SK-dotnet └── Microsoft.SemanticKernel.MetaPackage ``` -***Notes:*** +**_Notes:_** - There will only be a single solution file (initially). - Projects will be grouped in the solution i.e., connectors, planners, plugins, functions, extensions, ... @@ -197,33 +199,33 @@ SK-dotnet └── SemanticKernel.UnitTests ``` -\\* - Means the project is part of the Semantic Kernel meta package +\\\* - Means the project is part of the Semantic Kernel meta package ### Project Descriptions -| Project | Description | -|-------------------------------------|-------------| -| Connectors.AI.OpenAI | Azure OpenAI and OpenAI service connectors | -| Connectors... 
| Collection of other AI service connectors, some of which will move to another repository | -| Connectors.UnitTests | Connector unit tests | -| Planner.ActionPlanner | Semantic Kernel implementation of an action planner | -| Planner.SequentialPlanner | Semantic Kernel implementation of a sequential planner | -| Planner.StepwisePlanner | Semantic Kernel implementation of a stepwise planner | -| TemplateEngine.Basic | Prompt template engine basic implementations which are used by Semantic Functions only | -| Extensions.UnitTests | Extensions unit tests | -| InternalUtilities | Internal utilities which are reused by multiple NuGet packages (all internal) | -| Skills.Core | Core set of native functions which are provided to support Semantic Functions | -| Skills.Document | Native functions for interacting with Microsoft documents | -| Skills.Grpc | Semantic Kernel integration for GRPC based endpoints | -| Skills.MsGraph | Native functions for interacting with Microsoft Graph endpoints | -| Skills.OpenAPI | Semantic Kernel integration for OpenAI endpoints and reference Azure Key Vault implementation | -| Skills.Web | Native functions for interacting with Web endpoints e.g., Bing, Google, File download | -| Skills.UnitTests | Skills unit tests | -| IntegrationTests | Semantic Kernel integration tests | -| SemanticKernel | Semantic Kernel core implementation | -| SemanticKernel.Abstractions | Semantic Kernel abstractions i.e., interface, abstract classes, supporting classes, ... | -| SemanticKernel.MetaPackage | Semantic Kernel meta package i.e., a NuGet package that references other required Semantic Kernel NuGet packages | -| SemanticKernel.UnitTests | Semantic Kernel unit tests | +| Project | Description | +| --------------------------- | ---------------------------------------------------------------------------------------------------------------- | +| Connectors.AI.OpenAI | Azure OpenAI and OpenAI service connectors | +| Connectors... | Collection of other AI service connectors, some of which will move to another repository | +| Connectors.UnitTests | Connector unit tests | +| Planner.ActionPlanner | Semantic Kernel implementation of an action planner | +| Planner.SequentialPlanner | Semantic Kernel implementation of a sequential planner | +| Planner.StepwisePlanner | Semantic Kernel implementation of a stepwise planner | +| TemplateEngine.Basic | Prompt template engine basic implementations which are used by Semantic Functions only | +| Extensions.UnitTests | Extensions unit tests | +| InternalUtilities | Internal utilities which are reused by multiple NuGet packages (all internal) | +| Skills.Core | Core set of native functions which are provided to support Semantic Functions | +| Skills.Document | Native functions for interacting with Microsoft documents | +| Skills.Grpc | Semantic Kernel integration for GRPC based endpoints | +| Skills.MsGraph | Native functions for interacting with Microsoft Graph endpoints | +| Skills.OpenAPI | Semantic Kernel integration for OpenAI endpoints and reference Azure Key Vault implementation | +| Skills.Web | Native functions for interacting with Web endpoints e.g., Bing, Google, File download | +| Skills.UnitTests | Skills unit tests | +| IntegrationTests | Semantic Kernel integration tests | +| SemanticKernel | Semantic Kernel core implementation | +| SemanticKernel.Abstractions | Semantic Kernel abstractions i.e., interface, abstract classes, supporting classes, ... 
| +| SemanticKernel.MetaPackage | Semantic Kernel meta package i.e., a NuGet package that references other required Semantic Kernel NuGet packages | +| SemanticKernel.UnitTests | Semantic Kernel unit tests | ### Naming Patterns @@ -283,10 +285,9 @@ dotnet/ This diagram show current skills are integrated with the Semantic Kernel core. -***Note:*** +**_Note:_** - This is not a true class hierarchy diagram. It show some class relationships and dependencies. - Namespaces are abbreviated to remove Microsoft.SemanticKernel prefix. Namespaces use `_` rather than `.`. ISKFunction class relationships - diff --git a/docs/decisions/0010-openai-function-calling.md b/docs/decisions/0010-openai-function-calling.md deleted file mode 100644 index 63c605916cdd..000000000000 --- a/docs/decisions/0010-openai-function-calling.md +++ /dev/null @@ -1,69 +0,0 @@ ---- -status: accepted -contact: gitri-ms -date: 2023-09-21 -deciders: gitri-ms, shawncal -consulted: lemillermicrosoft, awharrison-28, dmytrostruk, nacharya1 -informed: eavanvalkenburg, kevdome3000 ---- -# OpenAI Function Calling Support - -## Context and Problem Statement - -The [function calling](https://platform.openai.com/docs/guides/gpt/function-calling) capability of OpenAI's Chat Completions API allows developers to describe functions to the model, and have the model decide whether to output a JSON object specifying a function and appropriate arguments to call in response to the given prompt. This capability is enabled by two new API parameters to the `/v1/chat/completions` endpoint: -- `function_call` - auto (default), none, or a specific function to call -- `functions` - JSON descriptions of the functions available to the model - -Functions provided to the model are injected as part of the system message and are billed/counted as input tokens. - -We have received several community requests to provide support for this capability when using SK with the OpenAI chat completion models that support it. - -## Decision Drivers - -* Minimize changes to the core kernel for OpenAI-specific functionality -* Cost concerns with including a long list of function descriptions in the request -* Security and cost concerns with automatically executing functions returned by the model - -## Considered Options - -* Support sending/receiving functions via chat completions endpoint _with_ modifications to interfaces -* Support sending/receiving functions via chat completions endpoint _without_ modifications to interfaces -* Implement a planner around the function calling capability - -## Decision Outcome - -Chosen option: "Support sending/receiving functions via chat completions endpoint _without_ modifications to interfaces" - -With this option, we utilize the existing request settings object to send functions to the model. The app developer controls what functions are included and is responsible for validating and executing the function result. 
- -### Consequences - -* Good, because avoids breaking changes to the core kernel -* Good, because OpenAI-specific functionality is contained to the OpenAI connector package -* Good, because allows app to control what functions are available to the model (including non-SK functions) -* Good, because keeps the option open for integrating with planners in the future -* Neutral, because requires app developer to validate and execute resulting function -* Bad, because not as obvious how to use this capability and access the function results - -## Pros and Cons of the Options - -### Support sending/receiving functions _with_ modifications to chat completions interfaces - -This option would update the `IChatCompletion` and `IChatResult` interfaces to expose parameters/methods for providing and accessing function information. - -* Good, because provides a clear path for using the function calling capability -* Good, because allows app to control what functions are available to the model (including non-SK functions) -* Neutral, because requires app developer to validate and execute resulting function -* Bad, because introduces breaking changes to core kernel abstractions -* Bad, because OpenAI-specific functionality would be included in core kernel abstractions and would need to be ignored by other model providers - -### Implement a planner around the function calling capability - -Orchestrating external function calls fits within SK's concept of planning. With this approach, we would implement a planner that would take the function calling result and produce a plan that the app developer could execute (similar to SK's ActionPlanner). - -* Good, because producing a plan result makes it easy for the app developer to execute the chosen function -* Bad, because functions would need to be registered with the kernel in order to be executed -* Bad, because would create confusion about when to use which planner - -## Additional notes -There has been much discussion and debate over the pros and cons of automatically invoking a function returned by the OpenAI model, if it is registered with the kernel. As there are still many open questions around this behavior and its implications, we have decided to not include this capability in the initial implementation. We will continue to explore this option and may include it in a future update. \ No newline at end of file diff --git a/docs/decisions/0012-kernel-service-registration.md b/docs/decisions/0012-kernel-service-registration.md index db2e8ac14478..2e69907bcc45 100644 --- a/docs/decisions/0012-kernel-service-registration.md +++ b/docs/decisions/0012-kernel-service-registration.md @@ -4,9 +4,10 @@ status: accepted contact: dmytrostruk date: 2023-10-03 deciders: dmytrostruk -consulted: semenshi, rbarreto, markwallace-microsoft -informed: +consulted: SergeyMenshykh, RogerBarreto, markwallace-microsoft +informed: --- + # Kernel Service Registration ## Context and Problem Statement @@ -82,12 +83,12 @@ Custom service collection and service provider on Kernel level to simplify depen Interface `IKernel` will have its own service provider `KernelServiceProvider` with minimal functionality to get required service. ```csharp -public interface IKernelServiceProvider +public interface IKernelServiceProvider { T? GetService(string? name = null); -} +} -public interface IKernel +public interface IKernel { IKernelServiceProvider Services { get; } } @@ -174,7 +175,7 @@ Cons: - Additional dependency for Semantic Kernel package - `Microsoft.Extensions.DependencyInjection`. 
- No possibility to include specific list of services (lack of isolation from host application). -- Possibility of `Microsoft.Extensions.DependencyInjection` version mismatch and runtime errors (e.g. users have `Microsoft.Extensions.DependencyInjection` `--version 2.0` while Semantic Kernel uses `--version 6.0`) +- Possibility of `Microsoft.Extensions.DependencyInjection` version mismatch and runtime errors (e.g. users have `Microsoft.Extensions.DependencyInjection` `--version 2.0` while Semantic Kernel uses `--version 6.0`) ## Decision Outcome diff --git a/docs/decisions/0014-chat-completion-roles-in-prompt.md b/docs/decisions/0014-chat-completion-roles-in-prompt.md index 3f4b39b12fab..354367c27746 100644 --- a/docs/decisions/0014-chat-completion-roles-in-prompt.md +++ b/docs/decisions/0014-chat-completion-roles-in-prompt.md @@ -1,146 +1,165 @@ --- # These are optional elements. Feel free to remove any of them. status: accepted -contact: semenshi +contact: SergeyMenshykh date: 2023-10-23 -deciders: markwallace-microsoft, mabolan +deciders: markwallace-microsoft, matthewbolanos consulted: informed: --- + # SK prompt syntax for chat completion roles ## Context and Problem Statement + Today, SK does not have the ability to mark a block of text in a prompt as a message with a specific role, such as assistant, system, or user. As a result, SK can't chunk the prompt into the list of messages required by chat completion connectors. Additionally, prompts can be defined using a range of template syntaxes supported by various template engines, such as Handlebars, Jinja, and others. Each of these syntaxes may represent chat messages or roles in a distinct way. Consequently, the template engine syntax may leak into SK's domain if no proper abstraction is put in place, coupling SK with the template engines and making it impossible to support new ones. + ## Decision Drivers -* It should be possible to mark a block of text in a prompt as a message with a role so that it can be converted into a list of chat messages for use by chat completion connectors. -* The syntax specific to the template engine message/role should be mapped to the SK message/role syntax to abstract SK from a specific template engine syntax. + +- It should be possible to mark a block of text in a prompt as a message with a role so that it can be converted into a list of chat messages for use by chat completion connectors. +- The syntax specific to the template engine message/role should be mapped to the SK message/role syntax to abstract SK from a specific template engine syntax. ## Considered Options + **1. Message/role tags are generated by functions specified in a prompt.** This option relies on the fact that many template engines can invoke functions specified in the template. Therefore, an internal function can be registered with a template engine, and the function will create a message/model tag based on the provided arguments. The prompt template engine will execute the function and emit the function result into the prompt template, and the rendered prompt will have a section for each message/role decorated with these tags. Here's an example of how this can be done using the SK basic template engine and Handlebars: - Function: - ```csharp - internal class SystemFunctions - { - public string Message(string role) - { - return $""; - } - } - ``` - - Prompt: - - ```bash - {{message role="system"}} - You are a bank manager. Be helpful, respectful, appreciate diverse language styles. 
- {{message role="system"}} - - {{message role="user"}} - I want to {{$input}} - {{message role="user"}} - ``` - - Rendered prompt: - - ```xml - - You are a bank manager. Be helpful, respectful, appreciate diverse language styles. - - - I want to buy a house. - - ``` - -**2. Message/role tags are generated by a prompt-specific mechanism.** This option utilizes template engine syntax constructions, helpers, and handlers other than functions to inject SK message/role tags into the final prompt. - In the example below, to parse the prompt that uses the handlebars syntax we need to register a block helper (a callback that is invoked when the Handlebars engine encounters it) to emit the SK message/role tags in the resulting prompt. - - Block helpers: - ```csharp - this.handlebarsEngine.RegisterHelper("system", (EncodedTextWriter output, Context context, Arguments arguments) => { - //Emit the tags - }); - this.handlebarsEngine.RegisterHelper("user", (EncodedTextWriter output, Context context, Arguments arguments) => { - //Emit the tags - }); - ``` - - Prompt: - ```bash - {{#system~}} - You are a bank manager. Be helpful, respectful, appreciate diverse language styles. - {{~/system}} - {{#user~}} - I want to {{$input}} - {{~/user}} - ``` - - Rendered prompt: - ```xml - - You are a bank manager. Be helpful, respectful, appreciate diverse language styles. - - - I want to buy a house. - - ``` +Function: + +```csharp +internal class SystemFunctions +{ + public string Message(string role) + { + return $""; + } +} +``` + +Prompt: + +```bash +{{message role="system"}} +You are a bank manager. Be helpful, respectful, appreciate diverse language styles. +{{message role="system"}} + +{{message role="user"}} +I want to {{$input}} +{{message role="user"}} +``` + +Rendered prompt: + +```xml + +You are a bank manager. Be helpful, respectful, appreciate diverse language styles. + + +I want to buy a house. + +``` + +**2. Message/role tags are generated by a prompt-specific mechanism.** This option utilizes template engine syntax constructions, helpers, and handlers other than functions to inject SK message/role tags into the final prompt. +In the example below, to parse the prompt that uses the handlebars syntax we need to register a block helper (a callback that is invoked when the Handlebars engine encounters it) to emit the SK message/role tags in the resulting prompt. + +Block helpers: + +```csharp +this.handlebarsEngine.RegisterHelper("system", (EncodedTextWriter output, Context context, Arguments arguments) => { + //Emit the tags +}); +this.handlebarsEngine.RegisterHelper("user", (EncodedTextWriter output, Context context, Arguments arguments) => { + //Emit the tags +}); +``` + +Prompt: + +```bash +{{#system~}} +You are a bank manager. Be helpful, respectful, appreciate diverse language styles. +{{~/system}} +{{#user~}} +I want to {{$input}} +{{~/user}} +``` + +Rendered prompt: + +```xml + +You are a bank manager. Be helpful, respectful, appreciate diverse language styles. + + +I want to buy a house. + +``` **3. Message/role tags are applied on top of prompt template engine**. This option presumes specifying the SK message/role tags directly in a prompt to denote message/role blocks in way that template engine does not parse/handle them and considers them as a regular text. - In the example below, the prompt the `` tags are marking boundaries of the system and user messages and SK basic template engine consider them as regular text without processing them. - - Prompt: - ```xml - - You are a bank manager. 
Be helpful, respectful, appreciate diverse language styles. - - - I want to {{$input}} - - ``` - - Rendered prompt: - ```xml - - You are a bank manager. Be helpful, respectful, appreciate diverse language styles. - - - I want to buy a house. - - ``` +In the example below, the prompt the `` tags are marking boundaries of the system and user messages and SK basic template engine consider them as regular text without processing them. + +Prompt: + +```xml + +You are a bank manager. Be helpful, respectful, appreciate diverse language styles. + + +I want to {{$input}} + +``` + +Rendered prompt: + +```xml + +You are a bank manager. Be helpful, respectful, appreciate diverse language styles. + + +I want to buy a house. + +``` ## Pros and Cons + **1. Message/role tags are generated by functions specified in a prompt** - - Pros: - * Functions can be defined once and reused in prompt templates that support function calling. - Cons: - * Functions might not be supported by some template engines. - * The system/internal functions should be pre-registered by SK so users don't need to import them. - * Each prompt template engine will have how to discover and call the system/internal functions. +Pros: + +- Functions can be defined once and reused in prompt templates that support function calling. + +Cons: + +- Functions might not be supported by some template engines. +- The system/internal functions should be pre-registered by SK so users don't need to import them. +- Each prompt template engine will have how to discover and call the system/internal functions. **2. Message/role tags are generated by prompt specific mechanism** - Pros: - * Enables message/role representation with the optimal template engine syntax constructions, aligning with other constructions for that specific engine. - - Cons: - * Each prompt template engine will have to register callbacks/handlers to handle template syntax constructions rendering to emit SK message/role tags. +Pros: + +- Enables message/role representation with the optimal template engine syntax constructions, aligning with other constructions for that specific engine. + +Cons: + +- Each prompt template engine will have to register callbacks/handlers to handle template syntax constructions rendering to emit SK message/role tags. **3. Message/role tags are applied on top of prompt template engine** - Pros: - * No changes are required to prompt template engines. +Pros: - Cons: - * The message/role tag syntax may not align with other syntax constructions for that template engine. - * Syntax errors in message/role tags will be detected by components parsing the prompt and not by prompt template engines. +- No changes are required to prompt template engines. + +Cons: + +- The message/role tag syntax may not align with other syntax constructions for that template engine. +- Syntax errors in message/role tags will be detected by components parsing the prompt and not by prompt template engines. ## Decision Outcome + It was agreed not to limit ourselves to only one possible option because it may not be feasible to apply that option to new template engines we might need to support in the future. Instead, each time a new template engine is added, every option should be considered, and the optimal one should be preferred for that particular template engine. -It was also agreed that, at the moment, we will go with the "3. 
Message/role tags are applied on top of the prompt template engine" option to support the message/role prompt syntax in SK, which currently uses the `BasicPromptTemplateEngine` engine. \ No newline at end of file +It was also agreed that, at the moment, we will go with the "3. Message/role tags are applied on top of the prompt template engine" option to support the message/role prompt syntax in SK, which currently uses the `BasicPromptTemplateEngine` engine. diff --git a/docs/decisions/0015-completion-service-selection.md b/docs/decisions/0015-completion-service-selection.md index c669d03798fa..624fcfd886b0 100644 --- a/docs/decisions/0015-completion-service-selection.md +++ b/docs/decisions/0015-completion-service-selection.md @@ -1,24 +1,31 @@ --- # These are optional elements. Feel free to remove any of them. status: accepted +contact: SergeyMenshykh date: 2023-10-25 -deciders: markwallace, mabolan +deciders: markwallace-microsoft, matthewbolanos consulted: informed: --- + # Completion service type selection strategy ## Context and Problem Statement + Today, SK runs all text prompts using the text completion service. With the addition of a new chat completion prompts and potentially other prompt types, such as image, on the horizon, we need a way to select a completion service type to run these prompts. + ## Decision Drivers -* Semantic function should be able to identify a completion service type to use when processing text, chat, or image prompts. + +- Semantic function should be able to identify a completion service type to use when processing text, chat, or image prompts. ## Considered Options + **1. Completion service type identified by the "prompt_type" property.** This option presumes adding the 'prompt_type' property to the prompt template config model class, 'PromptTemplateConfig.' The property will be specified once by a prompt developer and will be used by the 'SemanticFunction' class to decide which completion service type (not instance) to use when resolving an instance of that particular completion service type. **Prompt template** + ```json { "schema": "1", @@ -29,12 +36,13 @@ Today, SK runs all text prompts using the text completion service. With the addi ``` **Semantic function pseudocode** + ```csharp if(string.IsNullOrEmpty(promptTemplateConfig.PromptType) || promptTemplateConfig.PromptType == "text") { var service = this._serviceSelector.SelectAIService(context.ServiceProvider, this._modelSettings); //render the prompt, call the service, process and return result -} +} else (promptTemplateConfig.PromptType == "chat") { var service = this._serviceSelector.SelectAIService(context.ServiceProvider, this._modelSettings); @@ -68,12 +76,12 @@ config: { ``` Pros: - - Deterministically specifies which completion service **type** to use, so image prompts won't be rendered by a text completion service, and vice versa. -Cons: - - Another property to specify by a prompt developer. +- Deterministically specifies which completion service **type** to use, so image prompts won't be rendered by a text completion service, and vice versa. +Cons: +- Another property to specify by a prompt developer. **2. Completion service type identified by prompt content.** The idea behind this option is to analyze the rendered prompt by using regex to check for the presence of specific markers associated with the prompt type. For example, the presence of the `` tag in the rendered prompt might indicate that the prompt is a chat prompt and should be handled by the chat completion service. 
This approach may work reliably when we have two completion service types - text and chat - since the logic would be straightforward: if the message tag is found in the rendered prompt, handle it with the chat completion service; otherwise, use the text completion service. However, this logic becomes unreliable when we start adding new prompt types, and those prompts lack markers specific to their prompt type. For example, if we add an image prompt, we won't be able to distinguish between a text prompt and an image prompt unless the image prompt has a unique marker identifying it as such. @@ -107,11 +115,15 @@ config: { ... } ``` + Pros: + - No need for a new property to identify the prompt type. Cons: + - Unreliable unless the prompt contains unique markers specifically identifying the prompt type. ## Decision Outcome + We decided to choose the '2. Completion service type identified by prompt content' option and will reconsider it when we encounter another completion service type that cannot be supported by this option or when we have a solid set of requirements for using a different mechanism for selecting the completion service type. diff --git a/docs/decisions/0016-custom-prompt-template-formats.md b/docs/decisions/0016-custom-prompt-template-formats.md new file mode 100644 index 000000000000..c5b39fdfa805 --- /dev/null +++ b/docs/decisions/0016-custom-prompt-template-formats.md @@ -0,0 +1,289 @@ +--- +status: approved +contact: markwallace-microsoft +date: 2023-10-26 +deciders: matthewbolanos, markwallace-microsoft, SergeyMenshykh, RogerBarreto +consulted: dmytrostruk +informed: +--- + +# Custom Prompt Template Formats + +## Context and Problem Statement + +Semantic Kernel currently supports a custom prompt template language that allows for variable interpolation and function execution. +Semantic Kernel allows for custom prompt template formats to be integrated e.g., prompt templates using [Handlebars](https://handlebarsjs.com/) syntax. + +The purpose of this ADR is to describe how a custom prompt template formats will be supported in the Semantic Kernel. + +### Current Design + +By default the `Kernel` uses the `BasicPromptTemplateEngine` which supports the Semantic Kernel specific template format. + +#### Code Patterns + +Below is an expanded example of how to create a semantic function from a prompt template string which uses the built-in Semantic Kernel format: + +```csharp +IKernel kernel = Kernel.Builder + .WithPromptTemplateEngine(new BasicPromptTemplateEngine()) + .WithOpenAIChatCompletionService( + modelId: openAIModelId, + apiKey: openAIApiKey) + .Build(); + +kernel.ImportFunctions(new TimePlugin(), "time"); + +string templateString = "Today is: {{time.Date}} Is it weekend time (weekend/not weekend)?"; +var promptTemplateConfig = new PromptTemplateConfig(); +var promptTemplate = new PromptTemplate(templateString, promptTemplateConfig, kernel.PromptTemplateEngine); +var kindOfDay = kernel.RegisterSemanticFunction("KindOfDay", promptTemplateConfig, promptTemplate); + +var result = await kernel.RunAsync(kindOfDay); +Console.WriteLine(result.GetValue()); +``` + +We have an extension method `var kindOfDay = kernel.CreateSemanticFunction(promptTemplate);` to simplify the process to create and register a semantic function but the expanded format is shown above to highlight the dependency on `kernel.PromptTemplateEngine`. 
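+For comparison, a minimal sketch of the simplified form (assuming the same kernel configuration, `time` plugin import, and template string as in the expanded example above):
+
+```csharp
+// Hypothetical condensed equivalent of the expanded example above:
+// the extension method creates and registers the semantic function in one call.
+var kindOfDay = kernel.CreateSemanticFunction(
+    "Today is: {{time.Date}} Is it weekend time (weekend/not weekend)?");
+
+var result = await kernel.RunAsync(kindOfDay);
+Console.WriteLine(result.GetValue());
+```
+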
+Also the `BasicPromptTemplateEngine` is the default prompt template engine and will be loaded automatically if the package is available and not other prompt template engine is specified. + +Some issues with this: + +1. `Kernel` only supports a single `IPromptTemplateEngine` so we cannot support using multiple prompt templates at the same time. +1. `IPromptTemplateEngine` is stateless and must perform a parse of the template for each render +1. Our semantic function extension methods relay on our implementation of `IPromptTemplate` (i.e., `PromptTemplate`) which stores the template string and uses the `IPromptTemplateEngine` to render it every time. Note implementations of `IPromptTemplate` are currently stateful as they also store the parameters. + +#### Performance + +The `BasicPromptTemplateEngine` uses the `TemplateTokenizer` to parse the template i.e. extract the blocks. +Then it renders the template i.e. inserts variables and executes functions. Some sample timings for these operations: + +| Operation | Ticks | Milliseconds | +| ---------------- | ------- | ------------ | +| Extract blocks | 1044427 | 103 | +| Render variables | 168 | 0 | + +Sample template used was: `"{{variable1}} {{variable2}} {{variable3}} {{variable4}} {{variable5}}"` + +**Note: We will use the sample implementation to support the f-string template format.** + +Using `HandlebarsDotNet` for the same use case results in the following timings: + +| Operation | Ticks | Milliseconds | +| ---------------- | ----- | ------------ | +| Compile template | 66277 | 6 | +| Render variables | 4173 | 0 | + +**By separating the extract blocks/compile from the render variables operation it will be possible to optimise performance by compiling templates just once.** + +#### Implementing a Custom Prompt Template Engine + +There are two interfaces provided: + +```csharp +public interface IPromptTemplateEngine +{ + Task RenderAsync(string templateText, SKContext context, CancellationToken cancellationToken = default); +} + +public interface IPromptTemplate +{ + IReadOnlyList Parameters { get; } + + public Task RenderAsync(SKContext executionContext, CancellationToken cancellationToken = default); +} +``` + +A prototype implementation of a handlebars prompt template engine could look something like this: + +```csharp +public class HandlebarsTemplateEngine : IPromptTemplateEngine +{ + private readonly ILoggerFactory _loggerFactory; + + public HandlebarsTemplateEngine(ILoggerFactory? loggerFactory = null) + { + this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; + } + + public async Task RenderAsync(string templateText, SKContext context, CancellationToken cancellationToken = default) + { + var handlebars = HandlebarsDotNet.Handlebars.Create(); + + var functionViews = context.Functions.GetFunctionViews(); + foreach (FunctionView functionView in functionViews) + { + var skfunction = context.Functions.GetFunction(functionView.PluginName, functionView.Name); + handlebars.RegisterHelper($"{functionView.PluginName}_{functionView.Name}", async (writer, hcontext, parameters) => + { + var result = await skfunction.InvokeAsync(context).ConfigureAwait(true); + writer.WriteSafeString(result.GetValue()); + }); + } + + var template = handlebars.Compile(templateText); + + var prompt = template(context.Variables); + + return await Task.FromResult(prompt).ConfigureAwait(true); + } +} +``` + +**Note: This is just a prototype implementation for illustration purposes only.** + +Some issues: + +1. 
The `IPromptTemplate` interface is not used and causes confusion. +1. There is no way to allow developers to support multiple prompt template formats at the same time. + +There is one implementation of `IPromptTemplate` provided in the Semantic Kernel core package. +The `RenderAsync` implementation just delegates to the `IPromptTemplateEngine`. +The `Parameters` list gets populated with the parameters defined in the `PromptTemplateConfig` and any missing variables defined in the template. + +#### Handlebars Considerations + +Handlebars does not support dynamic binding of helpers. Consider the following snippet: + +```csharp +HandlebarsHelper link_to = (writer, context, parameters) => +{ + writer.WriteSafeString($"<a href='{context["url"]}'>{context["text"]}</a>"); +}; + +string source = @"Click here: {{link_to}}"; + +var data = new +{ + url = "https://github.com/rexm/handlebars.net", + text = "Handlebars.Net" +}; + +// Act +var handlebars = HandlebarsDotNet.Handlebars.Create(); +handlebars.RegisterHelper("link_to", link_to); +var template = handlebars.Compile(source); +// handlebars.RegisterHelper("link_to", link_to); This also works +var result = template(data); +``` + +Handlebars allows the helpers to be registered with the `Handlebars` instance either before or after a template is compiled. +The optimum would be to have a shared `Handlebars` instance for a specific collection of functions and register the helpers just once. +For use cases where the Kernel function collection may have been mutated, we will be forced to create a `Handlebars` instance at render time +and then register the helpers. This means we cannot take advantage of the performance improvement provided by compiling the template. + +## Decision Drivers + +In no particular order: + +- Support creating a semantic function without an `IKernel` instance. +- Support late binding of functions i.e., having functions resolved when the prompt is rendered. +- Support allowing the prompt template to be parsed (compiled) just once to optimize performance if needed. +- Support using multiple prompt template formats with a single `Kernel` instance. +- Provide simple abstractions which allow third parties to implement support for custom prompt template formats. + +## Considered Options + +- Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory`.
+- + +### Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory` + +ISKFunction class relationships + +Below is an expanded example of how to create a semantic function from a prompt template string which uses the built-in Semantic Kernel format: + +```csharp +// Semantic function can be created once +var promptTemplateFactory = new BasicPromptTemplateFactory(); +string templateString = "Today is: {{time.Date}} Is it weekend time (weekend/not weekend)?"; +var promptTemplateConfig = new PromptTemplateConfig(); +// Line below will replace the commented out code +var promptTemplate = promptTemplateFactory.CreatePromptTemplate(templateString, promptTemplateConfig); +var kindOfDay = ISKFunction.CreateSemanticFunction("KindOfDay", promptTemplateConfig, promptTemplate) +// var promptTemplate = new PromptTemplate(promptTemplate, promptTemplateConfig, kernel.PromptTemplateEngine); +// var kindOfDay = kernel.RegisterSemanticFunction("KindOfDay", promptTemplateConfig, promptTemplate); + +// Create Kernel after creating the semantic function +// Later we will support passing a function collection to the KernelBuilder +IKernel kernel = Kernel.Builder + .WithOpenAIChatCompletionService( + modelId: openAIModelId, + apiKey: openAIApiKey) + .Build(); + +kernel.ImportFunctions(new TimePlugin(), "time"); +// Optionally register the semantic function with the Kernel +kernel.RegisterCustomFunction(kindOfDay); + +var result = await kernel.RunAsync(kindOfDay); +Console.WriteLine(result.GetValue()); +``` + +**Notes:** + +- `BasicPromptTemplateFactory` will be the default implementation and will be automatically provided in `KernelSemanticFunctionExtensions`. Developers will also be able to provide their own implementation. +- The factory uses the new `PromptTemplateConfig.TemplateFormat` to create the appropriate `IPromptTemplate` instance. +- We should look to remove `promptTemplateConfig` as a parameter to `CreateSemanticFunction`. That change is outside of the scope of this ADR. + +The `BasicPromptTemplateFactory` and `BasicPromptTemplate` implementations look as follows: + +```csharp +public sealed class BasicPromptTemplateFactory : IPromptTemplateFactory +{ + private readonly IPromptTemplateFactory _promptTemplateFactory; + private readonly ILoggerFactory _loggerFactory; + + public BasicPromptTemplateFactory(IPromptTemplateFactory promptTemplateFactory, ILoggerFactory? loggerFactory = null) + { + this._promptTemplateFactory = promptTemplateFactory; + this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; + } + + public IPromptTemplate? CreatePromptTemplate(string templateString, PromptTemplateConfig promptTemplateConfig) + { + if (promptTemplateConfig.TemplateFormat.Equals(PromptTemplateConfig.SEMANTICKERNEL, System.StringComparison.Ordinal)) + { + return new BasicPromptTemplate(templateString, promptTemplateConfig, this._loggerFactory); + } + else if (this._promptTemplateFactory is not null) + { + return this._promptTemplateFactory.CreatePromptTemplate(templateString, promptTemplateConfig); + } + + throw new SKException($"Invalid prompt template format {promptTemplateConfig.TemplateFormat}"); + } +} + +public sealed class BasicPromptTemplate : IPromptTemplate +{ + public BasicPromptTemplate(string templateString, PromptTemplateConfig promptTemplateConfig, ILoggerFactory? loggerFactory = null) + { + this._loggerFactory = loggerFactory ?? 
NullLoggerFactory.Instance; + this._logger = this._loggerFactory.CreateLogger(typeof(BasicPromptTemplate)); + this._templateString = templateString; + this._promptTemplateConfig = promptTemplateConfig; + this._parameters = new(() => this.InitParameters()); + this._blocks = new(() => this.ExtractBlocks(this._templateString)); + this._tokenizer = new TemplateTokenizer(this._loggerFactory); + } + + public IReadOnlyList Parameters => this._parameters.Value; + + public async Task RenderAsync(SKContext executionContext, CancellationToken cancellationToken = default) + { + return await this.RenderAsync(this._blocks.Value, executionContext, cancellationToken).ConfigureAwait(false); + } + + // Not showing the implementation details +} +``` + +**Note:** + +- The call to `ExtractBlocks` is called lazily once for each prompt template +- The `RenderAsync` doesn't need to extract the blocks every time + +## Decision Outcome + +Chosen option: "Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory`", because +addresses the requirements and provides good flexibility for the future. diff --git a/docs/decisions/0016-semantic-function-multiple-model-support.md b/docs/decisions/0016-semantic-function-multiple-model-support.md deleted file mode 100644 index bb2a3a72ae33..000000000000 --- a/docs/decisions/0016-semantic-function-multiple-model-support.md +++ /dev/null @@ -1,173 +0,0 @@ ---- -# These are optional elements. Feel free to remove any of them. -status: approved -contact: markwallace-microsoft -date: 2023-10-26 -deciders: markwallace-microsoft, semenshi, rogerbarreto -consulted: mabolan, dmytrostruk -informed: ---- -# Multiple Model Support for Semantic Functions - -## Context and Problem Statement - -Developers need to be able to use multiple models simultaneously e.g., using GPT4 for certain prompts and GPT3.5 for others to reduce cost. - -## Use Cases - -In scope for Semantic Kernel V1.0 is the ability to select AI Service and Model Request Settings: - -1. By service id. - * A Service id uniquely identifies a registered AI Service and is typically defined in the scope of an application. -1. By developer defined strategy. - * A _developer defined strategy_ is a code first approach where a developer provides the logic. -1. By model id. - * A model id uniquely identifies a Large Language Model. Multiple AI service providers can support the same LLM. -1. By arbitrary AI service attributes - * E.g. an AI service can define a provider id which uniquely identifies an AI provider e.g. "Azure OpenAI", "OpenAI", "Hugging Face" - -**This ADR focuses on items 1 & 2 in the above list. To implement 3 & 4 we need to provide the ability to store `AIService` metadata.** - -## Decision Outcome - -Support use cases 1 & 2 listed in this ADR and create separate ADR to add support for AI service metadata. - -## Descriptions of the Use Cases - -**Note: All code is pseudo code and does not accurately reflect what the final implementations will look like.** - -### Select Model Request Settings by Service Id - -_As a developer using the Semantic Kernel I can configure multiple request settings for a semantic function and associate each one with a service id so that the correct request settings are used when different services are used to execute my semantic function._ - -The semantic function template configuration allows multiple model request settings to be configured. In this case the developer configures different settings based on the service id that is used to execute the semantic function. 
-In the example below the semantic function is executed with "AzureText" using `max_tokens=60` because "AzureText" is the first service id in the list of models configured for the prompt. - -```csharp -// Configure a Kernel with multiple LLM's -IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureTextCompletionService(deploymentName: aoai.DeploymentName, - endpoint: aoai.Endpoint, serviceId: "AzureText", apiKey: aoai.ApiKey) - .WithAzureChatCompletionService(deploymentName: aoai.ChatDeploymentName, - endpoint: aoai.Endpoint, serviceId: "AzureChat", apiKey: aoai.ApiKey) - .WithOpenAITextCompletionService(modelId: oai.ModelId, - serviceId: "OpenAIText", apiKey: oai.ApiKey, setAsDefault: true) - .WithOpenAIChatCompletionService(modelId: oai.ChatModelId, - serviceId: "OpenAIChat", apiKey: oai.ApiKey, setAsDefault: true) - .Build(); - -// Configure semantic function with multiple LLM request settings -var modelSettings = new List -{ - new OpenAIRequestSettings() { ServiceId = "AzureText", MaxTokens = 60 }, - new OpenAIRequestSettings() { ServiceId = "AzureChat", MaxTokens = 120 }, - new OpenAIRequestSettings() { ServiceId = "OpenAIText", MaxTokens = 180 }, - new OpenAIRequestSettings() { ServiceId = "OpenAIChat", MaxTokens = 240 } -}; -var prompt = "Hello AI, what can you do for me?"; -var promptTemplateConfig = new PromptTemplateConfig() { ModelSettings = modelSettings }; -var func = kernel.CreateSemanticFunction(prompt, config: promptTemplateConfig, "HelloAI"); - -// Semantic function is executed with AzureText using max_tokens=60 -result = await kernel.RunAsync(func); -``` - -This works by using the `IAIServiceSelector` interface as the strategy for selecting the AI service and request settings to user when invoking a semantic function. -The interface is defined as follows: - -```csharp -public interface IAIServiceSelector -{ - (T?, AIRequestSettings?) SelectAIService( - string renderedPrompt, - IAIServiceProvider serviceProvider, - IReadOnlyList? modelSettings) where T : IAIService; -} -``` - -A default `OrderedIAIServiceSelector` implementation is provided which selects the AI service based on the order of the model request settings defined for the semantic function. - -* The implementation checks if a service exists which the corresponding service id and if it does it and the associated model request settings will be used. -* In no model request settings are defined then the default text completion service is used. -* A default set of request settings can be specified by leaving the service id undefined or empty, the first such default will be used. -* If no default if specified and none of the specified services are available the operation will fail. - -### Select AI Service and Model Request Settings By Developer Defined Strategy - -_As a developer using the Semantic Kernel I can provide an implementation which selects the AI service and request settings used to execute my function so that I can dynamically control which AI service and settings are used to execute my semantic function._ - -In this case the developer configures different settings based on the service id and provides an AI Service Selector which determines which AI Service will be used when the semantic function is executed. -In the example below the semantic function is executed with whatever AI Service and AI Request Settings `MyAIServiceSelector` returns e.g. 
it will be possible to create an AI Service Selector that computes the token count of the rendered prompt and uses that to determine which service to use. - -```csharp -// Configure a Kernel with multiple LLM's -IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureTextCompletionService(deploymentName: aoai.DeploymentName, - endpoint: aoai.Endpoint, serviceId: "AzureText", apiKey: aoai.ApiKey) - .WithAzureChatCompletionService(deploymentName: aoai.ChatDeploymentName, - endpoint: aoai.Endpoint, serviceId: "AzureChat", apiKey: aoai.ApiKey) - .WithOpenAITextCompletionService(modelId: oai.ModelId, - serviceId: "OpenAIText", apiKey: oai.ApiKey, setAsDefault: true) - .WithOpenAIChatCompletionService(modelId: oai.ChatModelId, - serviceId: "OpenAIChat", apiKey: oai.ApiKey, setAsDefault: true) - .WithAIServiceSelector(new MyAIServiceSelector()) - .Build(); - -// Configure semantic function with multiple LLM request settings -var modelSettings = new List -{ - new OpenAIRequestSettings() { ServiceId = "AzureText", MaxTokens = 60 }, - new OpenAIRequestSettings() { ServiceId = "AzureChat", MaxTokens = 120 }, - new OpenAIRequestSettings() { ServiceId = "OpenAIText", MaxTokens = 180 }, - new OpenAIRequestSettings() { ServiceId = "OpenAIChat", MaxTokens = 240 } -}; -var prompt = "Hello AI, what can you do for me?"; -var promptTemplateConfig = new PromptTemplateConfig() { ModelSettings = modelSettings }; -var func = kernel.CreateSemanticFunction(prompt, config: promptTemplateConfig, "HelloAI"); - -// Semantic function is executed with AI Service and AI request Settings dynamically determined -result = await kernel.RunAsync(func, funcVariables); -``` - -## More Information - -### Select AI Service by Service Id - -The following use case is supported. Developers can create a `Kernel`` instance with multiple named AI services. When invoking a semantic function the service id (and optionally request settings to be used) can be specified. The named AI service will be used to execute the prompt. 
- -```csharp -var aoai = TestConfiguration.AzureOpenAI; -var oai = TestConfiguration.OpenAI; - -// Configure a Kernel with multiple LLM's -IKernel kernel = Kernel.Builder - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureTextCompletionService(deploymentName: aoai.DeploymentName, - endpoint: aoai.Endpoint, serviceId: "AzureText", apiKey: aoai.ApiKey) - .WithAzureChatCompletionService(deploymentName: aoai.ChatDeploymentName, - endpoint: aoai.Endpoint, serviceId: "AzureChat", apiKey: aoai.ApiKey) - .WithOpenAITextCompletionService(modelId: oai.ModelId, - serviceId: "OpenAIText", apiKey: oai.ApiKey) - .WithOpenAIChatCompletionService(modelId: oai.ChatModelId, - serviceId: "OpenAIChat", apiKey: oai.ApiKey) - .Build(); - -// Invoke the semantic function and service and request settings to use -result = await kernel.InvokeSemanticFunctionAsync(prompt, - requestSettings: new OpenAIRequestSettings() - { ServiceId = "AzureText", MaxTokens = 60 }); - -result = await kernel.InvokeSemanticFunctionAsync(prompt, - requestSettings: new OpenAIRequestSettings() - { ServiceId = "AzureChat", MaxTokens = 120 }); - -result = await kernel.InvokeSemanticFunctionAsync(prompt, - requestSettings: new OpenAIRequestSettings() - { ServiceId = "OpenAIText", MaxTokens = 180 }); - -result = await kernel.InvokeSemanticFunctionAsync(prompt, - requestSettings: new OpenAIRequestSettings() - { ServiceId = "OpenAIChat", MaxTokens = 240 }); -``` diff --git a/docs/decisions/0017-custom-prompt-template-formats.md b/docs/decisions/0017-custom-prompt-template-formats.md deleted file mode 100644 index 755d19a75472..000000000000 --- a/docs/decisions/0017-custom-prompt-template-formats.md +++ /dev/null @@ -1,284 +0,0 @@ ---- -status: approved -contact: markwallace-microsoft -date: 2023-10-26 -deciders: mabolan, markwallace-microsoft, semenshi, rbarreto -consulted: dmytrostruk -informed: ---- -# Custom Prompt Template Formats - -## Context and Problem Statement - -Semantic Kernel currently supports a custom prompt template language that allows for variable interpolation and function execution. -Semantic Kernel allows for custom prompt template formats to be integrated e.g., prompt templates using [Handlebars](https://handlebarsjs.com/) syntax. - -The purpose of this ADR is to describe how a custom prompt template formats will be supported in the Semantic Kernel. - -### Current Design - -By default the `Kernel` uses the `BasicPromptTemplateEngine` which supports the Semantic Kernel specific template format. 
- -#### Code Patterns - -Below is an expanded example of how to create a semantic function from a prompt template string which uses the built-in Semantic Kernel format: - -```csharp -IKernel kernel = Kernel.Builder - .WithPromptTemplateEngine(new BasicPromptTemplateEngine()) - .WithOpenAIChatCompletionService( - modelId: openAIModelId, - apiKey: openAIApiKey) - .Build(); - -kernel.ImportFunctions(new TimePlugin(), "time"); - -string templateString = "Today is: {{time.Date}} Is it weekend time (weekend/not weekend)?"; -var promptTemplateConfig = new PromptTemplateConfig(); -var promptTemplate = new PromptTemplate(templateString, promptTemplateConfig, kernel.PromptTemplateEngine); -var kindOfDay = kernel.RegisterSemanticFunction("KindOfDay", promptTemplateConfig, promptTemplate); - -var result = await kernel.RunAsync(kindOfDay); -Console.WriteLine(result.GetValue()); -``` - -We have an extension method `var kindOfDay = kernel.CreateSemanticFunction(promptTemplate);` to simplify the process to create and register a semantic function but the expanded format is shown above to highlight the dependency on `kernel.PromptTemplateEngine`. -Also the `BasicPromptTemplateEngine` is the default prompt template engine and will be loaded automatically if the package is available and no other prompt template engine is specified. - -Some issues with this: - -1. You need to have a `Kernel` instance to create a semantic function, which is contrary to one of the goals of allow semantic functions to be created once and reused across multiple `Kernel` instances. -1. `Kernel` only supports a single `IPromptTemplateEngine` so we cannot support using multiple prompt templates at the same time. -1. `IPromptTemplateEngine` is stateless and must perform a parse of the template for each render -1. Our semantic function extension methods rely on our implementation of `IPromptTemplate` (i.e., `PromptTemplate`) which stores the template string and uses the `IPromptTemplateEngine` to render it every time. Note implementations of `IPromptTemplate` are currently stateful as they also store the parameters. - -#### Performance - -The `BasicPromptTemplateEngine` uses the `TemplateTokenizer` to parse the template i.e. extract the blocks. -Then it renders the template i.e. inserts variables and executes functions. 
Some sample timings for these operations: - -| Operation | Ticks | Milliseconds | -|------------------|---------|--------------| -| Extract blocks | 1044427 | 103 | -| Render variables | 168 | 0 | - -Sample template used was: `"{{variable1}} {{variable2}} {{variable3}} {{variable4}} {{variable5}}"` - -**Note: We will use the sample implementation to support the f-string template format.** - -Using `HandlebarsDotNet` for the same use case results in the following timings: - -| Operation | Ticks | Milliseconds | -|------------------|---------|--------------| -| Compile template | 66277 | 6 | -| Render variables | 4173 | 0 | - -**By separating the extract blocks/compile from the render variables operation it will be possible to optimise performance by compiling templates just once.** - -#### Implementing a Custom Prompt Template Engine - -There are two interfaces provided: - -```csharp -public interface IPromptTemplateEngine -{ - Task RenderAsync(string templateText, SKContext context, CancellationToken cancellationToken = default); -} - -public interface IPromptTemplate -{ - IReadOnlyList Parameters { get; } - - public Task RenderAsync(SKContext executionContext, CancellationToken cancellationToken = default); -} -``` - -A prototype implementation of a handlebars prompt template engine could look something like this: - -```csharp -public class HandlebarsTemplateEngine : IPromptTemplateEngine -{ - private readonly ILoggerFactory _loggerFactory; - - public HandlebarsTemplateEngine(ILoggerFactory? loggerFactory = null) - { - this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; - } - - public async Task RenderAsync(string templateText, SKContext context, CancellationToken cancellationToken = default) - { - var handlebars = HandlebarsDotNet.Handlebars.Create(); - - var functionViews = context.Functions.GetFunctionViews(); - foreach (FunctionView functionView in functionViews) - { - var skfunction = context.Functions.GetFunction(functionView.PluginName, functionView.Name); - handlebars.RegisterHelper($"{functionView.PluginName}_{functionView.Name}", async (writer, hcontext, parameters) => - { - var result = await skfunction.InvokeAsync(context).ConfigureAwait(true); - writer.WriteSafeString(result.GetValue()); - }); - } - - var template = handlebars.Compile(templateText); - - var prompt = template(context.Variables); - - return await Task.FromResult(prompt).ConfigureAwait(true); - } -} -``` - -**Note: This is just a prototype implementation for illustration purposes only.** - -Some issues: - -1. The `IPromptTemplate` interface is not used and causes confusion. -1. There is no way to allow developers to support multiple prompt template formats at the same time. - -There is one implementation of `IPromptTemplate` provided in the Semantic Kernel core package. -The `RenderAsync` implementation just delegates to the `IPromptTemplateEngine`. -The `Parameters` list get's populated with the parameters defined in the `PromptTemplateConfig` and any missing variables defined in the template. - -#### Handlebars Considerations - -Handlebars does not support dynamic binding of helpers. 
Consider the following snippet: - -```csharp -HandlebarsHelper link_to = (writer, context, parameters) => -{ - writer.WriteSafeString($"{context["text"]}"); -}; - -string source = @"Click here: {{link_to}}"; - -var data = new -{ - url = "https://github.com/rexm/handlebars.net", - text = "Handlebars.Net" -}; - -// Act -var handlebars = HandlebarsDotNet.Handlebars.Create(); -handlebars.RegisterHelper("link_to", link_to); -var template = handlebars1.Compile(source); -// handlebars.RegisterHelper("link_to", link_to); This also works -var result = template1(data); -``` - -Handlebars allows the helpers to be registered with the `Handlebars` instance either before or after a template is compiled. -The optimum would be to have a shared `Handlebars` instance for a specific collection of functions and register the helpers just once. -For use cases where the Kernel function collection may have been mutated we will be forced to create a `Handlebars` instance at render time -and then register the helpers. This means we cannot take advantage of the performance improvement provided by compiling the template. - -## Decision Drivers - -In no particular order: - -* Support creating a semantic function without a `IKernel`instance. -* Support late binding of functions i.e., having functions resolved when the prompt is rendered. -* Support allowing the prompt template to be parsed (compiled) just once to optimize performance if needed. -* Support using multiple prompt template formats with a single `Kernel` instance. -* Provide simple abstractions which allow third parties to implement support for custom prompt template formats. - -## Considered Options - -* Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory`. - -### Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory` - -ISKFunction class relationships - -Below is an expanded example of how to create a semantic function from a prompt template string which uses the built-in Semantic Kernel format: - -```csharp -// Semantic function can be created once -var promptTemplateFactory = new BasicPromptTemplateFactory(); -string templateString = "Today is: {{time.Date}} Is it weekend time (weekend/not weekend)?"; -var promptTemplate = promptTemplateFactory.CreatePromptTemplate(templateString, new PromptTemplateConfig()); -var kindOfDay = ISKFunction.CreateSemanticFunction("KindOfDay", promptTemplateConfig, promptTemplate) - -// Create Kernel after creating the semantic function -// Later we will support passing a function collection to the KernelBuilder -IKernel kernel = Kernel.Builder - .WithOpenAIChatCompletionService( - modelId: openAIModelId, - apiKey: openAIApiKey) - .Build(); - -kernel.ImportFunctions(new TimePlugin(), "time"); -// Optionally register the semantic function with the Kernel -// kernel.RegisterCustomFunction(kindOfDay); - -var result = await kernel.RunAsync(kindOfDay); -Console.WriteLine(result.GetValue()); -``` - -**Notes:** - -* `BasicPromptTemplateFactory` will be the default implementation and will be automatically provided in `KernelSemanticFunctionExtensions`. Developers will also be able to provide their own implementation. -* The factory uses the new `PromptTemplateConfig.TemplateFormat` to create the appropriate `IPromptTemplate` instance. -* We should look to remove `promptTemplateConfig` as a parameter to `CreateSemanticFunction`. That change is outside of the scope of this ADR. 
- -The `BasicPromptTemplateFactory` and `BasicPromptTemplate` implementations look as follows: - -```csharp -public sealed class BasicPromptTemplateFactory : IPromptTemplateFactory -{ - private readonly IPromptTemplateFactory _promptTemplateFactory; - private readonly ILoggerFactory _loggerFactory; - - public BasicPromptTemplateFactory(IPromptTemplateFactory promptTemplateFactory, ILoggerFactory? loggerFactory = null) - { - this._promptTemplateFactory = promptTemplateFactory; - this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; - } - - public IPromptTemplate? CreatePromptTemplate(string templateString, PromptTemplateConfig promptTemplateConfig) - { - if (promptTemplateConfig.TemplateFormat.Equals(PromptTemplateConfig.SEMANTICKERNEL, System.StringComparison.Ordinal)) - { - return new BasicPromptTemplate(templateString, promptTemplateConfig, this._loggerFactory); - } - else if (this._promptTemplateFactory is not null) - { - return this._promptTemplateFactory.CreatePromptTemplate(templateString, promptTemplateConfig); - } - - throw new SKException($"Invalid prompt template format {promptTemplateConfig.TemplateFormat}"); - } -} - -public sealed class BasicPromptTemplate : IPromptTemplate -{ - public BasicPromptTemplate(string templateString, PromptTemplateConfig promptTemplateConfig, ILoggerFactory? loggerFactory = null) - { - this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; - this._logger = this._loggerFactory.CreateLogger(typeof(BasicPromptTemplate)); - this._templateString = templateString; - this._promptTemplateConfig = promptTemplateConfig; - this._parameters = new(() => this.InitParameters()); - this._blocks = new(() => this.ExtractBlocks(this._templateString)); - this._tokenizer = new TemplateTokenizer(this._loggerFactory); - } - - public IReadOnlyList Parameters => this._parameters.Value; - - public async Task RenderAsync(SKContext executionContext, CancellationToken cancellationToken = default) - { - return await this.RenderAsync(this._blocks.Value, executionContext, cancellationToken).ConfigureAwait(false); - } - - // Not showing the implementation details -} -``` - -**Note:** - -* The call to `ExtractBlocks` is called lazily once for each prompt template -* The `RenderAsync` doesn't need to extract the blocks every time - -## Decision Outcome - -Chosen option: "Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory`", because -addresses the requirements and provides good flexibility for the future. diff --git a/docs/decisions/0017-openai-function-calling.md b/docs/decisions/0017-openai-function-calling.md new file mode 100644 index 000000000000..f0035ce458c1 --- /dev/null +++ b/docs/decisions/0017-openai-function-calling.md @@ -0,0 +1,72 @@ +--- +status: accepted +contact: gitri-ms +date: 2023-09-21 +deciders: gitri-ms, shawncal +consulted: lemillermicrosoft, awharrison-28, dmytrostruk, nacharya1 +informed: eavanvalkenburg, kevdome3000 +--- + +# OpenAI Function Calling Support + +## Context and Problem Statement + +The [function calling](https://platform.openai.com/docs/guides/gpt/function-calling) capability of OpenAI's Chat Completions API allows developers to describe functions to the model, and have the model decide whether to output a JSON object specifying a function and appropriate arguments to call in response to the given prompt. 
This capability is enabled by two new API parameters to the `/v1/chat/completions` endpoint: + +- `function_call` - auto (default), none, or a specific function to call +- `functions` - JSON descriptions of the functions available to the model + +Functions provided to the model are injected as part of the system message and are billed/counted as input tokens. + +We have received several community requests to provide support for this capability when using SK with the OpenAI chat completion models that support it. + +## Decision Drivers + +- Minimize changes to the core kernel for OpenAI-specific functionality +- Cost concerns with including a long list of function descriptions in the request +- Security and cost concerns with automatically executing functions returned by the model + +## Considered Options + +- Support sending/receiving functions via chat completions endpoint _with_ modifications to interfaces +- Support sending/receiving functions via chat completions endpoint _without_ modifications to interfaces +- Implement a planner around the function calling capability + +## Decision Outcome + +Chosen option: "Support sending/receiving functions via chat completions endpoint _without_ modifications to interfaces" + +With this option, we utilize the existing request settings object to send functions to the model. The app developer controls what functions are included and is responsible for validating and executing the function result. + +### Consequences + +- Good, because avoids breaking changes to the core kernel +- Good, because OpenAI-specific functionality is contained to the OpenAI connector package +- Good, because allows app to control what functions are available to the model (including non-SK functions) +- Good, because keeps the option open for integrating with planners in the future +- Neutral, because requires app developer to validate and execute resulting function +- Bad, because not as obvious how to use this capability and access the function results + +## Pros and Cons of the Options + +### Support sending/receiving functions _with_ modifications to chat completions interfaces + +This option would update the `IChatCompletion` and `IChatResult` interfaces to expose parameters/methods for providing and accessing function information. + +- Good, because provides a clear path for using the function calling capability +- Good, because allows app to control what functions are available to the model (including non-SK functions) +- Neutral, because requires app developer to validate and execute resulting function +- Bad, because introduces breaking changes to core kernel abstractions +- Bad, because OpenAI-specific functionality would be included in core kernel abstractions and would need to be ignored by other model providers + +### Implement a planner around the function calling capability + +Orchestrating external function calls fits within SK's concept of planning. With this approach, we would implement a planner that would take the function calling result and produce a plan that the app developer could execute (similar to SK's ActionPlanner). 
+ +- Good, because producing a plan result makes it easy for the app developer to execute the chosen function +- Bad, because functions would need to be registered with the kernel in order to be executed +- Bad, because would create confusion about when to use which planner + +## Additional notes + +There has been much discussion and debate over the pros and cons of automatically invoking a function returned by the OpenAI model, if it is registered with the kernel. As there are still many open questions around this behavior and its implications, we have decided to not include this capability in the initial implementation. We will continue to explore this option and may include it in a future update. diff --git a/docs/decisions/0018-custom-prompt-template-formats.md b/docs/decisions/0018-custom-prompt-template-formats.md new file mode 100644 index 000000000000..5cd1f7f90cb4 --- /dev/null +++ b/docs/decisions/0018-custom-prompt-template-formats.md @@ -0,0 +1,285 @@ +--- +status: approved +contact: markwallace-microsoft +date: 2023-10-26 +deciders: matthewbolanos, markwallace-microsoft, SergeyMenshykh, RogerBarreto +consulted: dmytrostruk +informed: +--- + +# Custom Prompt Template Formats + +## Context and Problem Statement + +Semantic Kernel currently supports a custom prompt template language that allows for variable interpolation and function execution. +Semantic Kernel allows for custom prompt template formats to be integrated e.g., prompt templates using [Handlebars](https://handlebarsjs.com/) syntax. + +The purpose of this ADR is to describe how a custom prompt template formats will be supported in the Semantic Kernel. + +### Current Design + +By default the `Kernel` uses the `BasicPromptTemplateEngine` which supports the Semantic Kernel specific template format. + +#### Code Patterns + +Below is an expanded example of how to create a semantic function from a prompt template string which uses the built-in Semantic Kernel format: + +```csharp +IKernel kernel = Kernel.Builder + .WithPromptTemplateEngine(new BasicPromptTemplateEngine()) + .WithOpenAIChatCompletionService( + modelId: openAIModelId, + apiKey: openAIApiKey) + .Build(); + +kernel.ImportFunctions(new TimePlugin(), "time"); + +string templateString = "Today is: {{time.Date}} Is it weekend time (weekend/not weekend)?"; +var promptTemplateConfig = new PromptTemplateConfig(); +var promptTemplate = new PromptTemplate(templateString, promptTemplateConfig, kernel.PromptTemplateEngine); +var kindOfDay = kernel.RegisterSemanticFunction("KindOfDay", promptTemplateConfig, promptTemplate); + +var result = await kernel.RunAsync(kindOfDay); +Console.WriteLine(result.GetValue()); +``` + +We have an extension method `var kindOfDay = kernel.CreateSemanticFunction(promptTemplate);` to simplify the process to create and register a semantic function but the expanded format is shown above to highlight the dependency on `kernel.PromptTemplateEngine`. +Also the `BasicPromptTemplateEngine` is the default prompt template engine and will be loaded automatically if the package is available and no other prompt template engine is specified. + +Some issues with this: + +1. You need to have a `Kernel` instance to create a semantic function, which is contrary to one of the goals of allow semantic functions to be created once and reused across multiple `Kernel` instances. +1. `Kernel` only supports a single `IPromptTemplateEngine` so we cannot support using multiple prompt templates at the same time. +1. 
`IPromptTemplateEngine` is stateless and must perform a parse of the template for each render +1. Our semantic function extension methods rely on our implementation of `IPromptTemplate` (i.e., `PromptTemplate`) which stores the template string and uses the `IPromptTemplateEngine` to render it every time. Note implementations of `IPromptTemplate` are currently stateful as they also store the parameters. + +#### Performance + +The `BasicPromptTemplateEngine` uses the `TemplateTokenizer` to parse the template i.e. extract the blocks. +Then it renders the template i.e. inserts variables and executes functions. Some sample timings for these operations: + +| Operation | Ticks | Milliseconds | +| ---------------- | ------- | ------------ | +| Extract blocks | 1044427 | 103 | +| Render variables | 168 | 0 | + +Sample template used was: `"{{variable1}} {{variable2}} {{variable3}} {{variable4}} {{variable5}}"` + +**Note: We will use the sample implementation to support the f-string template format.** + +Using `HandlebarsDotNet` for the same use case results in the following timings: + +| Operation | Ticks | Milliseconds | +| ---------------- | ----- | ------------ | +| Compile template | 66277 | 6 | +| Render variables | 4173 | 0 | + +**By separating the extract blocks/compile from the render variables operation it will be possible to optimise performance by compiling templates just once.** + +#### Implementing a Custom Prompt Template Engine + +There are two interfaces provided: + +```csharp +public interface IPromptTemplateEngine +{ + Task RenderAsync(string templateText, SKContext context, CancellationToken cancellationToken = default); +} + +public interface IPromptTemplate +{ + IReadOnlyList Parameters { get; } + + public Task RenderAsync(SKContext executionContext, CancellationToken cancellationToken = default); +} +``` + +A prototype implementation of a handlebars prompt template engine could look something like this: + +```csharp +public class HandlebarsTemplateEngine : IPromptTemplateEngine +{ + private readonly ILoggerFactory _loggerFactory; + + public HandlebarsTemplateEngine(ILoggerFactory? loggerFactory = null) + { + this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; + } + + public async Task RenderAsync(string templateText, SKContext context, CancellationToken cancellationToken = default) + { + var handlebars = HandlebarsDotNet.Handlebars.Create(); + + var functionViews = context.Functions.GetFunctionViews(); + foreach (FunctionView functionView in functionViews) + { + var skfunction = context.Functions.GetFunction(functionView.PluginName, functionView.Name); + handlebars.RegisterHelper($"{functionView.PluginName}_{functionView.Name}", async (writer, hcontext, parameters) => + { + var result = await skfunction.InvokeAsync(context).ConfigureAwait(true); + writer.WriteSafeString(result.GetValue()); + }); + } + + var template = handlebars.Compile(templateText); + + var prompt = template(context.Variables); + + return await Task.FromResult(prompt).ConfigureAwait(true); + } +} +``` + +**Note: This is just a prototype implementation for illustration purposes only.** + +Some issues: + +1. The `IPromptTemplate` interface is not used and causes confusion. +1. There is no way to allow developers to support multiple prompt template formats at the same time. + +There is one implementation of `IPromptTemplate` provided in the Semantic Kernel core package. +The `RenderAsync` implementation just delegates to the `IPromptTemplateEngine`. 
+The `Parameters` list gets populated with the parameters defined in the `PromptTemplateConfig` and any missing variables defined in the template.
+
+#### Handlebars Considerations
+
+Handlebars does not support dynamic binding of helpers. Consider the following snippet:
+
+```csharp
+HandlebarsHelper link_to = (writer, context, parameters) =>
+{
+    writer.WriteSafeString($"<a href='{context["url"]}'>{context["text"]}</a>");
+};
+
+string source = @"Click here: {{link_to}}";
+
+var data = new
+{
+    url = "https://github.com/rexm/handlebars.net",
+    text = "Handlebars.Net"
+};
+
+// Act
+var handlebars = HandlebarsDotNet.Handlebars.Create();
+handlebars.RegisterHelper("link_to", link_to);
+var template = handlebars.Compile(source);
+// handlebars.RegisterHelper("link_to", link_to); This also works
+var result = template(data);
+```
+
+Handlebars allows the helpers to be registered with the `Handlebars` instance either before or after a template is compiled.
+The optimum would be to have a shared `Handlebars` instance for a specific collection of functions and register the helpers just once.
+For use cases where the Kernel function collection may have been mutated we will be forced to create a `Handlebars` instance at render time
+and then register the helpers. This means we cannot take advantage of the performance improvement provided by compiling the template.
+
+## Decision Drivers
+
+In no particular order:
+
+- Support creating a semantic function without an `IKernel` instance.
+- Support late binding of functions i.e., having functions resolved when the prompt is rendered.
+- Support allowing the prompt template to be parsed (compiled) just once to optimize performance if needed.
+- Support using multiple prompt template formats with a single `Kernel` instance.
+- Provide simple abstractions which allow third parties to implement support for custom prompt template formats.
+
+## Considered Options
+
+- Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory`.
+
+### Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory`
+
+ISKFunction class relationships
+
+Below is an expanded example of how to create a semantic function from a prompt template string which uses the built-in Semantic Kernel format:
+
+```csharp
+// Semantic function can be created once
+var promptTemplateFactory = new BasicPromptTemplateFactory();
+string templateString = "Today is: {{time.Date}} Is it weekend time (weekend/not weekend)?";
+var promptTemplate = promptTemplateFactory.CreatePromptTemplate(templateString, new PromptTemplateConfig());
+var kindOfDay = ISKFunction.CreateSemanticFunction("KindOfDay", promptTemplateConfig, promptTemplate);
+
+// Create Kernel after creating the semantic function
+// Later we will support passing a function collection to the KernelBuilder
+IKernel kernel = Kernel.Builder
+    .WithOpenAIChatCompletionService(
+        modelId: openAIModelId,
+        apiKey: openAIApiKey)
+    .Build();
+
+kernel.ImportFunctions(new TimePlugin(), "time");
+// Optionally register the semantic function with the Kernel
+// kernel.RegisterCustomFunction(kindOfDay);
+
+var result = await kernel.RunAsync(kindOfDay);
+Console.WriteLine(result.GetValue());
+```
+
+**Notes:**
+
+- `BasicPromptTemplateFactory` will be the default implementation and will be automatically provided in `KernelSemanticFunctionExtensions`. Developers will also be able to provide their own implementation.
+- The factory uses the new `PromptTemplateConfig.TemplateFormat` to create the appropriate `IPromptTemplate` instance.
+- We should look to remove `promptTemplateConfig` as a parameter to `CreateSemanticFunction`. That change is outside of the scope of this ADR. + +The `BasicPromptTemplateFactory` and `BasicPromptTemplate` implementations look as follows: + +```csharp +public sealed class BasicPromptTemplateFactory : IPromptTemplateFactory +{ + private readonly IPromptTemplateFactory _promptTemplateFactory; + private readonly ILoggerFactory _loggerFactory; + + public BasicPromptTemplateFactory(IPromptTemplateFactory promptTemplateFactory, ILoggerFactory? loggerFactory = null) + { + this._promptTemplateFactory = promptTemplateFactory; + this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; + } + + public IPromptTemplate? CreatePromptTemplate(string templateString, PromptTemplateConfig promptTemplateConfig) + { + if (promptTemplateConfig.TemplateFormat.Equals(PromptTemplateConfig.SEMANTICKERNEL, System.StringComparison.Ordinal)) + { + return new BasicPromptTemplate(templateString, promptTemplateConfig, this._loggerFactory); + } + else if (this._promptTemplateFactory is not null) + { + return this._promptTemplateFactory.CreatePromptTemplate(templateString, promptTemplateConfig); + } + + throw new SKException($"Invalid prompt template format {promptTemplateConfig.TemplateFormat}"); + } +} + +public sealed class BasicPromptTemplate : IPromptTemplate +{ + public BasicPromptTemplate(string templateString, PromptTemplateConfig promptTemplateConfig, ILoggerFactory? loggerFactory = null) + { + this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; + this._logger = this._loggerFactory.CreateLogger(typeof(BasicPromptTemplate)); + this._templateString = templateString; + this._promptTemplateConfig = promptTemplateConfig; + this._parameters = new(() => this.InitParameters()); + this._blocks = new(() => this.ExtractBlocks(this._templateString)); + this._tokenizer = new TemplateTokenizer(this._loggerFactory); + } + + public IReadOnlyList Parameters => this._parameters.Value; + + public async Task RenderAsync(SKContext executionContext, CancellationToken cancellationToken = default) + { + return await this.RenderAsync(this._blocks.Value, executionContext, cancellationToken).ConfigureAwait(false); + } + + // Not showing the implementation details +} +``` + +**Note:** + +- The call to `ExtractBlocks` is called lazily once for each prompt template +- The `RenderAsync` doesn't need to extract the blocks every time + +## Decision Outcome + +Chosen option: "Obsolete `IPromptTemplateEngine` and replace with `IPromptTemplateFactory`", because +addresses the requirements and provides good flexibility for the future. diff --git a/docs/decisions/0018-kernel-hooks-phase2.md b/docs/decisions/0018-kernel-hooks-phase2.md new file mode 100644 index 000000000000..0b1de476b117 --- /dev/null +++ b/docs/decisions/0018-kernel-hooks-phase2.md @@ -0,0 +1,454 @@ +## Context and Problem Statement + +Currently Kernel invoking and invoked handlers don't expose the prompt to the handlers. + +The proposal is a way to expose the prompt to the handlers. + +- Pre-Execution / Invoking + + - Get: Prompt generated by the current `SemanticFunction.TemplateEngine` before calling the LLM + - Set: Modify a prompt content before sending it to LLM + +- Post-Execution / Invoked + + - Get: Generated Prompt + +## Decision Drivers + +- Prompt template should be generated just once per function execution within the Kernel.RunAsync execution. +- Handlers should be able to see and modify the prompt before the LLM execution. 
+- Handlers should be able to see prompt after the LLM execution. +- Calling Kernel.RunAsync(function) or ISKFunction.InvokeAsync(kernel) should trigger the events. + +## Out of Scope + +- Skip plan steps using Pre-Hooks. +- Get the used services (Template Engine, IAIServices, etc) in the Pre/Post Hooks. +- Get the request settings in the Pre/Post Hooks. + +## Current State of Kernel for Pre/Post Hooks + +Current state of Kernel: + +```csharp +class Kernel : IKernel + +RunAsync() +{ + var context = this.CreateNewContext(variables); + var functionDetails = skFunction.Describe(); + var functionInvokingArgs = this.OnFunctionInvoking(functionDetails, context); + + functionResult = await skFunction.InvokeAsync(context, cancellationToken: cancellationToken); + var functionInvokedArgs = this.OnFunctionInvoked(functionDetails, functionResult); +} +``` + +## Developer Experience + +Below is the expected end user experience when coding using Pre/Post Hooks to get or modify prompts. + +```csharp +const string FunctionPrompt = "Write a random paragraph about: {{$input}}."; + +var excuseFunction = kernel.CreateSemanticFunction(...); + +void MyPreHandler(object? sender, FunctionInvokingEventArgs e) +{ + Console.WriteLine($"{e.FunctionView.PluginName}.{e.FunctionView.Name} : Pre Execution Handler - Triggered"); + + // Will be false for non semantic functions + if (e.TryGetRenderedPrompt(out var prompt)) + { + Console.WriteLine("Rendered Prompt:"); + Console.WriteLine(prompt); + + // Update the prompt if needed + e.TryUpdateRenderedPrompt("Write a random paragraph about: Overriding a prompt"); + } +} + +void MyPostHandler(object? sender, FunctionInvokedEventArgs e) +{ + Console.WriteLine($"{e.FunctionView.PluginName}.{e.FunctionView.Name} : Post Execution Handler - Triggered"); + // Will be false for non semantic functions + if (e.TryGetRenderedPrompt(out var prompt)) + { + Console.WriteLine("Used Prompt:"); + Console.WriteLine(prompt); + } +} + +kernel.FunctionInvoking += MyPreHandler; +kernel.FunctionInvoked += MyPostHandler; + +const string Input = "I missed the F1 final race"; +var result = await kernel.RunAsync(Input, excuseFunction); +Console.WriteLine($"Function Result: {result.GetValue()}"); +``` + +Expected output: + +``` +MyPlugin.MyFunction : Pre Execution Handler - Triggered +Rendered Prompt: +Write a random paragraph about: I missed the F1 final race. + +MyPlugin.MyFunction : Post Execution Handler - Triggered +Used Prompt: +Write a random paragraph about: Overriding a prompt + +FunctionResult: +``` + +## Considered Options + +### Improvements Common to all options + +Move `Dictionary` property `Metadata` from `FunctionInvokedEventArgs` to `SKEventArgs` abstract class. + +Pro: + +- This will make all SKEventArgs extensible, allowing extra information to be passed to the EventArgs when `specialization` isn't possible. 
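+
+A minimal sketch of what this shared base class could look like after the move (illustrative only; the `FunctionDescription` and `SKContext` shapes are assumed from the option snippets below):
+
+```csharp
+public abstract class SKEventArgs : EventArgs
+{
+    protected SKEventArgs(FunctionDescription functionDescription, SKContext context)
+    {
+        this.FunctionView = functionDescription;
+        this.SKContext = context;
+        this.Metadata = new Dictionary<string, object>();
+    }
+
+    public FunctionDescription FunctionView { get; }
+
+    public SKContext SKContext { get; }
+
+    // Moved up from FunctionInvokedEventArgs so that any event can carry extra
+    // information (e.g. the rendered prompt) without a specialized EventArgs type.
+    public Dictionary<string, object> Metadata { get; }
+}
+```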
+ +### Option 1: Kernel awareness of SemanticFunctions + +```csharp +class Kernel : IKernel + +RunAsync() +{ + + if (skFunction is SemanticFunction semanticFunction) + { + var prompt = await semanticFunction.TemplateEngine.RenderAsync(semanticFunction.Template, context); + var functionInvokingArgs = this.OnFunctionInvoking(functionDetails, context, prompt); + // InvokeWithPromptAsync internal + functionResult = await semanticFunction.InternalInvokeWithPromptAsync(prompt, context, cancellationToken: cancellationToken); + } + else + { + functionResult = await skFunction.InvokeAsync(context, cancellationToken: cancellationToken); + } +} +class SemanticFunction : ISKFunction + +public InvokeAsync(context, cancellationToken) +{ + var prompt = _templateEngine.RenderAsync(); + return InternalInvokeWithPromptAsync(prompt, context, cancellationToken); +} + +internal InternalInvokeWithPromptAsync(string prompt) +{ + ... current logic to call LLM +} +``` + +### Pros and Cons + +Pros: + +- Simpler and quicker to implement +- Small number of changes limited mostly to `Kernel` and `SemanticFunction` classes + +Cons: + +- `Kernel` is aware of `SemanticFunction` implementation details +- Not extensible to show prompts of custom `ISKFunctions` implementations + +### Option 2: Delegate to the ISKFunction how to handle events (Interfaces approach) + +```csharp +class Kernel : IKernel +{ + RunAsync() { + var functionInvokingArgs = await this.TriggerEvent(this.FunctionInvoking, skFunction, context); + + var functionResult = await skFunction.InvokeAsync(context, cancellationToken: cancellationToken); + + var functionInvokedArgs = await this.TriggerEvent( + this.FunctionInvoked, + skFunction, + context); + } + + private TEventArgs? TriggerEvent(EventHandler? eventHandler, ISKFunction function, SKContext context) where TEventArgs : SKEventArgs + { + if (eventHandler is null) + { + return null; + } + + if (function is ISKFunctionEventSupport supportedFunction) + { + var eventArgs = await supportedFunction.PrepareEventArgsAsync(context); + eventHandler.Invoke(this, eventArgs); + return eventArgs; + } + + // Think about allowing to add data with the extra interface. + + // If a function don't support the specific event we can: + return null; // Ignore or Throw. + throw new NotSupportedException($"The provided function \"{function.Name}\" does not supports and implements ISKFunctionHandles<{typeof(TEventArgs).Name}>"); + } +} + +public interface ISKFunctionEventSupport where TEventArgs : SKEventArgs +{ + Task PrepareEventArgsAsync(SKContext context, TEventArgs? eventArgs = null); +} + +class SemanticFunction : ISKFunction, + ISKFunctionEventSupport, + ISKFunctionEventSupport +{ + + public FunctionInvokingEventArgs PrepareEventArgsAsync(SKContext context, FunctionInvokingEventArgs? eventArgs = null) + { + var renderedPrompt = await this.RenderPromptTemplateAsync(context); + context.Variables.Set(SemanticFunction.RenderedPromptKey, renderedPrompt); + + return new SemanticFunctionInvokingEventArgs(this.Describe(), context); + // OR Metadata Dictionary + return new FunctionInvokingEventArgs(this.Describe(), context, new Dictionary() { { RenderedPrompt, renderedPrompt } }); + } + + public FunctionInvokedEventArgs PrepareEventArgsAsync(SKContext context, FunctionInvokedEventArgs? 
eventArgs = null) + { + return Task.FromResult(new SemanticFunctionInvokedEventArgs(this.Describe(), context)); + } +} + +public sealed class SemanticFunctionInvokedEventArgs : FunctionInvokedEventArgs +{ + public SemanticFunctionInvokedEventArgs(FunctionDescription functionDescription, SKContext context) + : base(functionDescription, context) + { + _context = context; + Metadata[RenderedPromptKey] = this._context.Variables[RenderedPromptKey]; + } + + public string? RenderedPrompt => this.Metadata[RenderedPromptKey]; + +} + +public sealed class SemanticFunctionInvokingEventArgs : FunctionInvokingEventArgs +{ + public SemanticFunctionInvokingEventArgs(FunctionDescription functionDescription, SKContext context) + : base(functionDescription, context) + { + _context = context; + } + public string? RenderedPrompt => this._context.Variables[RenderedPromptKey]; +} +``` + +### Pros and Cons + +Pros: + +- `Kernel` is not aware of `SemanticFunction` implementation details or any other `ISKFunction` implementation +- Extensible to show dedicated EventArgs per custom `ISKFunctions` implementation, including prompts for semantic functions +- Extensible to support future events on the Kernel thru the `ISKFunctionEventSupport` interface +- Functions can have their own EventArgs specialization. +- Interface is optional, so custom `ISKFunctions` can choose to implement it or not + +Cons: + +- Any custom functions now will have to responsibility implement the `ISKFunctionEventSupport` interface if they want to support events. +- Handling events in another `ISKFunction` requires more complex approaches to manage the context and the prompt + any other data in different event handling methods. + +### Option 3: Delegate to the ISKFunction how to handle events (InvokeAsync Delegates approach) + +Add Kernel event handler delegate wrappers to `ISKFunction.InvokeAsync` interface. +This approach shares the responsibility of handling the events between the `Kernel` and the `ISKFunction` implementation, flow control will be handled by the Kernel and the `ISKFunction` will be responsible for calling the delegate wrappers and adding data to the `SKEventArgs` that will be passed to the handlers. + +```csharp +class Kernel : IKernel +{ + RunAsync() { + var functionInvokingDelegateWrapper = new(this.FunctionInvoking); + var functionInvokedDelegateWrapper = new(this.FunctionInvoked); + + var functionResult = await skFunction.InvokeAsync(context, functionInvokingDelegateWrapper, functionInvokingDelegateWrapper, functionInvokedDelegateWrapper); + + // Kernel will analyze the delegate results and make flow related decisions + if (functionInvokingDelegateWrapper.EventArgs.CancelRequested ... ) { ... } + if (functionInvokingDelegateWrapper.EventArgs.SkipRequested ... ) { ... } + if (functionInvokedDelegateWrapper.EventArgs.Repeat ... ) { ... } + } +} + +class SemanticFunction : ISKFunction { + InvokeAsync( + SKContext context, + FunctionInvokingDelegateWrapper functionInvokingDelegateWrapper, + FunctionInvokedDelegateWrapper functionInvokedDelegateWrapper) + { + // The Semantic will have to call the delegate wrappers and share responsibility with the `Kernel`. 
+ if (functionInvokingDelegateWrapper.Handler is not null) + { + var renderedPrompt = await this.RenderPromptTemplateAsync(context); + functionInvokingDelegateWrapper.EventArgs.RenderedPrompt = renderedPrompt; + + functionInvokingDelegateWrapper.Handler.Invoke(this, functionInvokingDelegateWrapper.EventArgs); + + if (functionInvokingDelegateWrapper.EventArgs?.CancelToken.IsCancellationRequested ?? false) + { + // Need to enforce an non processed result + return new SKFunctionResult(context); + + //OR make InvokeAsync allow returning null FunctionResult? + return null; + } + } + } +} + +// Wrapper for the EventHandler +class FunctionDelegateWrapper where TEventArgs : SKEventArgs +{ + FunctionInvokingDelegateWrapper(EventHandler eventHandler) {} + + // Set allows specialized eventargs to be set. + public TEventArgs EventArgs { get; set; } + public EventHandler Handler => _eventHandler; +} +``` + +### Pros and Cons + +Pros: + +- `ISKFunction` has less code/complexity to handle and expose data (Rendered Prompt) and state in the EventArgs. +- `Kernel` is not aware of `SemanticFunction` implementation details or any other `ISKFunction` implementation +- `Kernel` has less code/complexity +- Could be extensible to show dedicated EventArgs per custom `ISKFunctions` implementation, including prompts for semantic functions + +Cons: + +- Unable to add new events if needed (ISKFunction interface change needed) +- Functions need to implement behavior related to dependency (Kernel) events +- Since Kernel needs to interact with the result of an event handler, a wrapper strategy is needed to access results by reference at the kernel level (control of flow) +- Passing Kernel event handlers full responsibility downstream to the functions don't sound quite right (Single Responsibility) + +### Option 4: Delegate to the ISKFunction how to handle events (SKContext Delegates approach) + +Add Kernel event handler delegate wrappers to `ISKFunction.InvokeAsync` interface. +This approach shares the responsibility of handling the events between the `Kernel` and the `ISKFunction` implementation, flow control will be handled by the Kernel and the `ISKFunction` will be responsible for calling the delegate wrappers and adding data to the `SKEventArgs` that will be passed to the handlers. + +```csharp +class Kernel : IKernel +{ + CreateNewContext() { + var context = new SKContext(...); + context.AddEventHandlers(this.FunctionInvoking, this.FunctionInvoked); + return context; + } + RunAsync() { + functionResult = await skFunction.InvokeAsync(context, ...); + if (this.IsCancelRequested(functionResult.Context))) + break; + if (this.IsSkipRequested(functionResult.Context)) + continue; + if (this.IsRepeatRequested(...)) + goto repeat; + + ... + } +} + +class SKContext { + + internal EventHandlerWrapper? FunctionInvokingHandler { get; private set; } + internal EventHandlerWrapper? FunctionInvokedHandler { get; private set; } + + internal SKContext( + ... + ICollection? eventHandlerWrappers = null + { + ... + this.InitializeEventWrappers(eventHandlerWrappers); + } + + void InitializeEventWrappers(ICollection? 
eventHandlerWrappers) + { + if (eventHandlerWrappers is not null) + { + foreach (var handler in eventHandlerWrappers) + { + if (handler is EventHandlerWrapper invokingWrapper) + { + this.FunctionInvokingHandler = invokingWrapper; + continue; + } + + if (handler is EventHandlerWrapper invokedWrapper) + { + this.FunctionInvokedHandler = invokedWrapper; + } + } + } + } +} + +class SemanticFunction : ISKFunction { + InvokeAsync( + SKContext context + { + string renderedPrompt = await this._promptTemplate.RenderAsync(context, cancellationToken).ConfigureAwait(false); + + this.CallFunctionInvoking(context, renderedPrompt); + if (this.IsInvokingCancelOrSkipRequested(context, out var stopReason)) + { + return new StopFunctionResult(this.Name, this.PluginName, context, stopReason!.Value); + } + + string completion = await GetCompletionsResultContentAsync(...); + + var result = new FunctionResult(this.Name, this.PluginName, context, completion); + result.Metadata.Add(SemanticFunction.RenderedPromptMetadataKey, renderedPrompt); + + this.CallFunctionInvoked(result, context, renderedPrompt); + if (this.IsInvokedCancelRequested(context, out stopReason)) + { + return new StopFunctionResult(this.Name, this.PluginName, context, result.Value, stopReason!.Value); + } + + return result; + } +} +``` + +### Pros and Cons + +Pros: + +- `ISKFunction` has less code/complexity to handle and expose data (Rendered Prompt) and state in the EventArgs. +- `Kernel` is not aware of `SemanticFunction` implementation details or any other `ISKFunction` implementation +- `Kernel` has less code/complexity +- Could be extensible to show dedicated EventArgs per custom `ISKFunctions` implementation, including prompts for semantic functions +- More extensible as `ISKFunction` interface doesn't need to change to add new events. +- `SKContext` can be extended to add new events without introducing breaking changes. + +Cons: + +- Functions now need to implement logic to handle in-context events +- Since Kernel needs to interact with the result of an event handler, a wrapper strategy is needed to access results by reference at the kernel level (control of flow) +- Passing Kernel event handlers full responsibility downstream to the functions don't sound quite right (Single Responsibility) + +## Decision outcome + +### Option 4: Delegate to the ISKFunction how to handle events (SKContext Delegates approach) + +This allow the functions to implement some of the kernel logic but has the big benefit of not splitting logic in different methods for the same Execution Context. + +Biggest benefit: +**`ISKFunction` has less code/complexity to handle and expose data and state in the EventArgs.** +**`ISKFunction` interface doesn't need to change to add new events.** + +This implementation allows to get the renderedPrompt in the InvokeAsync without having to manage the context and the prompt in different methods. + +The above also applies for any other data that is available in the invocation and can be added as a new EventArgs property. diff --git a/docs/decisions/0019-semantic-function-multiple-model-support.md b/docs/decisions/0019-semantic-function-multiple-model-support.md new file mode 100644 index 000000000000..569bc6b293a5 --- /dev/null +++ b/docs/decisions/0019-semantic-function-multiple-model-support.md @@ -0,0 +1,174 @@ +--- +# These are optional elements. Feel free to remove any of them. 
+status: approved +contact: markwallace-microsoft +date: 2023-10-26 +deciders: markwallace-microsoft, SergeyMenshykh, rogerbarreto +consulted: matthewbolanos, dmytrostruk +informed: +--- + +# Multiple Model Support for Semantic Functions + +## Context and Problem Statement + +Developers need to be able to use multiple models simultaneously e.g., using GPT4 for certain prompts and GPT3.5 for others to reduce cost. + +## Use Cases + +In scope for Semantic Kernel V1.0 is the ability to select AI Service and Model Request Settings: + +1. By service id. + - A Service id uniquely identifies a registered AI Service and is typically defined in the scope of an application. +1. By developer defined strategy. + - A _developer defined strategy_ is a code first approach where a developer provides the logic. +1. By model id. + - A model id uniquely identifies a Large Language Model. Multiple AI service providers can support the same LLM. +1. By arbitrary AI service attributes + - E.g. an AI service can define a provider id which uniquely identifies an AI provider e.g. "Azure OpenAI", "OpenAI", "Hugging Face" + +**This ADR focuses on items 1 & 2 in the above list. To implement 3 & 4 we need to provide the ability to store `AIService` metadata.** + +## Decision Outcome + +Support use cases 1 & 2 listed in this ADR and create separate ADR to add support for AI service metadata. + +## Descriptions of the Use Cases + +**Note: All code is pseudo code and does not accurately reflect what the final implementations will look like.** + +### Select Model Request Settings by Service Id + +_As a developer using the Semantic Kernel I can configure multiple request settings for a semantic function and associate each one with a service id so that the correct request settings are used when different services are used to execute my semantic function._ + +The semantic function template configuration allows multiple model request settings to be configured. In this case the developer configures different settings based on the service id that is used to execute the semantic function. +In the example below the semantic function is executed with "AzureText" using `max_tokens=60` because "AzureText" is the first service id in the list of models configured for the prompt. 
+
+```csharp
+// Configure a Kernel with multiple LLM's
+IKernel kernel = new KernelBuilder()
+    .WithLoggerFactory(ConsoleLogger.LoggerFactory)
+    .WithAzureTextCompletionService(deploymentName: aoai.DeploymentName,
+        endpoint: aoai.Endpoint, serviceId: "AzureText", apiKey: aoai.ApiKey)
+    .WithAzureChatCompletionService(deploymentName: aoai.ChatDeploymentName,
+        endpoint: aoai.Endpoint, serviceId: "AzureChat", apiKey: aoai.ApiKey)
+    .WithOpenAITextCompletionService(modelId: oai.ModelId,
+        serviceId: "OpenAIText", apiKey: oai.ApiKey, setAsDefault: true)
+    .WithOpenAIChatCompletionService(modelId: oai.ChatModelId,
+        serviceId: "OpenAIChat", apiKey: oai.ApiKey, setAsDefault: true)
+    .Build();
+
+// Configure semantic function with multiple LLM request settings
+var modelSettings = new List<AIRequestSettings>
+{
+    new OpenAIRequestSettings() { ServiceId = "AzureText", MaxTokens = 60 },
+    new OpenAIRequestSettings() { ServiceId = "AzureChat", MaxTokens = 120 },
+    new OpenAIRequestSettings() { ServiceId = "OpenAIText", MaxTokens = 180 },
+    new OpenAIRequestSettings() { ServiceId = "OpenAIChat", MaxTokens = 240 }
+};
+var prompt = "Hello AI, what can you do for me?";
+var promptTemplateConfig = new PromptTemplateConfig() { ModelSettings = modelSettings };
+var func = kernel.CreateSemanticFunction(prompt, config: promptTemplateConfig, "HelloAI");
+
+// Semantic function is executed with AzureText using max_tokens=60
+result = await kernel.RunAsync(func);
+```
+
+This works by using the `IAIServiceSelector` interface as the strategy for selecting the AI service and request settings to use when invoking a semantic function.
+The interface is defined as follows:
+
+```csharp
+public interface IAIServiceSelector
+{
+    (T?, AIRequestSettings?) SelectAIService<T>(
+        string renderedPrompt,
+        IAIServiceProvider serviceProvider,
+        IReadOnlyList<AIRequestSettings>? modelSettings) where T : IAIService;
+}
+```
+
+A default `OrderedIAIServiceSelector` implementation is provided which selects the AI service based on the order of the model request settings defined for the semantic function.
+
+- The implementation checks whether a service exists with the corresponding service id and, if it does, that service and the associated model request settings will be used.
+- If no model request settings are defined then the default text completion service is used.
+- A default set of request settings can be specified by leaving the service id undefined or empty; the first such default will be used.
+- If no default is specified and none of the specified services are available, the operation will fail.
+
+### Select AI Service and Model Request Settings By Developer Defined Strategy
+
+_As a developer using the Semantic Kernel I can provide an implementation which selects the AI service and request settings used to execute my function so that I can dynamically control which AI service and settings are used to execute my semantic function._
+
+In this case the developer configures different settings based on the service id and provides an AI Service Selector which determines which AI Service will be used when the semantic function is executed.
+In the example below the semantic function is executed with whatever AI Service and AI Request Settings `MyAIServiceSelector` returns e.g. it will be possible to create an AI Service Selector that computes the token count of the rendered prompt and uses that to determine which service to use.
+ +```csharp +// Configure a Kernel with multiple LLM's +IKernel kernel = new KernelBuilder() + .WithLoggerFactory(ConsoleLogger.LoggerFactory) + .WithAzureTextCompletionService(deploymentName: aoai.DeploymentName, + endpoint: aoai.Endpoint, serviceId: "AzureText", apiKey: aoai.ApiKey) + .WithAzureChatCompletionService(deploymentName: aoai.ChatDeploymentName, + endpoint: aoai.Endpoint, serviceId: "AzureChat", apiKey: aoai.ApiKey) + .WithOpenAITextCompletionService(modelId: oai.ModelId, + serviceId: "OpenAIText", apiKey: oai.ApiKey, setAsDefault: true) + .WithOpenAIChatCompletionService(modelId: oai.ChatModelId, + serviceId: "OpenAIChat", apiKey: oai.ApiKey, setAsDefault: true) + .WithAIServiceSelector(new MyAIServiceSelector()) + .Build(); + +// Configure semantic function with multiple LLM request settings +var modelSettings = new List +{ + new OpenAIRequestSettings() { ServiceId = "AzureText", MaxTokens = 60 }, + new OpenAIRequestSettings() { ServiceId = "AzureChat", MaxTokens = 120 }, + new OpenAIRequestSettings() { ServiceId = "OpenAIText", MaxTokens = 180 }, + new OpenAIRequestSettings() { ServiceId = "OpenAIChat", MaxTokens = 240 } +}; +var prompt = "Hello AI, what can you do for me?"; +var promptTemplateConfig = new PromptTemplateConfig() { ModelSettings = modelSettings }; +var func = kernel.CreateSemanticFunction(prompt, config: promptTemplateConfig, "HelloAI"); + +// Semantic function is executed with AI Service and AI request Settings dynamically determined +result = await kernel.RunAsync(func, funcVariables); +``` + +## More Information + +### Select AI Service by Service Id + +The following use case is supported. Developers can create a `Kernel`` instance with multiple named AI services. When invoking a semantic function the service id (and optionally request settings to be used) can be specified. The named AI service will be used to execute the prompt. 
+ +```csharp +var aoai = TestConfiguration.AzureOpenAI; +var oai = TestConfiguration.OpenAI; + +// Configure a Kernel with multiple LLM's +IKernel kernel = Kernel.Builder + .WithLoggerFactory(ConsoleLogger.LoggerFactory) + .WithAzureTextCompletionService(deploymentName: aoai.DeploymentName, + endpoint: aoai.Endpoint, serviceId: "AzureText", apiKey: aoai.ApiKey) + .WithAzureChatCompletionService(deploymentName: aoai.ChatDeploymentName, + endpoint: aoai.Endpoint, serviceId: "AzureChat", apiKey: aoai.ApiKey) + .WithOpenAITextCompletionService(modelId: oai.ModelId, + serviceId: "OpenAIText", apiKey: oai.ApiKey) + .WithOpenAIChatCompletionService(modelId: oai.ChatModelId, + serviceId: "OpenAIChat", apiKey: oai.ApiKey) + .Build(); + +// Invoke the semantic function and service and request settings to use +result = await kernel.InvokeSemanticFunctionAsync(prompt, + requestSettings: new OpenAIRequestSettings() + { ServiceId = "AzureText", MaxTokens = 60 }); + +result = await kernel.InvokeSemanticFunctionAsync(prompt, + requestSettings: new OpenAIRequestSettings() + { ServiceId = "AzureChat", MaxTokens = 120 }); + +result = await kernel.InvokeSemanticFunctionAsync(prompt, + requestSettings: new OpenAIRequestSettings() + { ServiceId = "OpenAIText", MaxTokens = 180 }); + +result = await kernel.InvokeSemanticFunctionAsync(prompt, + requestSettings: new OpenAIRequestSettings() + { ServiceId = "OpenAIChat", MaxTokens = 240 }); +``` diff --git a/docs/decisions/0020-prompt-syntax-mapping-to-completion-service-model.md b/docs/decisions/0020-prompt-syntax-mapping-to-completion-service-model.md new file mode 100644 index 000000000000..5c3b7f4b5a7a --- /dev/null +++ b/docs/decisions/0020-prompt-syntax-mapping-to-completion-service-model.md @@ -0,0 +1,57 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: accepted +date: 2023-10-27 +contact: SergeyMenshykh +deciders: markwallace, mabolan +consulted: +informed: +--- +# Mapping of prompt syntax to completion service model + +## Context and Problem Statement +Today, SK runs all prompts using the text completion service by simply passing the rendered prompt as is, without any modifications, directly to a configured text completion service/connector. With the addition of new chat completion prompt and potentially other prompt types, such as image, on the horizon, we need a way to map completion-specific prompt syntax to the corresponding completion service data model. + +For example, [the chat completion syntax](https://github.com/microsoft/semantic-kernel/blob/main/docs/decisions/0014-chat-completion-roles-in-prompt.md) in chat completion prompts: +```xml + + You are a creative assistant helping individuals and businesses with their innovative projects. + + + I want to brainstorm the idea of {{$input}} + +``` +should be mapped to an instance of the [ChatHistory](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs) class with two chat messages: + +```csharp +var messages = new ChatHistory(); +messages.Add(new ChatMessage(new AuthorRole("system"), "You are a creative assistant helping individuals and businesses with their innovative projects.")); +messages.Add(new ChatMessage(new AuthorRole("user"), "I want to brainstorm the idea of {{$input}}")); +``` + +This ADR outlines potential options for the location of the prompt syntax mapping functionality. + +## Considered Options +**1. 
Completion connector classes.** This option proposes to have the completion connector classes responsible for the `prompt syntax -> completion service data model` mapping. The decision regarding whether this mapping functionality will be implemented in the connector classes themselves or delegated to mapper classes should be made during the implementation phase and is out of the scope of this ADR.
+
+Pros:
+ - The `SemanticFunction` won't need to change to support the mapping of a new prompt syntax when new completion type connectors (audio, video, etc.) are added.
+
+ - Prompts can be run by
+   - Kernel.RunAsync
+   - Completion connectors
+
+Cons:
+ - Every new completion connector, whether of an existing type or a new type, will have to implement the mapping functionality
+
+**2. The SemanticFunction class.** This option proposes that the `SemanticFunction` class be responsible for the mapping. Similar to the previous option, the exact location of this functionality (whether in the `SemanticFunction` class or in the mapper classes) should be decided during the implementation phase.
+
+Pros:
+ - New connectors of a new type or existing ones don't have to implement the mapping functionality
+
+Cons:
+ - The `SemanticFunction` class has to be changed every time a new completion type needs to be supported by SK
+ - Prompts can be run by the Kernel.RunAsync method only.
+
+## Decision Outcome
+It was agreed to go with option 1 - `1. Completion connector classes` since it is a more flexible solution and allows adding new connectors without modifying the `SemanticFunction` class.
\ No newline at end of file
diff --git a/docs/decisions/0021-aiservice-metadata.md b/docs/decisions/0021-aiservice-metadata.md
new file mode 100644
index 000000000000..70822b1e82c4
--- /dev/null
+++ b/docs/decisions/0021-aiservice-metadata.md
@@ -0,0 +1,157 @@
+---
+# These are optional elements. Feel free to remove any of them.
+status: proposed
+date: 2023-11-10
+deciders: SergeyMenshykh, markwallace, rbarreto, dmytrostruk
+consulted:
+informed:
+---
+# Add AI Service Metadata
+
+## Context and Problem Statement
+
+Developers need to be able to know more information about the `IAIService` that will be used to execute a semantic function or a plan.
+Some examples of why they need this information:
+
+1. As an SK developer I want to write an `IAIServiceSelector` which allows me to select the OpenAI service to use based on the configured model id so that I can select the optimum (could be the cheapest) model to use based on the prompt I am executing.
+2. As an SK developer I want to write a pre-invocation hook which will compute the token size of a prompt before the prompt is sent to the LLM, so that I can determine the optimum `IAIService` to use. The library I am using to compute the token size of the prompt requires the model id.
+
+Current implementation of `IAIService` is empty.
+
+```csharp
+public interface IAIService
+{
+}
+```
+
+We can retrieve `IAIService` instances using `T IKernel.GetService<T>(string? name = null) where T : IAIService;` i.e., by service type and name (aka service id).
+The concrete instance of an `IAIService` can have different attributes depending on the service provider e.g. Azure OpenAI has a deployment name and OpenAI services have a model id.
+
+Consider the following code snippet:
+
+```csharp
+IKernel kernel = new KernelBuilder()
+    .WithLoggerFactory(ConsoleLogger.LoggerFactory)
+    .WithAzureChatCompletionService(
+        deploymentName: chatDeploymentName,
+        endpoint: endpoint,
+        serviceId: "AzureOpenAIChat",
+        apiKey: apiKey)
+    .WithOpenAIChatCompletionService(
+        modelId: openAIModelId,
+        serviceId: "OpenAIChat",
+        apiKey: openAIApiKey)
+    .Build();
+
+var service = kernel.GetService<IChatCompletion>("OpenAIChat");
+```
+
+For Azure OpenAI we create the service with a deployment name. This is an arbitrary name specified by the person who deployed the AI model e.g. it could be `eastus-gpt-4` or `foo-bar`.
+For OpenAI we create the service with a model id. This must match one of the deployed OpenAI models.
+
+From the perspective of a prompt creator using OpenAI, they will typically tune their prompts based on the model. So when the prompt is executed we need to be able to retrieve the service using the model id. As shown in the code snippet above, the `IKernel` only supports retrieving an `IAIService` instance by service id. Additionally the `IChatCompletion` is a generic interface so it doesn't contain any properties which provide information about a specific connector instance.
+
+## Decision Drivers
+
+* We need a mechanism to store generic metadata for an `IAIService` instance.
+  * It will be the responsibility of the concrete `IAIService` instance to store the metadata that is relevant e.g., model id for OpenAI and HuggingFace AI services.
+* We need to be able to iterate over the available `IAIService` instances.
+
+## Considered Options
+
+* Option #1 (see the sketch after this list)
+  * Extend `IAIService` to include the following properties:
+    * `string? ModelId { get; }` which returns the model id. It will be the responsibility of each `IAIService` implementation to populate this with the appropriate value.
+    * `IReadOnlyDictionary Attributes { get; }` which returns the attributes as a readonly dictionary. It will be the responsibility of each `IAIService` implementation to populate this with the appropriate metadata.
+  * Extend `INamedServiceProvider` to include this method `ICollection<T> GetServices<T>() where T : TService;`
+  * Extend `OpenAIKernelBuilderExtensions` so that `WithAzureXXX` methods will include a `modelId` property if a specific model can be targeted.
+* Option #2
+  * Extend `IAIService` to include the following method:
+    * `T? GetAttributes<T>() where T : AIServiceAttributes;` which returns an instance of `AIServiceAttributes`. It will be the responsibility of each `IAIService` implementation to define its own service attributes class and populate this with the appropriate values.
+  * Extend `INamedServiceProvider` to include this method `ICollection<T> GetServices<T>() where T : TService;`
+  * Extend `OpenAIKernelBuilderExtensions` so that `WithAzureXXX` methods will include a `modelId` property if a specific model can be targeted.
+* Option #3
+  * Extend `IAIService` to include the following properties:
+    * `public IReadOnlyDictionary Attributes => this.InternalAttributes;` which returns a read only dictionary. It will be the responsibility of each `IAIService` implementation to define its own service attributes class and populate this with the appropriate values.
+    * `ModelId`
+    * `Endpoint`
+    * `ApiVersion`
+  * Extend `INamedServiceProvider` to include this method `ICollection<T> GetServices<T>() where T : TService;`
+  * Extend `OpenAIKernelBuilderExtensions` so that `WithAzureXXX` methods will include a `modelId` property if a specific model can be targeted.
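+
+For illustration, Option #1 could shape the abstractions roughly as follows (a sketch only; the attribute dictionary value type and the exact `INamedServiceProvider` signature would be settled during implementation):
+
+```csharp
+public interface IAIService
+{
+    // Model id used by the service, e.g. "gpt-4" for OpenAI; null when not applicable.
+    string? ModelId { get; }
+
+    // Provider-specific metadata, e.g. deployment name or endpoint for Azure OpenAI.
+    IReadOnlyDictionary<string, string> Attributes { get; }
+}
+
+public interface INamedServiceProvider<TService>
+{
+    // Enumerate all registered services of a given type so selectors can iterate over them.
+    ICollection<T> GetServices<T>() where T : TService;
+}
+```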
+ +These options would be used as follows: + +As an SK developer I want to write a custom `IAIServiceSelector` which will select an AI service based on the model id because I want to restrict which LLM is used. +In the sample below the service selector implementation looks for the first service that is a GPT3 model. + +### Option 1 + +``` csharp +public class Gpt3xAIServiceSelector : IAIServiceSelector +{ + public (T?, AIRequestSettings?) SelectAIService(string renderedPrompt, IAIServiceProvider serviceProvider, IReadOnlyList? modelSettings) where T : IAIService + { + var services = serviceProvider.GetServices(); + foreach (var service in services) + { + if (!string.IsNullOrEmpty(service.ModelId) && service.ModelId.StartsWith("gpt-3", StringComparison.OrdinalIgnoreCase)) + { + Console.WriteLine($"Selected model: {service.ModelId}"); + return (service, new OpenAIRequestSettings()); + } + } + + throw new SKException("Unable to find AI service for GPT 3.x."); + } +} +``` + +## Option 2 + +``` csharp +public class Gpt3xAIServiceSelector : IAIServiceSelector +{ + public (T?, AIRequestSettings?) SelectAIService(string renderedPrompt, IAIServiceProvider serviceProvider, IReadOnlyList? modelSettings) where T : IAIService + { + var services = serviceProvider.GetServices(); + foreach (var service in services) + { + var serviceModelId = service.GetAttributes()?.ModelId; + if (!string.IsNullOrEmpty(serviceModelId) && serviceModelId.StartsWith("gpt-3", StringComparison.OrdinalIgnoreCase)) + { + Console.WriteLine($"Selected model: {serviceModelId}"); + return (service, new OpenAIRequestSettings()); + } + } + + throw new SKException("Unable to find AI service for GPT 3.x."); + } +} +``` + +## Option 3 + +```csharp +public (T?, AIRequestSettings?) SelectAIService(string renderedPrompt, IAIServiceProvider serviceProvider, IReadOnlyList? modelSettings) where T : IAIService +{ + var services = serviceProvider.GetServices(); + foreach (var service in services) + { + var serviceModelId = service.GetModelId(); + var serviceOrganization = service.GetAttribute(OpenAIServiceAttributes.OrganizationKey); + var serviceDeploymentName = service.GetAttribute(AzureOpenAIServiceAttributes.DeploymentNameKey); + if (!string.IsNullOrEmpty(serviceModelId) && serviceModelId.StartsWith("gpt-3", StringComparison.OrdinalIgnoreCase)) + { + Console.WriteLine($"Selected model: {serviceModelId}"); + return (service, new OpenAIRequestSettings()); + } + } + + throw new SKException("Unable to find AI service for GPT 3.x."); +} +``` + +## Decision Outcome + +Chosen option: Option 1, because it's a simple implementation and allows easy iteration over all possible attributes. diff --git a/docs/decisions/0021-json-serializable-custom-types.md b/docs/decisions/0021-json-serializable-custom-types.md new file mode 100644 index 000000000000..d7a0072409a7 --- /dev/null +++ b/docs/decisions/0021-json-serializable-custom-types.md @@ -0,0 +1,124 @@ +--- +status: proposed +contact: dehoward +date: 2023-11-06 +deciders: alliscode, markwallace-microsoft +consulted: +informed: +--- + +# JSON Serializable Custom Types + +## Context and Problem Statement + +This ADR aims to simplify the usage of custom types by allowing developers to use any type that can be serialized using `System.Text.Json`. + +Standardizing on a JSON-serializable type is necessary to allow functions to be described using a JSON Schema within a planner's function manual. 
Using a JSON Schema to describe a function's input and output types will allow the planner to validate that the function is being used correctly. + +Today, use of custom types within Semantic Kernel requires developers to implement a custom `TypeConverter` to convert to/from the string representation of the type. This is demonstrated in [Example60_AdvancedNativeFunctions](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/KernelSyntaxExamples/Example60_AdvancedNativeFunctions.cs#L202C44-L202C44) as seen below: + +```csharp + [TypeConverter(typeof(MyCustomTypeConverter))] + private sealed class MyCustomType + { + public int Number { get; set; } + + public string? Text { get; set; } + } + + private sealed class MyCustomTypeConverter : TypeConverter + { + public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) => true; + + public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) + { + return JsonSerializer.Deserialize((string)value); + } + + public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType) + { + return JsonSerializer.Serialize(value); + } + } +``` + +The above approach will now only be needed when a custom type cannot be serialized using `System.Text.Json`. + +## Considered Options + +**1. Fallback to serialization using `System.Text.Json` if a `TypeConverter` is not available for the given type** + +- Primitive types will be handled using their native `TypeConverter`s + - We preserve the use of the native `TypeConverter` for primitive types to prevent any lossy conversions. +- Complex types will be handled by their registered `TypeConverter`, if provided. +- If no `TypeConverter` is registered for a complex type, our own `JsonSerializationTypeConverter` will be used to attempt JSON serialization/deserialization using `System.Text.Json`. + - A detailed error message will be thrown if the type cannot be serialized/deserialized. 
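+
+For example, with this fallback in place a plain custom type like the one below (a hypothetical type for illustration) could be passed between native and semantic functions without a hand-written `TypeConverter`:
+
+```csharp
+// No [TypeConverter] attribute required; System.Text.Json handles the round trip.
+private sealed class EmailSummary
+{
+    public string? Subject { get; set; }
+
+    public int WordCount { get; set; }
+}
+```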
+ +This will change the `GetTypeConverter()` method in `NativeFunction.cs` to look like the following, where before `null` was returned if no `TypeConverter` was found for the type: + +```csharp +private static TypeConverter GetTypeConverter(Type targetType) + { + if (targetType == typeof(byte)) { return new ByteConverter(); } + if (targetType == typeof(sbyte)) { return new SByteConverter(); } + if (targetType == typeof(bool)) { return new BooleanConverter(); } + if (targetType == typeof(ushort)) { return new UInt16Converter(); } + if (targetType == typeof(short)) { return new Int16Converter(); } + if (targetType == typeof(char)) { return new CharConverter(); } + if (targetType == typeof(uint)) { return new UInt32Converter(); } + if (targetType == typeof(int)) { return new Int32Converter(); } + if (targetType == typeof(ulong)) { return new UInt64Converter(); } + if (targetType == typeof(long)) { return new Int64Converter(); } + if (targetType == typeof(float)) { return new SingleConverter(); } + if (targetType == typeof(double)) { return new DoubleConverter(); } + if (targetType == typeof(decimal)) { return new DecimalConverter(); } + if (targetType == typeof(TimeSpan)) { return new TimeSpanConverter(); } + if (targetType == typeof(DateTime)) { return new DateTimeConverter(); } + if (targetType == typeof(DateTimeOffset)) { return new DateTimeOffsetConverter(); } + if (targetType == typeof(Uri)) { return new UriTypeConverter(); } + if (targetType == typeof(Guid)) { return new GuidConverter(); } + + if (targetType.GetCustomAttribute() is TypeConverterAttribute tca && + Type.GetType(tca.ConverterTypeName, throwOnError: false) is Type converterType && + Activator.CreateInstance(converterType) is TypeConverter converter) + { + return converter; + } + + // now returns a JSON-serializing TypeConverter by default, instead of returning null + return new JsonSerializationTypeConverter(); + } + + private sealed class JsonSerializationTypeConverter : TypeConverter + { + public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) => true; + + public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) + { + return JsonSerializer.Deserialize((string)value); + } + + public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType) + { + return JsonSerializer.Serialize(value); + } + } + +``` + +_When is serialization/deserialization required?_ + +Required + +- **Native to Semantic:** Passing variables from Native to Semantic **will** require serialization of the output of the Native Function from complex type to string so that it can be passed to the LLM. +- **Semantic to Native:** Passing variables from Semantic to Native **will** require de-serialization of the output of the Semantic Function between string to the complex type format that the Native Function is expecting. + +Not required + +- **Native to Native:** Passing variables from Native to Native **will not** require any serialization or deserialization as the complex type can be passed as-is. +- **Semantic to Semantic:** Passing variables from Semantic to Semantic **will not** require any serialization or deserialization as the the complex type will be passed around using its string representation. + +**2. 
Only use native serialization methods** +This option was originally considered, which would have effectively removed the use of the `TypeConverter`s in favor of a simple `JsonConverter`, but it was pointed out that this may result in lossy conversion between primitive types. For example, when converting from a `float` to an `int`, the primitive may be truncated by the native serialization methods in a way that does not provide an accurate result. + +## Decision Outcome diff --git a/docs/decisions/0022-skfunction.md b/docs/decisions/0022-skfunction.md new file mode 100644 index 000000000000..ffc06531a3df --- /dev/null +++ b/docs/decisions/0022-skfunction.md @@ -0,0 +1,46 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: proposed +contact: markwallace-microsoft +date: 2023-11-21 +deciders: SergeyMenshykh, markwallace, rbarreto, mabolan, stephentoub +consulted: +informed: +--- + +# Semantic Kernel Functions are defined using Interface or Abstract Base Class + +## Context and Problem Statement + +The Semantic Kernel must define an abstraction to represent a Function, i.e., a method that can be called as part of an AI orchestration. +Currently, this abstraction is the `ISKFunction` interface. +The goal of this ADR is to decide if this is the best abstraction to use to meet the long-term goals of Semantic Kernel. + +## Decision Drivers + +- The abstraction **must** be extensible so that new functionality can be added later. +- Changes to the abstraction **must not** result in breaking changes for consumers. +- It is not clear at this time if we need to allow consumers to provide their own `SKFunction` implementations. If we do, this may cause problems as we add new functionality to the Semantic Kernel, e.g. what if we define a new hook type? + +## Considered Options + +- `ISKFunction` interface +- `SKFunction` base class + +### `ISKFunction` Interface + +- Good, because implementations can extend any arbitrary class +- Bad, because we can only change the default behavior of our implementations and customer implementations may become incompatible. +- Bad, because we cannot prevent customers from implementing this interface. +- Bad, because changes to the interface are breaking changes for consumers. + +### `SKFunction` Base Class + +- Good, because changes to the abstraction are **not** breaking changes for consumers. +- Good, because the class constructor can be made `internal` so we can prevent extensions until we know there are valid use cases. +- Good, because we can change the default implementation easily in the future. +- Bad, because implementations can only extend `SKFunction`. + +## Decision Outcome + +Chosen option: "`SKFunction` base class", because we can provide some default implementation and we can restrict creation of new SKFunctions until we better understand those use cases. diff --git a/docs/decisions/0023-handlebars-template-engine.md b/docs/decisions/0023-handlebars-template-engine.md new file mode 100644 index 000000000000..0b8f1566bb93 --- /dev/null +++ b/docs/decisions/0023-handlebars-template-engine.md @@ -0,0 +1,267 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: accepted +contact: teresaqhoang +date: 2023-12-06 +deciders: markwallace, alliscode, SergeyMenshykh +consulted: markwallace, mabolan +informed: stephentoub +--- + +# Handlebars Prompt Template Helpers + +## Context and Problem Statement + +We want to use Handlebars as a template factory for rendering prompts and planners in the Semantic Kernel.
Handlebars provides a simple and expressive syntax for creating dynamic templates with logic and data. However, Handlebars does not have built-in support for some features and scenarios that are relevant for our use cases, such as: + +- Marking a block of text as a message with a role for chat completion connectors. +- Invoking functions from the kernel and passing parameters to them. +- Setting and getting variables in the template context. +- Performing common operations such as concatenation, arithmetic, comparison, and JSON serialization. +- Supporting different output types and formats for the rendered template. + +Therefore, we need to extend Handlebars with custom helpers that can address these gaps and provide a consistent and convenient way for prompt and planner engineers to write templates. + +First, we will do this by **_baking in a defined set of custom system helpers_** for common operations and utilities that are not provided by any of the built-in Handlebars helpers, which: + +- Allows us full control over what functionality can be executed by the Handlebars template factory. +- Enhances the functionality and usability of the template factory, by providing helpers for common operations and utilities that are not provided by any built-in Handlebars helpers but are commonly hallucinated by the model. +- Improves the expressiveness and readability of the rendered template, as the helpers can be used to perform simple or complex logic or transformations on the template data / arguments. +- Provides flexibility and convenience for the users, as they can: + + - Choose the syntax, and + - Extend, add, or omit certain helpers + + to best suit their needs and preferences. + +- Allows for customization of specific operations or utilities that may have different behavior or requirements, such as handling output types, formats, or errors. + +These helpers would handle the evaluation of the arguments, the execution of the operation or utility, and the writing of the result to the template. Examples of such operations are `{{concat string1 string2 ...}}`, `{{equal value1 value2}}`, `{{json object}}`, `{{set name=value}}`, `{{get name}}`, `{{or condition1 condition2}}`, etc. + +Secondly, we have to **_expose the functions that are registered in the Kernel as helpers_** to the Handlebars template factory. Options for this are detailed below. + +## Decision Drivers + +- We want to leverage the existing Handlebars helpers, syntax, and mechanisms for loading helpers as much as possible, without introducing unnecessary complexity or inconsistency. +- We want to provide helpers that are useful and intuitive for prompt and SK engineers. +- We want to ensure that the helpers are well-documented, tested, and maintained, and that they do not conflict with each other or with the built-in Handlebars helpers. +- We want to support different output types and formats for the rendered template, such as text, JSON, or complex objects, and allow the template to specify the desired output type. + +## Considered Options + +We considered the following options for extending Handlebars with kernel functions as custom helpers: + +**1. Use a single helper for invoking functions from the kernel.** This option would use a generic helper, such as `{{invoke pluginName-functionName param1=value1 param2=value2 ...}}`, to call any function from the kernel and pass parameters to it.
The helper would handle the execution of the function, the conversion of the parameters and the result, and the writing of the result to the template. + +**2. Use a separate helper for each function from the kernel.** This option would register a new helper for each function, such as `{{pluginName-functionName param1=value1 param2=value2 ...}}`, to handle the execution of the function, the conversion of the parameters and the result, and the writing of the result to the template. + +## Pros and Cons + +### 1. Use a single generic helper for invoking functions from the kernel + +Pros: + +- Simplifies the registration and maintenance of the helper, as only one helper, `invoke`, needs to be defined and updated. +- Provides a consistent and uniform syntax for calling any function from the kernel, regardless of the plugin or function name, parameter details, or the result. +- Allows for customization and special logic of kernel functions, such as handling output types, execution restrictions, or errors. +- Allows the use of positional or named arguments, as well as hash arguments, for passing parameters to the function. + +Cons: + +- Reduces the expressiveness and readability of the template, as the function name and parameters are wrapped in a generic helper invocation. +- Adds additional syntax for the model to learn and keep track of, potentially leading to more errors during render. + +### 2. Use a generic helper for _each_ function from the kernel + +Pros: + +- Has all the benefits of option 1, but largely improves the expressiveness and readability of the template, as the function name and parameters are directly written in the template. +- Maintains ease of maintenance for handling each function, as each helper will follow the same templated logic for registration and execution. + +Cons: + +- May cause conflicts or confusion with the built-in Handlebars helpers or the kernel variables, if the function name or the parameter name matches them. + +## Decision Outcome + +We decided to go with option 2: providing special helpers to invoke any function in the kernel. These helpers will follow the same logic and syntax for each registered function. We believe that this approach, alongside the custom system helpers that will enable special utility logic or behavior, provides the best balance between simplicity, expressiveness, flexibility, and functionality for the Handlebars template factory and our users. + +With this approach, + +- We will allow customers to use any of the built-in [Handlebars.Net helpers](https://github.com/Handlebars-Net/Handlebars.Net.Helpers). +- We will provide utility helpers, which are registered by default. +- We will provide prompt helpers (e.g. chat message), which are registered by default. +- We will register all plugin functions registered on the `Kernel`. +- We will allow customers to control which plugins are registered as helpers and the syntax of helpers' signatures. + - By default, we will honor all options defined in [HandlebarsHelperOptions](https://github.com/Handlebars-Net/Handlebars.Net.Helpers/blob/8f7c9c082e18845f6a620bbe34bf4607dcba405b/src/Handlebars.Net.Helpers/Options/HandlebarsHelpersOptions.cs#L12). + - Additionally, we will extend this configuration to include a `RegisterCustomHelpersCallback` option that users can set to register custom helpers. +- We will allow Kernel function arguments to be easily accessed, i.e., function variables and execution settings, via a `KernelArguments` object. 
+- We will allow customers to control when plugin functions are registered as helpers. + - By default, this is done when template is rendered. + - Optionally, this can be done when the Handlebars template factory is constructed by passing in a Plugin collection. +- If conflicts arise between built-in helpers, variables, or kernel objects: + - We will throw an error clearly explaining what the issue is, as well as + - Allow customers to provide their own implementations and overrides, including an option to not register default helpers. This can be done by setting `Options.Categories` to an empty array `[]`. + +We also decided to follow some guidelines and best practices for designing and implementing the helpers, such as: + +- Documenting the purpose, syntax, parameters, and behavior of each helper, and providing examples and tests for them. +- Naming the helpers in a clear and consistent way, and avoiding conflicts or confusion with the built-in Handlebars helpers or the kernel functions or variables. + - Using standalone function names for custom system helpers (i.e., json, set) + - Using the delimiter "`-`" for helpers registered to handle the kernel functions, to distinguish them from each other and from our system or built-in Handlebars helpers. +- Supporting both positional and hash arguments, for passing parameters to the helpers, and validating the arguments for the required type and count. +- Handling the output types, formats, and errors of the helpers, including complex types or JSON schemas. +- Implementing the helpers in a performant and secure way, and avoiding any side effects or unwanted modifications to the template context or data. + +Effectively, there will be four buckets of helpers enabled in the Handlebars Template Engine: + +1. Default helpers from the Handlebars library, including: + - [Built-in helpers](https://handlebarsjs.com/guide/builtin-helpers.html) that enable loops and conditions (#if, #each, #with, #unless) + - [Handlebars.Net.Helpers](https://github.com/Handlebars-Net/Handlebars.Net.Helpers/wiki) +2. Functions in the kernel +3. Helpers helpful to prompt engineers (i.e., message, or) +4. Utility helpers that can be used to perform simple logic or transformations on the template data or arguments (i.e., set, get, json, concat, equals, range, array) + +### Pseudocode for the Handlebars Prompt Template Engine + +A prototype implementation of a Handlebars prompt template factory with built-in helpers could look something like this: + +```csharp +/// Options for Handlebars helpers (built-in and custom). +public sealed class HandlebarsPromptTemplateOptions : HandlebarsHelpersOptions +{ + // Categories tracking built-in system helpers + public enum KernelHelperCategories + { + Prompt, + Plugin, + Context, + String, + ... + } + + /// Default character to use for delimiting plugin name and function name in a Handlebars template. + public string DefaultNameDelimiter { get; set; } = "-"; + + /// Delegate for registering custom helpers. + public delegate void RegisterCustomHelpersCallback(IHandlebars handlebarsInstance, KernelArguments executionContext); + + /// Callback for registering custom helpers. + public RegisterCustomHelpersCallback? RegisterCustomHelpers { get; set; } = null; + + // Pseudocode, some combination of both KernelHelperCategories and the default HandlebarsHelpersOptions.Categories. 
+ public List AllCategories = KernelHelperCategories.AddRange(Categories); +} +``` + +```csharp +// Handlebars Prompt Template +internal class HandlebarsPromptTemplate : IPromptTemplate +{ + public async Task RenderAsync(Kernel kernel, KernelArguments arguments, CancellationToken cancellationToken = default) + { + arguments ??= new(); + var handlebarsInstance = HandlebarsDotNet.Handlebars.Create(); + + // Add helpers for kernel functions + KernelFunctionHelpers.Register(handlebarsInstance, kernel, arguments, this._options.PrefixSeparator, cancellationToken); + + // Add built-in system helpers + KernelSystemHelpers.Register(handlebarsInstance, arguments, this._options); + + // Register any custom helpers + if (this._options.RegisterCustomHelpers is not null) + { + this._options.RegisterCustomHelpers(handlebarsInstance, arguments); + } + ... + + return await Task.FromResult(prompt).ConfigureAwait(true); + } +} + +``` + +```csharp +/// Extension class to register Kernel functions as helpers. +public static class KernelFunctionHelpers +{ + public static void Register( + IHandlebars handlebarsInstance, + Kernel kernel, + KernelArguments executionContext, + string nameDelimiter, + CancellationToken cancellationToken = default) + { + kernel.Plugins.GetFunctionsMetadata().ToList() + .ForEach(function => + RegisterFunctionAsHelper(kernel, executionContext, handlebarsInstance, function, nameDelimiter, cancellationToken) + ); + } + + private static void RegisterFunctionAsHelper( + Kernel kernel, + KernelArguments executionContext, + IHandlebars handlebarsInstance, + KernelFunctionMetadata functionMetadata, + string nameDelimiter, + CancellationToken cancellationToken = default) + { + // Register helper for each function + handlebarsInstance.RegisterHelper(fullyResolvedFunctionName, (in HelperOptions options, in Context context, in Arguments handlebarsArguments) => + { + // Get parameters from template arguments; check for required parameters + type match + + // If HashParameterDictionary + ProcessHashArguments(functionMetadata, executionContext, handlebarsArguments[0] as IDictionary, nameDelimiter); + + // Else + ProcessPositionalArguments(functionMetadata, executionContext, handlebarsArguments); + + KernelFunction function = kernel.Plugins.GetFunction(functionMetadata.PluginName, functionMetadata.Name); + + InvokeSKFunction(kernel, function, GetKernelArguments(executionContext), cancellationToken); + }); + } + ... +} +``` + +```csharp +/// Extension class to register additional helpers as Kernel System helpers. +public static class KernelSystemHelpers +{ + public static void Register(IHandlebars handlebarsInstance, KernelArguments arguments, HandlebarsPromptTemplateOptions options) + { + RegisterHandlebarsDotNetHelpers(handlebarsInstance, options); + RegisterSystemHelpers(handlebarsInstance, arguments, options); + } + + // Registering all helpers provided by https://github.com/Handlebars-Net/Handlebars.Net.Helpers. + private static void RegisterHandlebarsDotNetHelpers(IHandlebars handlebarsInstance, HandlebarsPromptTemplateOptions helperOptions) + { + HandlebarsHelpers.Register(handlebarsInstance, optionsCallback: options => + { + ...helperOptions + }); + } + + // Registering all helpers built by the SK team to support the kernel. 
+ private static void RegisterSystemHelpers( + IHandlebars handlebarsInstance, KernelArguments arguments, HandlebarsPromptTemplateOptions helperOptions) + { + // Where each built-in helper will have its own defined class, following the same pattern that is used by Handlebars.Net.Helpers. + // https://github.com/Handlebars-Net/Handlebars.Net.Helpers + if (helperOptions.AllCategories contains helperCategory) + ... + KernelPromptHelpers.Register(handlebarsContext); + KernelPluginHelpers.Register(handlebarsContext); + KernelStringHelpers.Register(handlebarsContext); + ... + } +} +``` + +**Note: This is just a prototype implementation for illustration purposes only.** + +Handlebars supports different object types as variables on render. This opens up the option to use objects outright rather than just strings in semantic functions, i.e., loop over arrays or access properties of complex objects, without serializing or deserializing objects before invocation. diff --git a/docs/decisions/0023-kernel-streaming.md b/docs/decisions/0023-kernel-streaming.md new file mode 100644 index 000000000000..c4116cb5eaa9 --- /dev/null +++ b/docs/decisions/0023-kernel-streaming.md @@ -0,0 +1,350 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: proposed +date: 2023-11-13 +deciders: rogerbarreto,markwallace-microsoft,SergeyMenshykh,dmytrostruk +consulted: +informed: +--- + +# Streaming Capability for Kernel and Functions usage - Phase 1 + +## Context and Problem Statement + +It is quite common in co-pilot implementations to have a streamed output of messages from the LLM (large language model), and currently that is not possible while using ISKFunctions.InvokeAsync or Kernel.RunAsync methods, which forces users to work around the Kernel and Functions and use the `ITextCompletion` and `IChatCompletion` services directly, as those are the only interfaces that currently support streaming. + +Currently, streaming is a capability that not all providers support. As part of our design, we try to ensure the services have the proper abstractions to support streaming not only of text but also of other types of data like images, audio, video, etc. + +It needs to be clear to the sk developer when they are attempting to get streaming data. + +## Decision Drivers + +1. The sk developer should be able to get streaming data from the Kernel and Functions using Kernel.RunAsync or ISKFunctions.InvokeAsync methods. + +2. The sk developer should be able to get the data in a generic way, so the Kernel and Functions can stream data of any type, not limited to text. + +3. The sk developer, when using streaming with a model that does not support it, should still be able to use streaming, with a single streaming update representing the whole data. + +## Out of Scope + +- Streaming with plans will not be supported in this phase. Attempting to do so will throw an exception. +- Kernel streaming will not support multiple functions (pipeline). +- Input streaming will not be supported in this phase. +- Post Hook Skipping, Repeat and Cancelling of streaming functions are not supported. + +## Considered Options + +### Option 1 - Dedicated Streaming Interfaces + +Use dedicated streaming interfaces that allow the sk developer to get the streaming data in a generic way (including string and byte array) directly from the connector, as well as allowing the Kernel and Functions implementations to stream data of any type, not limited to text.
+ +This approach also exposes dedicated interfaces in the kernel and functions to use streaming making it clear to the sk developer what is the type of data being returned in IAsyncEnumerable format. + +`ITextCompletion` and `IChatCompletion` will have new APIs to get `byte[]` and `string` streaming data directly as well as the specialized `StreamingContent` return. + +The sk developer will be able to specify a generic type to the `Kernel.RunStreamingAsync()` and `ISKFunction.InvokeStreamingAsync` to get the streaming data. If the type is not specified, the Kernel and Functions will return the data as StreamingContent. + +If the type is not specified or if the string representation cannot be cast, an exception will be thrown. + +If the type specified is `StreamingContent` or another any type supported by the connector no error will be thrown. + +## User Experience Goal + +```csharp +//(providing the type at as generic parameter) + +// Getting a Raw Streaming data from Kernel +await foreach(string update in kernel.RunStreamingAsync(function, variables)) + +// Getting a String as Streaming data from Kernel +await foreach(string update in kernel.RunStreamingAsync(function, variables)) + +// Getting a StreamingContent as Streaming data from Kernel +await foreach(StreamingContent update in kernel.RunStreamingAsync(variables, function)) +// OR +await foreach(StreamingContent update in kernel.RunStreamingAsync(function, variables)) // defaults to Generic above) +{ + Console.WriteLine(update); +} +``` + +Abstraction class for any stream content, connectors will be responsible to provide the specialized type of `StreamingContent` which will contain the data as well as any metadata related to the streaming result. + +```csharp + +public abstract class StreamingContent +{ + public abstract int ChoiceIndex { get; } + + /// Returns a string representation of the chunk content + public abstract override string ToString(); + + /// Abstract byte[] representation of the chunk content in a way it could be composed/appended with previous chunk contents. + /// Depending on the nature of the underlying type, this method may be more efficient than . + public abstract byte[] ToByteArray(); + + /// Internal chunk content object reference. (Breaking glass). + /// Each connector will have its own internal object representing the content chunk content. + /// The usage of this property is considered "unsafe". Use it only if strictly necessary. + public object? InnerContent { get; } + + /// The metadata associated with the content. + public Dictionary? Metadata { get; set; } + + /// The current context associated the function call. + internal SKContext? Context { get; set; } + + /// Inner content object reference + protected StreamingContent(object? innerContent) + { + this.InnerContent = innerContent; + } +} +``` + +Specialization example of a StreamingChatContent + +```csharp +// +public class StreamingChatContent : StreamingContent +{ + public override int ChoiceIndex { get; } + public FunctionCall? FunctionCall { get; } + public string? Content { get; } + public AuthorRole? Role { get; } + public string? 
Name { get; } + + public StreamingChatContent(AzureOpenAIChatMessage chatMessage, int resultIndex) : base(chatMessage) + { + this.ChoiceIndex = resultIndex; + this.FunctionCall = chatMessage.InnerChatMessage?.FunctionCall; + this.Content = chatMessage.Content; + this.Role = new AuthorRole(chatMessage.Role.ToString()); + this.Name = chatMessage.InnerChatMessage?.Name; + } + + public override byte[] ToByteArray() => Encoding.UTF8.GetBytes(this.ToString()); + public override string ToString() => this.Content ?? string.Empty; +} +``` + +`IChatCompletion` and `ITextCompletion` interfaces will have new APIs to get a generic streaming content data. + +```csharp +interface ITextCompletion + IChatCompletion +{ + IAsyncEnumerable GetStreamingContentAsync(...); + + // Throw exception if T is not supported +} + +interface IKernel +{ + // Get streaming function content of T + IAsyncEnumerable RunStreamingAsync(ContextVariables variables, ISKFunction function); +} + +interface ISKFunction +{ + // Get streaming function content of T + IAsyncEnumerable InvokeStreamingAsync(SKContext context); +} +``` + +## Prompt/Semantic Functions Behavior + +When Prompt Functions are invoked using the Streaming API, they will attempt to use the Connectors streaming implementation. +The connector will be responsible to provide the specialized type of `StreamingContent` and even if the underlying backend API don't support streaming the output will be one streamingcontent with the whole data. + +## Method/Native Functions Behavior + +Method Functions will support `StreamingContent` automatically with as a `StreamingMethodContent` wrapping the object returned in the iterator. + +```csharp +public sealed class StreamingMethodContent : StreamingContent +{ + public override int ChoiceIndex => 0; + + /// Method object value that represents the content chunk + public object Value { get; } + + /// Default implementation + public override byte[] ToByteArray() + { + if (this.Value is byte[]) + { + // If the method value is byte[] we return it directly + return (byte[])this.Value; + } + + // By default if a native value is not byte[] we output the UTF8 string representation of the value + return Encoding.UTF8.GetBytes(this.Value?.ToString()); + } + + /// + public override string ToString() + { + return this.Value.ToString(); + } + + /// + /// Initializes a new instance of the class. + /// + /// Underlying object that represents the chunk + public StreamingMethodContent(object innerContent) : base(innerContent) + { + this.Value = innerContent; + } +} +``` + +If a MethodFunction is returning an `IAsyncEnumerable` each enumerable result will be automatically wrapped in the `StreamingMethodContent` keeping the streaming behavior and the overall abstraction consistent. + +When a MethodFunction is not an `IAsyncEnumerable`, the complete result will be wrapped in a `StreamingMethodContent` and will be returned as a single item. + +## Pros + +1. All the User Experience Goal section options will be possible. +2. Kernel and Functions implementations will be able to stream data of any type, not limited to text +3. The sk developer will be able to provide the streaming content type it expects from the `GetStreamingContentAsync` method. +4. Sk developer will be able to get streaming from the Kernel, Functions and Connectors with the same result type. + +## Cons + +1. 
If the sk developer wants to use the specialized type of `StreamingContent` he will need to know what the connector is being used to use the correct **StreamingContent extension method** or to provide directly type in ``. +2. Connectors will have greater responsibility to support the correct special types of `StreamingContent`. + +### Option 2 - Dedicated Streaming Interfaces (Returning a Class) + +All changes from option 1 with the small difference below: + +- The Kernel and SKFunction streaming APIs interfaces will return `StreamingFunctionResult` which also implements `IAsyncEnumerable` +- Connectors streaming APIs interfaces will return `StreamingConnectorContent` which also implements `IAsyncEnumerable` + +The `StreamingConnectorContent` class is needed for connectors as one way to pass any information relative to the request and not the chunk that can be used by the functions to fill `StreamingFunctionResult` metadata. + +## User Experience Goal + +Option 2 Biggest benefit: + +```csharp +// When the caller needs to know more about the streaming he can get the result reference before starting the streaming. +var streamingResult = await kernel.RunStreamingAsync(function); +// Do something with streamingResult properties + +// Consuming the streamingResult requires an extra await: +await foreach(StreamingContent chunk content in await streamingResult) +``` + +Using the other operations will be quite similar (only needing an extra `await` to get the iterator) + +```csharp +// Getting a Raw Streaming data from Kernel +await foreach(string update in await kernel.RunStreamingAsync(function, variables)) + +// Getting a String as Streaming data from Kernel +await foreach(string update in await kernel.RunStreamingAsync(function, variables)) + +// Getting a StreamingContent as Streaming data from Kernel +await foreach(StreamingContent update in await kernel.RunStreamingAsync(variables, function)) +// OR +await foreach(StreamingContent update in await kernel.RunStreamingAsync(function, variables)) // defaults to Generic above) +{ + Console.WriteLine(update); +} + +``` + +StreamingConnectorResult is a class that can store information regarding the result before the stream is consumed as well as any underlying object (breaking glass) that the stream consumes at the connector level. + +```csharp + +public sealed class StreamingConnectorResult : IAsyncEnumerable +{ + private readonly IAsyncEnumerable _StreamingContentource; + + public object? InnerResult { get; private set; } = null; + + public StreamingConnectorResult(Func> streamingReference, object? innerConnectorResult) + { + this._StreamingContentource = streamingReference.Invoke(); + this.InnerResult = innerConnectorResult; + } +} + +interface ITextCompletion + IChatCompletion +{ + Task> GetStreamingContentAsync(); + // Throw exception if T is not supported + // Initially connectors +} +``` + +StreamingFunctionResult is a class that can store information regarding the result before the stream is consumed as well as any underlying object (breaking glass) that the stream consumes from Kernel and SKFunctions. + +```csharp +public sealed class StreamingFunctionResult : IAsyncEnumerable +{ + internal Dictionary? _metadata; + private readonly IAsyncEnumerable _streamingResult; + + public string FunctionName { get; internal set; } + public Dictionary Metadata { get; internal set; } + + /// + /// Internal object reference. (Breaking glass). + /// Each connector will have its own internal object representing the result. + /// + public object? 
InnerResult { get; private set; } = null; + + /// + /// Instance of used by the function. + /// + internal SKContext Context { get; private set; } + + public StreamingFunctionResult(string functionName, SKContext context, Func> streamingResult, object? innerFunctionResult) + { + this.FunctionName = functionName; + this.Context = context; + this._streamingResult = streamingResult.Invoke(); + this.InnerResult = innerFunctionResult; + } +} + +interface ISKFunction +{ + // Extension generic method to get from type + Task> InvokeStreamingAsync(...); +} + +static class KernelExtensions +{ + public static async Task> RunStreamingAsync(this Kernel kernel, ISKFunction skFunction, ContextVariables? variables, CancellationToken cancellationToken) + { + ... + } +} +``` + +## Pros + +1. All benefits from Option 1 + +2. Having StreamingFunctionResults allows the sk developer to know more details about the result before consuming the stream, like: + - Any metadata provided by the underlying API + - SKContext + - Function Name and Details +3. The experience of using streaming is quite similar to option 1 (only an extra await is needed to get the result) +4. APIs behave similarly to the non-streaming API (returning a result representation to get the value) + +## Cons + +1. All cons from Option 1 + +2. Added complexity, as the IAsyncEnumerable cannot be passed directly as the method result, demanding a delegate approach to be adapted inside the Results that implement the IAsyncEnumerator. +3. Added complexity, as IDisposable needs to be implemented in the Results to dispose the response object, and the caller would need to handle the disposal of the result. +4. As soon as the caller gets a `StreamingFunctionResult`, a network connection will be kept open until the caller implementation consumes it (enumerates over the `IAsyncEnumerable`). + +## Decision Outcome + +Option 1 was chosen as the best option, as the small benefit of Option 2 does not justify the complexity described in the Cons. + +It was also decided that the metadata related to a connector backend response can be added to the `StreamingContent.Metadata` property. This will allow the sk developer to get the metadata even without a `StreamingConnectorResult` or `StreamingFunctionResult`. diff --git a/docs/decisions/0024-connectors-api-equalization.md b/docs/decisions/0024-connectors-api-equalization.md new file mode 100644 index 000000000000..94bee2d67806 --- /dev/null +++ b/docs/decisions/0024-connectors-api-equalization.md @@ -0,0 +1,236 @@ +## Proposal + +### IChatCompletion + +Before: + +```csharp +public interface IChatCompletion : IAIService +{ + ChatHistory CreateNewChat(string?
instructions = null); + + Task> GetChatCompletionsAsync(ChatHistory chat, ...); + + Task> GetChatCompletionsAsync(string prompt, ...); + + IAsyncEnumerable GetStreamingContentAsync(ChatHistory chatHistory, ...); +} + +public static class ChatCompletionExtensions +{ + public static async Task GenerateMessageAsync(ChatHistory chat, ...); +} +``` + +After: + +```csharp +public interface IChatCompletion : IAIService +{ + Task> GetChatContentsAsync(ChatHistory chat, ..> tags) + + IAsyncEnumerable GetStreamingChatContentsAsync(ChatHistory chatHistory, ...); +} + +public static class ChatCompletionExtensions +{ + // v Single vv Standardized Prompt (Parse tags) + public static async Task GetChatContentAsync(string prompt, ...); + + // v Single + public static async Task GetChatContentAsync(ChatHistory chatHistory, ...); + + public static IAsyncEnumerable GetStreamingChatContentsAsync(string prompt, ...); +} +``` + +### ITextCompletion + +Before: + +```csharp +public interface ITextCompletion : IAIService +{ + Task> GetCompletionsAsync(string prompt, ...); + + IAsyncEnumerable GetStreamingContentAsync(string prompt, ...); +} + +public static class TextCompletionExtensions +{ + public static async Task CompleteAsync(string text, ...); + + public static IAsyncEnumerable GetStreamingContentAsync(string input, ...); +} +``` + +After: + +```csharp +public interface ITextCompletion : IAIService +{ + Task> GetTextContentsAsync(string prompt, ...); + + IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, ...); +} + +public static class TextCompletionExtensions +{ + public static async Task GetTextContentAsync(string prompt, ...); +} +``` + +## Content Abstractions + +### Model Comparisons + +#### Current Streaming Abstractions + +| Streaming (Current) | Specialized\* Streaming (Current) | +| ------------------------------------------- | --------------------------------------------------------------- | +| `StreamingChatContent` : `StreamingContent` | `OpenAIStreamingChatContent` | +| `StreamingTextContent` : `StreamingContent` | `OpenAIStreamingTextContent`, `HuggingFaceStreamingTextContent` | + +#### Non-Streaming Abstractions (Before and After) + +| Non-Streaming (Before) | Non-Streaming (After) | Specialized\* Non-Streaming (After) | +| ----------------------------- | ------------------------------ | --------------------------------------------- | +| `IChatResult` : `IResultBase` | `ChatContent` : `ModelContent` | `OpenAIChatContent` | +| `ITextResult` : `IResultBase` | `TextContent` : `ModelContent` | `OpenAITextContent`, `HuggingFaceTextContent` | +| `ChatMessage` | `ChatContent` : `ModelContent` | `OpenAIChatContent` | + +_\*Specialized: Connector implementations that are specific to a single AI Service._ + +### New Non-Streaming Abstractions: + +`ModelContent` was chosen to represent a `non-streaming content` top-most abstraction which can be specialized and contains all the information that the AI Service returned. (Metadata, Raw Content, etc.) + +```csharp +/// +/// Base class for all AI non-streaming results +/// +public abstract class ModelContent +{ + /// + /// Raw content object reference. (Breaking glass). + /// + public object? InnerContent { get; } + + /// + /// The metadata associated with the content. + /// ⚠️ (Token Usage + More Backend API Metadata) info will be in this dictionary. Old IResult.ModelResult) ⚠️ + /// + public Dictionary? Metadata { get; } + + /// + /// Initializes a new instance of the class. 
+ /// + /// Raw content object reference + /// Metadata associated with the content + protected CompleteContent(object rawContent, Dictionary? metadata = null) + { + this.InnerContent = rawContent; + this.Metadata = metadata; + } +} +``` + +```csharp +/// +/// Chat content abstraction +/// +public class ChatContent : ModelContent +{ + /// + /// Role of the author of the message + /// + public AuthorRole Role { get; set; } + + /// + /// Content of the message + /// + public string Content { get; protected set; } + + /// + /// Creates a new instance of the class + /// + /// + /// Dictionary for any additional metadata + public ChatContent(ChatMessage chatMessage, Dictionary? metadata = null) : base(chatMessage, metadata) + { + this.Role = chatMessage.Role; + this.Content = chatMessage.Content; + } +} +``` + +```csharp +/// +/// Represents a text content result. +/// +public class TextContent : ModelContent +{ + /// + /// The text content. + /// + public string Text { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// Text content + /// Additional metadata + public TextContent(string text, Dictionary? metadata = null) : base(text, metadata) + { + this.Text = text; + } +} +``` + +### End-User Experience + +- No changes to the end-user experience when using `Function.InvokeAsync` or `Kernel.InvokeAsync` +- Changes only when using Connector APIs directly + +#### Example 16 - Custom LLMS + +Before + +```csharp +await foreach (var message in textCompletion.GetStreamingContentAsync(prompt, executionSettings)) +{ + Console.Write(message); +} +``` + +After + +```csharp +await foreach (var message in textCompletion.GetStreamingTextContentAsync(prompt, executionSettings)) +{ + Console.Write(message); +} +``` + +#### Example 17 - ChatGPT + +Before + +```csharp +string reply = await chatGPT.GenerateMessageAsync(chatHistory); +chatHistory.AddAssistantMessage(reply); +``` + +After + +```csharp +var reply = await chatGPT.GetChatContentAsync(chatHistory); +chatHistory.AddMessage(reply); + +// OR +chatHistory.AddAssistantMessage(reply.Content); +``` + +### Clean-up + +All old interfaces and classes will be removed in favor of the new ones. diff --git a/docs/decisions/0025-chat-content-models.md b/docs/decisions/0025-chat-content-models.md new file mode 100644 index 000000000000..f2afbdb94736 --- /dev/null +++ b/docs/decisions/0025-chat-content-models.md @@ -0,0 +1,305 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: accepted +contact: dmytrostruk +date: 2023-12-08 +deciders: SergeyMenshykh, markwallace, rbarreto, mabolan, stephentoub, dmytrostruk +consulted: +informed: +--- +# Chat Models + +## Context and Problem Statement + +In latest OpenAI API, `content` property of `chat message` object can accept two types of values `string` or `array` ([Documentation](https://platform.openai.com/docs/api-reference/chat/create)). + +We should update current implementation of `ChatMessageContent` class with `string Content` property to support this API. + +## Decision Drivers + +1. New design should not be coupled to OpenAI API and should work for other AI providers. +2. Naming of classes and properties should be consistent and intuitive. + +## Considered Options + +Some of the option variations can be combined. + +### Option #1: Naming updates and new data type for `chat message content` + +Since `chat message content` can be an object now instead of `string`, it requires reserved name for better understanding in domain. + +1. 
`ChatMessageContent` will be renamed to `ChatMessage`. (Same for `StreamingChatMessageContent`). +2. `GetChatMessageContent` methods will be renamed to `GetChatMessage`. +3. New abstract class `ChatMessageContent` that will have property `ChatMessageContentType Type` with values `text`, `image`. (Will be extended with `audio`, `video` in the future). +4. `ChatMessage` will contain collection of `ChatMessageContent` objects `IList Contents`. +5. There will be concrete implementations of `ChatMessageContent` - `ChatMessageTextContent` and `ChatMessageImageContent`. + +New _ChatMessageContentType.cs_ + +```csharp +public readonly struct ChatMessageContentType : IEquatable +{ + public static ChatMessageContentType Text { get; } = new("text"); + + public static ChatMessageContentType Image { get; } = new("image"); + + public string Label { get; } + + // Implementation of `IEquatable`... +} +``` + +New _ChatMessageContent.cs_ + +```csharp +public abstract class ChatMessageContent +{ + public ChatMessageContentType Type { get; set; } + + public ChatMessageContent(ChatMessageContentType type) + { + this.Type = type; + } +} +``` + +Updated _ChatMessage.cs_: + +```csharp +public class ChatMessage : ContentBase +{ + public AuthorRole Role { get; set; } + + public IList Contents { get; set; } +``` + +New _ChatMessageTextContent.cs_ + +```csharp +public class ChatMessageTextContent : ChatMessageContent +{ + public string Text { get; set; } + + public ChatMessageTextContent(string text) : base(ChatMessageContentType.Text) + { + this.Text = text; + } +} +``` + +New _ChatMessageImageContent.cs_ + +```csharp +public class ChatMessageImageContent : ChatMessageContent +{ + public Uri Uri { get; set; } + + public ChatMessageImageContent(Uri uri) : base(ChatMessageContentType.Image) + { + this.Uri = uri; + } +} +``` + +Usage: + +```csharp +var chatHistory = new ChatHistory("You are friendly assistant."); + +// Construct request +var userContents = new List +{ + new ChatMessageTextContent("What's in this image?"), + new ChatMessageImageContent(new Uri("https://link-to-image.com")) +}; + +chatHistory.AddUserMessage(userContents); + +// Get response +var message = await chatCompletionService.GetChatMessageAsync(chatHistory); + +foreach (var content in message.Contents) +{ + // Possibility to get content type (text or image). + var contentType = content.Type; + + // Cast for specific content type + // Extension methods can be provided for better usability + // (e.g. message GetContent()). + if (content is ChatMessageTextContent textContent) + { + Console.WriteLine(textContent); + } + + if (content is ChatMessageImageContent imageContent) + { + Console.WriteLine(imageContent.Uri); + } +} +``` + +### Option #2: Avoid renaming and new data type for `chat message content` + +Same as Option #1, but without naming changes. In order to differentiate actual `chat message` and `chat message content`: + +- `Chat Message` will be `ChatMessageContent` (as it is right now). +- `Chat Message Content` will be `ChatMessageContentItem`. + +1. New abstract class `ChatMessageContentItem` that will have property `ChatMessageContentItemType Type` with values `text`, `image`. (Will be extended with `audio`, `video` in the future). +2. `ChatMessageContent` will contain collection of `ChatMessageContentItem` objects `IList Items`. +3. There will be concrete implementations of `ChatMessageContentItem` - `ChatMessageTextContentItem` and `ChatMessageImageContentItem`. 
+ +New _ChatMessageContentItemType.cs_ + +```csharp +public readonly struct ChatMessageContentItemType : IEquatable +{ + public static ChatMessageContentItemType Text { get; } = new("text"); + + public static ChatMessageContentItemType Image { get; } = new("image"); + + public string Label { get; } + + // Implementation of `IEquatable`... +} +``` + +New _ChatMessageContentItem.cs_ + +```csharp +public abstract class ChatMessageContentItem +{ + public ChatMessageContentItemType Type { get; set; } + + public ChatMessageContentItem(ChatMessageContentItemType type) + { + this.Type = type; + } +} +``` + +Updated _ChatMessageContent.cs_: + +```csharp +public class ChatMessageContent : ContentBase +{ + public AuthorRole Role { get; set; } + + public IList Items { get; set; } +``` + +New _ChatMessageTextContentItem.cs_ + +```csharp +public class ChatMessageTextContentItem : ChatMessageContentItem +{ + public string Text { get; set; } + + public ChatMessageTextContentItem(string text) : base(ChatMessageContentType.Text) + { + this.Text = text; + } +} +``` + +New _ChatMessageImageContent.cs_ + +```csharp +public class ChatMessageImageContentItem : ChatMessageContentItem +{ + public Uri Uri { get; set; } + + public ChatMessageImageContentItem(Uri uri) : base(ChatMessageContentType.Image) + { + this.Uri = uri; + } +} +``` + +Usage: + +```csharp +var chatHistory = new ChatHistory("You are friendly assistant."); + +// Construct request +var userContentItems = new List +{ + new ChatMessageTextContentItem("What's in this image?"), + new ChatMessageImageContentItem(new Uri("https://link-to-image.com")) +}; + +chatHistory.AddUserMessage(userContentItems); + +// Get response +var message = await chatCompletionService.GetChatMessageContentAsync(chatHistory); + +foreach (var contentItem in message.Items) +{ + // Possibility to get content type (text or image). + var contentItemType = contentItem.Type; + + // Cast for specific content type + // Extension methods can be provided for better usability + // (e.g. message GetContent()). + if (contentItem is ChatMessageTextContentItem textContentItem) + { + Console.WriteLine(textContentItem); + } + + if (contentItem is ChatMessageImageContentItem imageContentItem) + { + Console.WriteLine(imageContentItem.Uri); + } +} +``` + +### Option #3: Add new property to `ChatMessageContent` - collection of content items + +This option will keep `string Content` property as it is, but will add new property - collection of `ContentBase` items. + +Updated _ChatMessageContent.cs_ + +```csharp +public class ChatMessageContent : ContentBase +{ + public AuthorRole Role { get; set; } + + public string? Content { get; set; } + + public ChatMessageContentItemCollection? Items { get; set; } +} +``` + +New _ChatMessageContentItemCollection.cs_ + +```csharp +public class ChatMessageContentItemCollection : IList, IReadOnlyList +{ + // Implementation of IList, IReadOnlyList to catch null values. +} +``` + +Usage: + +```csharp +var chatCompletionService = kernel.GetRequiredService(); + +var chatHistory = new ChatHistory("You are a friendly assistant."); + +chatHistory.AddUserMessage(new ChatMessageContentItemCollection +{ + new TextContent("What’s in this image?"), + new ImageContent(new Uri(ImageUri)) +}); + +var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory); + +Console.WriteLine(reply.Content); +``` + +## Decision Outcome + +Option #3 was preferred as it requires small amount of changes to existing hierarchy and provides clean usability for end-user. 
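For illustration, a short sketch of what this decision means for callers, assuming the existing string-based `AddUserMessage` overload is kept alongside the new collection-based one shown above:

```csharp
// Existing text-only callers keep working: the string still flows through the Content property.
var chatHistory = new ChatHistory("You are a friendly assistant.");
chatHistory.AddUserMessage("Describe the weather in one sentence.");

// Multi-modal callers opt into the new Items collection on the same ChatMessageContent type.
chatHistory.AddUserMessage(new ChatMessageContentItemCollection
{
    new TextContent("What's in this image?"),
    new ImageContent(new Uri("https://link-to-image.com"))
});
```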
+ +Diagram: +![Chat and Text models diagram](diagrams/chat-text-models.png) \ No newline at end of file diff --git a/docs/decisions/0025-planner-telemetry-enhancement.md b/docs/decisions/0025-planner-telemetry-enhancement.md new file mode 100644 index 000000000000..a9dcaffcdd2a --- /dev/null +++ b/docs/decisions/0025-planner-telemetry-enhancement.md @@ -0,0 +1,317 @@ +--- +status: { accepted } +contact: { TaoChenOSU } +date: { 2023-11-21 } +deciders: alliscode, dmytrostruk, markwallace, SergeyMenshykh, stephentoub +consulted: {} +informed: {} +--- + +# Planner Telemetry Enhancement + +## Context and Problem Statement + +It would be extremely beneficial for applications using Semantic Kernel's planning features to be able to continuously monitor the performance of planners and plans as well as debugging them. + +## Scenarios + +Contoso is a company that is developing an AI application using SK. + +1. Contoso needs to continuously monitor the token usage of a particular planner, including prompt tokens, completion tokens, and the total tokens. +2. Contoso needs to continuously monitor the time it takes for a particular planner to create a plan. +3. Contoso needs to continuously monitor the success rate of a particular planner in creating a valid plan. +4. Contoso needs to continuously monitor the success rate of a particular plan type being executed successfully. +5. Contoso wants to be able to see the token usage of a particular planner run. +6. Contoso wants to be able to see the time taken to create a plan of a particular planner run. +7. Contoso wants to be able to see the steps in a plan. +8. Contoso wants to be able to see the inputs&outputs of each plan step. +9. Contoso wants to change a few settings that may affect the performance of the planners. They would like to know how the performance will be affected before committing the changes. +10. Contoso wants to update to a new model that is cheaper and faster. They would like to know how the new model performs in planning tasks. + +## Out of scope + +1. We provide an example on how to send telemetry to Application Insights. Although other telemetry service options are supported technically, we will not cover possible ways of setting them up in this ADR. +2. This ADR does not seek to modify the current instrumentation design in SK. +3. We do not consider services that do not return token usage. + +## Decision Drivers + +- The framework should be telemetry service agnostic. +- The following metrics should be emitted by SK: + - Input token usage for prompt (Prompt) + - Description: A prompt is the smallest unit that consumes tokens (`KernelFunctionFromPrompt`). + - Dimensions: ComponentType, ComponentName, Service ID, Model ID + - Type: Histogram + - Example: + | ComponentType | ComponentName | Service ID | Model ID | Value | + |---|---|---|---|---| + | Function | WritePoem | | GPT-3.5-Turbo | 40 + | Function | TellJoke | | GPT-4 | 50 + | Function | WriteAndTellJoke | | GPT-3.5-Turbo | 30 + | Planner | CreateHandlebarsPlan | | GPT-3.5-Turbo | 100 + - Output token usage for prompt (Completion) + - Description: A prompt is the smallest unit that consumes tokens (`KernelFunctionFromPrompt`). 
+ - Dimensions: ComponentType, ComponentName, Service ID, Model ID + - Type: Histogram + - Example: + | ComponentType | ComponentName | Service ID | Model ID | Value | + |---|---|---|---|---| + | Function | WritePoem | | GPT-3.5-Turbo | 40 + | Function | TellJoke | | GPT-4 | 50 + | Function | WriteAndTellJoke | | GPT-3.5-Turbo | 30 + | Planner | CreateHandlebarsPlan | | GPT-3.5-Turbo | 100 + - Aggregated execution time for functions + - Description: A function can consist of zero or more prompts. The execution time of a function is the duration from start to end of a function's `invoke` call. + - Dimensions: ComponentType, ComponentName, Service ID, Model ID + - Type: Histogram + - Example: + | ComponentType | ComponentName | Value | + |---|---|---| + | Function | WritePoem | 1m + | Function | TellJoke | 1m + | Function | WriteAndTellJoke | 1.5m + | Planner | CreateHandlebarsPlan | 2m + - Success/failure count for planners + - Description: A planner run is considered successful when it generates a valid plan. A plan is valid when the model response is successfully parsed into a plan of desired format and it contains one or more steps. + - Dimensions: ComponentType, ComponentName, Service ID, Model ID + - Type: Counter + - Example: + | ComponentType | ComponentName | Fail | Success + |---|---|---|---| + | Planner | CreateHandlebarsPlan | 5 | 95 + | Planner | CreateHSequentialPlan | 20 | 80 + - Success/failure count for plans + - Description: A plan execution is considered successful when all steps in the plan are executed successfully. + - Dimensions: ComponentType, ComponentName, Service ID, Model ID + - Type: Counter + - Example: + | ComponentType | ComponentName | Fail | Success + |---|---|---|---| + | Plan | HandlebarsPlan | 5 | 95 + | Plan | SequentialPlan | 20 | 80 + +## Considered Options + +- Function hooks + - Inject logic to functions that will get executed before or after a function is invoked. +- Instrumentation + - Logging + - Metrics + - Traces + +## Other Considerations + +SK currently tracks token usage metrics in connectors; however, these metrics are not categorized. Consequently, developers cannot determine token usage for different operations. To address this issue, we propose the following two approaches: + +- Bottom-up: Propagate token usage information from connectors back to the functions. +- Top-down: Propagate function information down to the connectors, enabling them to tag metric items with function information. + +We have decided to implement the bottom-up approach for the following reasons: + +1. SK is already configured to propagate token usage information from connectors via `ContentBase`. We simply need to extend the list of items that need to be propagated, such as model information. +2. Currently, SK does not have a method for passing function information down to the connector level. Although we considered using [baggage](https://opentelemetry.io/docs/concepts/signals/baggage/#:~:text=In%20OpenTelemetry%2C%20Baggage%20is%20contextual%20information%20that%E2%80%99s%20passed,available%20to%20any%20span%20created%20within%20that%20trace.) as a means of propagating information downward, experts from the OpenTelemetry team advised against this approach due to security concerns. + +With the bottom-up approach, we need to retrieve the token usage information from the metadata: + +```csharp +// Note that not all services support usage details. +/// +/// Captures usage details, including token information. +/// +private void CaptureUsageDetails(string? 
modelId, IDictionary? metadata, ILogger logger) +{ + if (string.IsNullOrWhiteSpace(modelId)) + { + logger.LogWarning("No model ID provided to capture usage details."); + return; + } + + if (metadata is null) + { + logger.LogWarning("No metadata provided to capture usage details."); + return; + } + + if (!metadata.TryGetValue("Usage", out object? usageObject) || usageObject is null) + { + logger.LogWarning("No usage details provided to capture usage details."); + return; + } + + var promptTokens = 0; + var completionTokens = 0; + try + { + var jsonObject = JsonSerializer.Deserialize(JsonSerializer.Serialize(usageObject)); + promptTokens = jsonObject.GetProperty("PromptTokens").GetInt32(); + completionTokens = jsonObject.GetProperty("CompletionTokens").GetInt32(); + } + catch (Exception ex) when (ex is KeyNotFoundException) + { + logger.LogInformation("Usage details not found in model result."); + } + catch (Exception ex) + { + logger.LogError(ex, "Error while parsing usage details from model result."); + throw; + } + + logger.LogInformation( + "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}.", + promptTokens, completionTokens); + + TagList tags = new() { + { "semantic_kernel.function.name", this.Name }, + { "semantic_kernel.function.model_id", modelId } + }; + + s_invocationTokenUsagePrompt.Record(promptTokens, in tags); + s_invocationTokenUsageCompletion.Record(completionTokens, in tags); +} +``` + +> Note that we do not consider services that do not return token usage. Currently only OpenAI & Azure OpenAI services return token usage information. + +## Decision Outcome + +1. New metrics names: + | Meter | Metrics | + |---|---| + |Microsoft.SemanticKernel.Planning|
  • semantic_kernel.planning.invoke_plan.duration
| + |Microsoft.SemanticKernel| • semantic_kernel.function.invocation.token_usage.prompt<br/>• semantic_kernel.function.invocation.token_usage.completion
| + > Note: we are also replacing the "sk" prefixes with "semantic_kernel" for all existing metrics to avoid ambiguity. +2. Instrumentation + +## Validation + +Tests can be added to make sure that all the expected telemetry items are in place and of the correct format. + +## Description the Options + +### Function hooks + +Function hooks allow developers to inject logic to the kernel that will be executed before or after a function is invoked. Example use cases include logging the function input before a function is invoked, and logging results after the function returns. +For more information, please refer to the following ADRs: + +1. [Kernel Hooks Phase 1](./0005-kernel-hooks-phase1.md) +2. [Kernel Hooks Phase 2](./0018-kernel-hooks-phase2.md) + +We can inject, during function registration, default callbacks to log critical information for all functions. + +Pros: + +1. Maximum exposure and flexibility to the developers. i.e. App developers can very easily log additional information for individual functions by adding more callbacks. + +Cons: + +1. Does not create metrics and need additional works to aggregate results. +2. Relying only on logs does not provide trace details. +3. Logs are modified more frequently, which could lead an unstable implementation and require extra maintenance. +4. Hooks only have access to limited function data. + +> Note: with distributed tracing already implemented in SK, developers can create custom telemetry within the hooks, which will be sent to the telemetry service once configured, as long as the information is available in the hooks. However, telemetry items created inside the hooks will not be correlated to the functions as parent-child relationships, since they are outside the scope of the functions. + +### Distributed tracing + +Distributed tracing is a diagnostic technique that can localize failures and performance bottlenecks within distributed applications. .Net has native support to add distributed tracing in libraries and .Net libraries are also instrumented to produce distributed tracing information automatically. + +For more information, please refer to this document: [.Net distributed tracing](https://learn.microsoft.com/en-us/dotnet/core/diagnostics/) + +Overall pros: + +1. Native .Net support. +2. Distributed tracing is already implemented in SK. We just need to add more telemetry. +3. Telemetry service agnostic with [OpenTelemetry](https://opentelemetry.io/docs/what-is-opentelemetry/). + +Overall cons: + +1. Less flexibility for app developers consuming SK as a library to add custom traces and metrics. + +#### Logging + +Logs will be used to record interesting events while the code is running. + +```csharp +// Use LoggerMessage attribute for optimal performance +this._logger.LogPlanCreationStarted(); +this._logger.LogPlanCreated(); +``` + +#### [Metrics](https://learn.microsoft.com/en-us/dotnet/core/diagnostics/metrics) + +Metrics will be used to record measurements overtime. + +```csharp +/// for function-related metrics. +private static readonly Meter s_meter = new("Microsoft.SemanticKernel"); + +/// to record plan execution duration. +private static readonly Histogram s_planExecutionDuration = + s_meter.CreateHistogram( + name: "semantic_kernel.planning.invoke_plan.duration", + unit: "s", + description: "Duration time of plan execution."); + +TagList tags = new() { { "semantic_kernel.plan.name", planName } }; + +try +{ + ... +} +catch (Exception ex) +{ + // If a measurement is tagged with "error.type", then it's a failure. 
+ tags.Add("error.type", ex.GetType().FullName); +} + +s_planExecutionDuration.Record(duration.TotalSeconds, in tags); +``` + +#### [Traces](https://learn.microsoft.com/en-us/dotnet/core/diagnostics/distributed-tracing) + +Activities are used to track dependencies through an application, correlating work done by other components, and form a tree of activities known as a trace. + +```csharp +ActivitySource s_activitySource = new("Microsoft.SemanticKernel"); + +// Create and start an activity +using var activity = s_activitySource.StartActivity(this.Name); + +// Use LoggerMessage attribute for optimal performance +logger.LoggerGoal(goal); +logger.LoggerPlan(plan); +``` + +> Note: Trace log will contain sensitive data and should be turned off in production: https://learn.microsoft.com/en-us/dotnet/core/extensions/logging?tabs=command-line#log-level + +## Example of how an application would send the telemetry to Application Insights + +```csharp +using var traceProvider = Sdk.CreateTracerProviderBuilder() + .AddSource("Microsoft.SemanticKernel*") + .AddAzureMonitorTraceExporter(options => options.ConnectionString = connectionString) + .Build(); + +using var meterProvider = Sdk.CreateMeterProviderBuilder() + .AddMeter("Microsoft.SemanticKernel*") + .AddAzureMonitorMetricExporter(options => options.ConnectionString = connectionString) + .Build(); + +using var loggerFactory = LoggerFactory.Create(builder => +{ + // Add OpenTelemetry as a logging provider + builder.AddOpenTelemetry(options => + { + options.AddAzureMonitorLogExporter(options => options.ConnectionString = connectionString); + // Format log messages. This is default to false. + options.IncludeFormattedMessage = true; + }); + builder.SetMinimumLevel(MinLogLevel); +}); +``` + +## More information + +Additional works that need to be done: + +1. Update [telemetry doc](../../dotnet/docs/TELEMETRY.md) diff --git a/docs/decisions/0030-branching-strategy.md b/docs/decisions/0030-branching-strategy.md new file mode 100644 index 000000000000..d0a884ac2e69 --- /dev/null +++ b/docs/decisions/0030-branching-strategy.md @@ -0,0 +1,130 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: proposed +contact: SergeyMenshykh +date: 2024-01-04 +deciders: markwallace-microsoft +consulted: rogerbarreto, dmytrostruk +informed: +--- + +# SK Branching Strategy + +## Industry-adopted branching strategies +There are several industry-adopted branching strategies for Git, such as GitHub Flow, Git-Flow, and GitLab Flow. However, we will only focus on the two most widely-used ones: GitHub Flow and Git-Flow. + +### GitHub Flow +GitHub Flow is a straightforward branching strategy that centres around the 'main' branch. Developers create a new branch for each feature or bugfix, make changes, submit a pull request, and merge the changes back to the 'main' branch. Releases are done directly from the 'main' branch, making this model ideal for projects with continuous integration/deployment. Learn more about [GitHub Flow](https://docs.github.com/en/get-started/quickstart/github-flow). + +GitFlow + +[Image source](https://www.abtasty.com/blog/git-branching-strategies/) + +Pros: +- Straightforward with fewer branches to manage and less merge conflicts. +- No long running development branches. + +Cons: +- Not as well organized as Git-Flow. +- The 'main' branch can get cluttered more easily since it functions as both the production and development branch. 
+
+### Git-Flow
+Git-Flow is a branching strategy that organizes software development around two long-lived main branches, 'main' and 'develop', along with short-lived feature, release, and hotfix branches. Developers work on new features in feature branches, which are then merged into the 'develop' branch. When preparing for a release, to avoid blocking future release features, a release branch is created, and once finalized (testing & bug fixing), it is merged into both 'main' and 'develop'. Hotfix branches in Git-Flow are created from the 'main' branch to address critical bug fixes and are subsequently merged back into both the 'main' and 'develop' branches. The actual release (deployable artifact) is done from the 'main' branch, which reflects actual production-worthy official releases. Learn more about [Git-Flow](https://nvie.com/posts/a-successful-git-branching-model/).
+
+GitFlow
+
+Pros:
+- Clear separation between code under development and production-ready code.
+- Efficient release management.
+
+Cons:
+- More complex than GitHub Flow, which may be overwhelming for smaller teams or projects that do not require as much structure.
+- Less suited for projects that prioritize continuous deployment, as it emphasizes a more controlled release process.
+- Not ideal for projects with continuous deployment due to the overhead of managing multiple branches.
+- Spaghetti history in Git - [GitFlow considered harmful](https://www.endoflineblog.com/gitflow-considered-harmful)
+
+## SK branching strategies
+Today, the SK SDK is available in three languages: .NET, Java and Python. All of them coexist in the same Git repository, organized under corresponding folders. However, the branching strategies for those differ.
+
+For both the .NET and Python versions, development takes place in short-lived topic branches that branch off the 'main' branch. These topic branches are merged back into the 'main' branch when features are considered production-ready through PR reviews, unit tests, and integration test runs. Releases are carried out directly from the 'main' branch. This approach aligns with the GitHub Flow branching strategy, with a minor deviation where releases are conducted weekly rather than being continuously deployed.
+
+The Java version of SK adheres to the Git-Flow strategy by being developed in a dedicated development branch. Topic branches are created from the development branch and merged back through pull requests after unit tests and integration test runs. Release branches are also created from the development branch and merged to both the development branch and the 'main' one when a release is considered production-ready. This strategy deviates slightly from vanilla Git-Flow in that release artifacts are generated from release branches rather than from the 'main' branch.
+
+## Decision Drivers
+- The strategy should be easy to implement and maintain without requiring significant investments.
+- The strategy should allow for maintaining several releases in parallel if required.
+- Ideally, the strategy is intuitive and simple so that everyone familiar with Git can adopt and follow it.
+- Ideally, all SK languages are able to adopt and use the same branching strategy.
+- Ability to continually deploy new releases with minimal overhead.
+- Ability to release language versions independently and on different schedules.
+- Allow the .NET, Java and Python teams to be able to operate independently.
+- Ability to patch a release (for all languages).
+- Consolidation of PRs and Issues to simplify the triage and review process.
+
+Another aspect to consider when deciding on a branching strategy for SK is access permissions and action scopes. GitHub does not allow enforcing access restrictions on just a part of a repository, such as a folder. This means that it is not possible to restrict SK .NET contributors from pushing Python PRs, which ideally should be done by the corresponding team. However, GitHub does allow assigning access permissions to a branch, which can be successfully leveraged if the appropriate strategy option is chosen. A similar issue occurs with GitHub's required actions/status checks, which can only be set at the branch level. Considering that development for .NET and Python takes place in the 'main' branch, and status checks are configured per branch rather than per folder, it is not possible to configure separate status checks for .NET and Python PRs. As a result, the same status check runs for both .NET and Python PRs, even though it may not be relevant to a specific language.
+
+!["Net PR status checks"](./diagrams/net-pr-status-checks.png)
+
+Regardless of the chosen strategy, it should be possible to support multiple versions of SK. For example, applying a bug fix or a security patch to released SK v1.1.0 and v2.4.0 should be feasible while working on v3.0.0. One way to achieve this would be to create a release branch for each SK release, so that the required patch/fix can be pushed to that branch and released from it. However, marking released commits with tags should suffice, as it is always possible to create a new branch from a tag retrospectively when needed, if at all. Existing release pipelines should accept a source branch as a parameter, enabling releases from any branch and not only from the 'main' one.
+
+## Considered Options
+
+### Repository per SK language
+This option suggests having a separate GitHub repository for each SK language. These repositories can be created under a corresponding organization. Development and releases will follow GitHub Flow, with new features and fixes being developed in topic branches that are created from the 'main' branch and eventually merged back.
+
+Pros:
+- Each repository will have only language-specific status checks and actions.
+- Branch commits and release history will not contain irrelevant commits or releases.
+- Utilizes the familiar GitHub Flow without Git-Flow overhead, resulting in a shorter learning curve.
+- Access permissions are limited to the specific owning team.
+
+Cons:
+- There is an initial overhead in setting up the three repositories.
+- There may be potential ongoing maintenance overhead for the three repositories.
+- Secrets must be managed across three repositories instead of just one.
+- Each repo will have a backlog that will have to be managed separately.
+
+### Branch per SK language
+This option involves having a dedicated, language-specific development branch for each SDK language: 'net-development', 'java-development', and 'python-development'. SDK Java is already using this option. Development and releases will follow GitHub Flow, with new features and fixes being developed in topic branches that are branched off the corresponding language branch and eventually merged back.
+
+Pros:
+- Simple, language-specific status checks, actions and rules configured per language branch.
+- Allow only teams that own language-specific branches to push or merge to them, rather than just approving PRs.
+- Branch commits history does not contain irrelevant commits. + +Cons: +- GitHub release history contains releases for all languages. +- Language-specific branches may not be straightforward to discover/use. + +This option has two sub-options that define the way the 'main' branch is used: +1. The 'main' branch will contain general/common artifacts such as documentation, GitHub actions, and samples. All language folders will be removed from the 'main' branch, and it can be locked to prevent accidental merges. +2. The 'main' branch will include everything that dev branches have for discoverability purposes. A job/action will be implemented to merge commits from dev branches to the 'main' branch. The number of common artifacts between SK languages should be minimized to reduce the potential for merge conflicts. A solution for the squash merge problem that SK Java is experiencing today should be found before deciding on the sub-option. + +The second sub-option is preferred over the first one due to its discoverability benefits. There is no need to select a development branch in the GitHub UI when searching for something in the repository. The 'main' branch is selected by default, and as soon as the latest bits are in the branch, they can be found easily. This intuitive approach is familiar to many, and changing it by requiring the selection of a branch before searching would complicate the search experience and introduce frustration. + +### All SK languages in the 'main' +This option assumes maintaining the code for all SK languages - .NET, Java, and Python in the 'main' branch. Development would occur using typical topic branches, while releases would also be made from the 'main' branch. This is the strategy currently adopted by .NET and Python, and corresponds to the GitHub Flow. + +Pros: +- All code in one place - the 'main' branch. +- Familiar GitHub Flow, no Git-Flow overhead - shorter learning curve. + +Cons: +- Branch commits/release history contains irrelevant commits/releases. +- Complex and irrelevant GitHub status checks/actions. +- PRs can be pushed by non-owner teams. + +### Current 'Hybrid' approach +This choice keeps the existing method used by SK. .NET and Python development is done in the 'main' branch using GitHub Flow, while Java development happens in the java-development branch following Git-Flow. + +Pros: +- No changes required. +- Each SK language uses a strategy that is convenient for it. + +Cons: +- Branch commits/release history contains irrelevant commits/releases. +- Complex and irrelevant GitHub status checks/actions. +- PRs can be pushed by non-owner teams. + +## Decision Outcome +Chosen option: "Current 'Hybrid' approach" because it works with minor inefficiencies (such as cluttered release history and multi-language complex actions) and requires no investments now. Later, depending on the team size and the problems the team encounters with the "Current 'Hybrid' approach," we may consider either the 'Repository per SK language' option or the 'Branch per SK language' one. 
diff --git a/docs/decisions/0031-feature-branch-strategy.md b/docs/decisions/0031-feature-branch-strategy.md
new file mode 100644
index 000000000000..4f96f690ca27
--- /dev/null
+++ b/docs/decisions/0031-feature-branch-strategy.md
@@ -0,0 +1,156 @@
+---
+# Strategy for Community Driven Connectors and Features
+
+status: approved
+contact: rogerbarreto
+date: 2024-01-24
+deciders: rogerbarreto, markwallace-microsoft, dmytrostruk, sergeymenshik
+consulted:
+informed:
+---
+
+# Strategy for Community Driven Connectors and Features
+
+## Context and Problem Statement
+
+Connectors are typically medium-to-complex new features that can be developed by a single person or a team. In order to avoid conflicts and to have better control of the development process, we strongly suggest the use of a Feature Branch Strategy in our repositories.
+
+In our current software development process, managing changes in the main branch has become increasingly complex, leading to potential conflicts and delays in release cycles.
+
+## Standards and Guidelines Principles
+
+- **Pattern**: The Feature Branch Strategy is a well-known pattern for managing changes in a codebase. It is widely used in the industry and is supported by most version control systems, including GitHub. It also gives a clearer picture of how the community can meaningfully contribute to the development of connectors or any other bigger feature for SK.
+- **Isolated Development Environments**: By using feature branches, each developer can work on different aspects of the project without interfering with others' work. This isolation reduces conflicts and ensures that the main branch remains stable.
+- **Streamlined Integration**: Feature branches simplify the process of integrating new code into the main branch. By dealing with smaller, more manageable changes, the risk of major conflicts during integration is minimized.
+- **Efficiency in Code Review**: Smaller, more focused changes in feature branches lead to quicker and more efficient code reviews. This efficiency is not just about the ease of reviewing less code at a time but also about the time saved in understanding the context and impact of the changes.
+- **Reduced Risk of Bugs**: Isolating development in feature branches reduces the likelihood of introducing bugs into the main branch. It's easier to identify and fix issues within the confined context of a single feature.
+- **Timely Feature Integration**: Small, incremental pull requests allow for quicker reviews and faster integration of features into the feature branch, and make it easier to merge down into main because the code was already reviewed. This timeliness ensures that features are merged and ready for deployment sooner, improving the responsiveness to changes.
+- **Code Testing, Coverage and Quality**: To maintain good code quality, it is imperative that any new code or feature introduced to the codebase is properly tested and validated. Any new feature or code should be covered by unit tests and integration tests. The code should also be validated by our CI/CD pipeline and follow our code quality standards and guidelines.
+- **Examples**: Any new feature or code should be accompanied by examples that demonstrate how to use the new feature or code. This is important to ensure that the new feature or code is properly documented and that the community can easily understand and use it.
+
+### Community Feature Branch Strategy
+
+As soon as we identify that contributors are willing to take/create a Feature Issue as a potential connector implementation, we will create a new branch for that feature.
+
+Once we have agreed to take a new connector, we will work with the contributors to make sure the implementation progresses and is supported if needed.
+
+The contributor(s) will then be responsible for incrementally adding the majority of changes through small Pull Requests to the feature branch, under our supervision and review process.
+
+This strategy involves creating a separate branch in the repository for each new big feature, like connectors. This isolation means that changes are made in a controlled environment without affecting the main branch.
+
+We may also engage in development and make changes to the feature branch when needed; such changes and full or co-authorship of the PRs will be tracked and properly referenced in the Release Notes.
+
+#### Pros and Cons
+
+- Good, because it allows for focused development on one feature at a time.
+- Good, because it promotes smaller, incremental Pull Requests (PRs), simplifying review processes.
+- Good, because it reduces the risk of major bugs being merged into the main branch.
+- Good, because it makes the process of integrating features into the main branch easier and faster.
+- Bad, potentially, if not managed properly, as it can lead to outdated branches if not regularly synchronized with the main branch.
+
+## Local Deployment Platforms / Offline
+
+### LM Studio
+
+LM Studio has a local deployment option, which can be used to deploy models locally. This option is available for Windows, Linux, and MacOS.
+
+Pros:
+
+- API is very similar to OpenAI API
+- Many models are already supported
+- Easy to use
+- Easy to deploy
+- GPU support
+
+Cons:
+
+- May require a license to use in a work environment
+
+### Ollama
+
+Ollama has a local deployment option, which can be used to deploy models locally. This option is available for Linux and MacOS only for now.
+
+Pros:
+
+- Easy to use
+- Easy to deploy
+- Supports Docker deployment
+- GPU support
+
+Cons:
+
+- API is not similar to OpenAI API (needs a dedicated connector)
+- No Windows support
+
+### Comparison
+
+| Feature               | Ollama                                              | LM Studio                                                                                |
+| --------------------- | --------------------------------------------------- | ---------------------------------------------------------------------------------------- |
+| Local LLM             | Yes                                                 | Yes                                                                                      |
+| OpenAI API Similarity | No                                                  | Yes                                                                                      |
+| Windows Support       | No                                                  | Yes                                                                                      |
+| Linux Support         | Yes                                                 | Yes                                                                                      |
+| MacOS Support         | Yes                                                 | Yes                                                                                      |
+| Number of Models      | [61](https://ollama.ai/library) +Any GGUF converted | [25](https://github.com/lmstudio-ai/model-catalog/tree/main/models) +Any GGUF Converted |
+
+| Model Support   | Ollama | LM Studio |
+| --------------- | ------ | --------- |
+| Phi-2 Support   | Yes    | Yes       |
+| Llama-2 Support | Yes    | Yes       |
+| Mistral Support | Yes    | Yes       |
+
+## Connector/Model Priorities
+
+Currently, we are looking for community support on the following models.
+
+Support for the models below can be achieved either by creating a practical example that uses one of the existing Connectors against one of these models, or by providing a new Connector that supports a deployment platform hosting one of these models:
+
+| Model Name | Local Support | Deployment                             | Connectors                                             |
+| ---------- | ------------- | -------------------------------------- | ------------------------------------------------------ |
+| Gpt-4      | No            | OpenAI, Azure                          | Azure+OpenAI                                            |
+| Phi-2      | Yes           | Azure, Hugging Face, LM Studio, Ollama | OpenAI, HuggingFace, LM Studio\*\*\*, Ollama\*\*        |
+| Gemini     | No            | Google AI Platform                     | GoogleAI\*\*                                            |
+| Llama-2    | Yes           | Azure, LM Studio, HuggingFace, Ollama  | HuggingFace, Azure+OpenAI, LM Studio\*\*\*, Ollama\*\*  |
+| Mistral    | Yes           | Azure, LM Studio, HuggingFace, Ollama  | HuggingFace, Azure+OpenAI, LM Studio\*\*\*, Ollama\*\*  |
+| Claude     | No            | Anthropic, Amazon Bedrock              | Anthropic\*\*, Amazon\*\*                               |
+| Titan      | No            | Amazon Bedrock                         | Amazon\*\*                                              |
+
+_\*\* Connectors not yet available_
+
+_\*\*\* May not be needed as an OpenAI Connector can be used_
+
+Connectors may be needed not on a per-model basis but rather per deployment platform.
+For example, using the OpenAI or HuggingFace connector you may be able to call a Phi-2 model.
+
+## Expected Connectors to be implemented
+
+The following deployment platforms are not yet supported by any Connectors, and we strongly encourage the community to engage and help support them. The priorities below are ordered, but they do not necessarily need to be implemented sequentially:
+
+| Deployment Platform | Local Model Support |
+| ------------------- | ------------------- |
+| Ollama              | Yes                 |
+| GoogleAI            | No                  |
+| Anthropic           | No                  |
+| Amazon              | No                  |
+
+## Decision Outcome
+
+Chosen option: "Feature Branch Strategy", because it allows individual features to be developed in isolation, minimizing conflicts with the main branch and facilitating easier code reviews.
+
+## Frequently Asked Questions
+
+### Is there a migration strategy for initiatives that followed the old contribution way with forks, and now have to switch to branches in microsoft/semantic-kernel?
+
+Proceed normally with the fork and PR targeting `main`. As soon as we identify that your contribution PR to main is a big and desirable feature (look at the ones we described as expected in this ADR), we will create a dedicated feature branch (`feature-yourfeature`) to which you can retarget your fork's PR.
+All further incremental changes and contributions will follow as normal, but instead of `main` you will be targeting the `feature-*` branch. + +### How do you want to solve the "up to date with main branch" problem? + +This will happen when we all agreed that the current feature implementation is complete and ready to merge in `main`. + +As soon the feature is finished, a merge from main will be pushed into the feature branch. +This will normally trigger the conflicts that need to be sorted. +That normally will be the last PR targeting the feature branch which will be followed right away by another PR from the `feature` branch targeting `main` with minimal conflicts if any. +The merging to main might be fast (as all the intermediate feature PRs were all agreed and approved before) diff --git a/docs/decisions/0032-agents.md b/docs/decisions/0032-agents.md new file mode 100644 index 000000000000..016a14f527fa --- /dev/null +++ b/docs/decisions/0032-agents.md @@ -0,0 +1,276 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: proposed +contact: SergeyMenshykh +date: 2024-01-24 +deciders: markwallace-microsoft, matthewbolanos +consulted: rogerbarreto, dmytrostruk +informed: +--- + +# SK Agents Overview and High Level Design + +## Context and Problem Statement +Currently, agents in SK .NET are represented by the [IAgent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Experimental/Agents/IAgent.cs) interface, the [Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Experimental/Agents/Internal/Agent.cs) class, and a set of classes in the [Agents folder](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Experimental/Agents). These classes enable agent communication with the OpenAI Assistant API, agent collaboration, and more. + +The [Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Experimental/Agents/Internal/Agent.cs) class is the only implementation of an agent in SK that utilizes the [OpenAI Assistant API](https://platform.openai.com/docs/assistants/how-it-works). It accomplishes this through a series of abstractions, which are implemented as wrappers around the OpenAI Assistant API, hiding the complexity and details of HTTP calls to the OpenAI API. + +The [IAgent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Experimental/Agents/IAgent.cs) interface is implemented by the Agent class. This interface is intended to be implemented by all the future agents. Its design was shaped around the OpenAI Assistant API and contains several members that might not be relevant to all agents, such as NewThreadAsync, AddFileAsync, FileIds, etc. This can make it more challenging to use the interface when building custom agents. + +Current agent functionality lacks the necessary building blocks and a unified agent interface, which would enable quick and easy configuration of how agents collaborate. For example, it would be beneficial to create several agents, add them to an agent chat, and ask them to collaborate on solving a task at hand without writing any code to facilitate the collaboration. In cases where turn-based communication between two or more agents is required, it should be a matter of simply adding the agents to the agent turn-based chat to enable their collaboration. 
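+
+To make the friction described above concrete, the sketch below shows roughly what a custom agent that has no use for the OpenAI Assistant concepts would be forced to implement against the current `IAgent` shape. The member names come from the interface mentioned above; the exact signatures are simplified assumptions rather than the real API:
+
+```C#
+// Hypothetical custom agent targeting a plain chat completion endpoint.
+// Signatures are illustrative and simplified, not the actual IAgent contract.
+public sealed class MyCustomChatAgent /* : IAgent */
+{
+    public string Name => "Custom";
+
+    // Assistant-specific members that have no meaning for this agent:
+    public IEnumerable<string> FileIds => Array.Empty<string>();
+
+    public Task<string> AddFileAsync(Stream content, CancellationToken cancellationToken = default) =>
+        throw new NotSupportedException("This agent does not manage files.");
+
+    public Task NewThreadAsync(CancellationToken cancellationToken = default) =>
+        throw new NotSupportedException("This agent has no notion of Assistant threads.");
+}
+```
+
+A unified, minimal abstraction (proposed below) avoids forcing such members onto agents that do not need them.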
+
+## Out of Scope
+This ADR outlines a high-level design for SK agents, with the primary goal of getting everyone on the same page regarding potential implementations and gathering early feedback to determine the next steps. As such, the design for certain aspects of agents may be omitted and completed as part of separate ADRs or PRs, depending on the complexity of the area.
+
+## Decision Drivers
+- SK should provide sufficient abstraction to enable the construction of agents that could utilize potentially any LLM API.
+- SK should provide sufficient abstraction and building blocks for the most frequent types of agent collaboration. It should be easy to add new blocks as new collaboration methods emerge.
+- SK should provide building blocks to modify agents' input and output to cover various customization scenarios.
+- SK agents' input and output should be represented by the same data type, making it easy to pass the output of one agent as input to another.
+- SK agents should leverage all SK tools: DI, plugins, function calling, etc.
+- The SK agents API should be as simple as possible so that other libraries can build their agents on top of it if needed.
+
+## Agents Overview
+What is an agent? An agent is a class that has the following attributes and abilities:
+- Name: Allows the agent to impersonate a persona.
+- Tools/Functions: Enables the agent to perform specific tasks or actions.
+- System Instructions: A set of directives that guide the agent's behavior.
+- Settings: Agent-specific settings, including LLM settings such as Temperature, TopP, StopSequence, etc.
+- Interaction: It's possible to interact with the agent by sending and receiving messages.
+
+## OpenAI Assistant API
+
+OpenAI Assistant API Objects.png
+
+[Source](https://platform.openai.com/docs/assistants/how-it-works/objects)
+
+[Playground](https://platform.openai.com/playground)
+
+## Unified Interface And Data Contract
+To enable collaboration and customization scenarios, further details of which will be provided below, agents should have a unified interface. They should inherit from the same abstract Agent class and have one data type - AgentMessage - for both input and output data contracts. This will enable a very powerful and extensible model, allowing the addition of any agent to any chat/collaboration, regardless of its implementation, and enabling a seamless chat conversation where the agent's output message is added to the chat without any conversion, and the chat is sent as input for the next agent without any conversion as well.
+
+For example, the scenario where two agents need to communicate together can be configured like this:
+```C#
+class AgentTurnBasedChat(Agent[] agents)
+{
+    async Task<AgentMessage[]> StartConversationAsync(AgentMessage[] messages)
+    {
+        var chat = new List<AgentMessage>(messages);
+
+        var nextAgentIndex = 0;
+
+        while (chatExitCondition)
+        {
+            var nextAgent = agents[nextAgentIndex];
+
+            var result = await nextAgent.InvokeAsync(chat); // Agents accept a list of the 'AgentMessage' class, so chat can be passed without any conversion required.
+
+            chat.AddRange(result); // The agent's result can be added back to the chat as is, without any conversion needed.
+
+            nextAgentIndex = ...;
+        }
+
+        return chat.ToArray();
+    }
+}
+
+Agent copywriter = new ChatCompletionAgent(name: "Mike", instructions: "You're a helpful ....", ...); // Agent using the Chat Completion API
+
+Agent artDirector = new OpenAIAssistant(name: "Roy", instructions: ...); // Agent using the OpenAI Assistant API.
+
+AgentTurnBasedChat chat = new(agents: new[] { copywriter, artDirector }); // Any agent can be added to the chat as long as it inherits from the 'Agent' class.
+
+var result = await chat.StartConversationAsync(new[] { new AgentMessage(role: "user", content: "collaborate on advertising campaigns for our latest product ....") });
+
+```
+Pros:
+- No changes are needed for existing agent chat/collaboration classes to accommodate new agents.
+- No custom logic is required in the chat/collaboration classes to support new agents.
+
+Cons:
+- Some features of certain agents might not be fully utilized, as chat/collaboration classes are only aware of the unified interface supported by all agents.
+
+## Agent Collaboration
+There are three identified methods for agents to collaborate with each other so far. As new collaboration strategies are discovered, it should be relatively easy to support them. One prerequisite for this is to ensure that agents remain unaware of the conversations they participate in, maintaining a one-way dependency - the chat/collaboration blocks know about the agents, but the agents do not know about them.
+
+### Collaboration Chat
+This collaboration strategy assumes the presence of a facilitator/coordinator agent - an admin, along with agent participants. All agents collaborate in the same chat, following the order determined by the admin agent. The admin agent decides on the next agent each time the chat is updated (new messages returned by an agent are added to the chat) by asking the LLM to select the next agent based on the chat history and the list of agents. The conversation continues until the exit condition (which can be configured on the admin agent using the customization blocks described below) is met or the maximum number of turns is reached.
+
+```C#
+class AgentCollaborationChat(Agent admin, Agent[] participants)
+{
+    async Task<AgentMessage[]> StartConversationAsync(AgentMessage[] messages)
+    {
+        var chat = new List<AgentMessage>(messages);
+
+        while (chatExitCondition)
+        {
+            var nextAgent = GetNextAgent(chat); // Admin agent decides on the next best agent to continue the conversation.
+
+            var result = await nextAgent.InvokeAsync(chat);
+
+            chat.AddRange(result);
+        }
+
+        return chat.ToArray();
+    }
+
+    private Agent GetNextAgent(IList<AgentMessage> chat)
+    {
+        var agentNames = string.Join(",", participants.Select(p => p.Name));
+
+        var ask = new AgentMessage(role: "assistant", content: $"Identify the next agent based on the chat history and the list of agents - {agentNames}.");
+
+        var response = admin.InvokeAsync(chat.Concat(new[] { ask }));
+
+        return participants.Single(p => p.Name == response.Content);
+    }
+}
+
+Agent projectManager = new ChatCompletionAgent(name: "Mike", instructions: "You're a PM working on the new TODO app ....", ...);
+
+Agent designer = new ChatCompletionAgent(name: "Peter", instructions: "You're a UI/UX designer ....", ...);
+
+Agent engineer = new OpenAIAssistant(name: "Roy", instructions: "You are an engineer with front-end skills ...");
+
+AgentCollaborationChat chat = new(admin: projectManager, participants: new[] { designer, engineer });
+
+var result = await chat.StartConversationAsync(new[] { new AgentMessage(role: "user", content: "collaborate on a new user experience for the 'Add item' feature.") });
+
+```
+
+### Turn-Based Chat
+The turn-based collaboration strategy involves agents taking turns in a conversation, following a predetermined order. Each agent is invoked after the previous one, continuing until the exit condition is met or the maximum number of turns is reached.
Once the last agent has finished, the turn is given back to the first agent in the sequence.
+
+```C#
+class AgentTurnBasedChat(Agent[] agents)
+{
+    async Task<AgentMessage[]> StartConversationAsync(AgentMessage[] messages)
+    {
+        var chat = new List<AgentMessage>(messages);
+
+        var nextAgentIndex = 0;
+
+        while (chatExitCondition)
+        {
+            var nextAgent = agents[nextAgentIndex];
+
+            var result = await nextAgent.InvokeAsync(chat);
+
+            chat.AddRange(result);
+
+            nextAgentIndex = (nextAgentIndex + 1) % agents.Length;
+        }
+
+        return chat.ToArray();
+    }
+}
+
+Agent copywriter = new ChatCompletionAgent(name: "Mike", instructions: "You're a helpful ....", ...);
+
+Agent artDirector = new OpenAIAssistant(name: "Roy", instructions: ...);
+
+AgentTurnBasedChat chat = new(agents: new[] { copywriter, artDirector });
+
+var result = await chat.StartConversationAsync(new[] { new AgentMessage(role: "user", content: "collaborate on advertising campaigns for our latest product ....") });
+
+```
+
+### Agents As Plugins
+This type of collaboration more closely resembles a delegation method of communication, as the agents are not collaborating in a chat but rather "delegating" by having one agent call the others as functions.
+```C#
+Agent designer = new ChatCompletionAgent(name: "Peter", instructions: "You're a UI/UX designer ....", ...);
+
+Agent engineer = new OpenAIAssistant(name: "Roy", instructions: "You are an engineer with front-end skills ...");
+
+Agent projectManager = new ChatCompletionAgent(name: "Mike", instructions: "You're a PM working on the new TODO app ....", ...);
+projectManager.Plugins.Add(designer.AsPlugin());
+projectManager.Plugins.Add(engineer.AsPlugin());
+
+var result = await projectManager.InvokeAsync(new[] { new AgentMessage(role: "user", content: "Work with the design and engineering teams to produce a draft version of 'Add Item' UI.") });
+```
+
+Similarly, since agents can be represented as plugins, nothing prevents registering them as plugins on the Kernel so that it calls the agents as it would call any other plugin/function if necessary.
+```C#
+Agent designer = new ChatCompletionAgent(name: "Peter", instructions: "You're a UI/UX designer ....", ...);
+
+Agent engineer = new OpenAIAssistant(name: "Roy", instructions: "You are an engineer with front-end skills ...");
+
+Kernel kernel = Kernel.CreateBuilder().Build();
+kernel.Plugins.Add(designer.AsPlugin());
+kernel.Plugins.Add(engineer.AsPlugin());
+
+var result = await kernel.InvokePromptAsync("Work with the design and engineering teams to produce a draft version of 'Add Item' UI.");
+```
+
+## Agent Customization & Filters
+To cover complex agent collaboration scenarios, it might be necessary to modify agents' input and/or output messages as they travel to and from Agents. This may be useful for various scenarios, such as converting agents' message content from one format/type to another. There could be situations when messages should not be propagated to Agents or not added to the collaboration chat. For example, in the scenario above where the PM, designer, and engineer collaborate on a new experience for the TODO app, the PM's behavior could be extended to generate a chat exit signal based on whether the function used by the agent indicates that the new 'Add item' user experience is good enough as a POC:
+
+```C#
+class AgentCollaborationChat(Agent admin, Agent[] participants)
+{
+    async Task<AgentMessage[]> StartConversationAsync(AgentMessage[] messages)
+    {
+        ...
+        while (chatExitCondition)
+        {
+            ...
+            var result = await nextAgent.InvokeAsync(chat);
+            ...
+            chatExitCondition = result.Content.Contains("exit_chat"); // The chat can be parameterized with an agent condition or callback.
+        }
+        ...
+    }
+}
+
+Agent projectManager = new ChatCompletionAgent(name: "Mike", instructions: "You're a PM working on the new TODO app ....", ...);
+projectManager.Plugins.AddFromType<UsabilityPlugin>(); // Illustrative plugin exposing the 'EvaluateUsability' function.
+projectManager = projectManager.PostProcess((reply) => {
+    if (reply.IsFunctionCall)
+    {
+        if (reply.FunctionName == "EvaluateUsability")
+        {
+            if (reply.FunctionResult.IsCoreFunctionalityAccessible)
+            {
+                return new[] { new AgentMessage(role: "system", content: "exit_chat") };
+            }
+
+            return new[] { new AgentMessage(role: "system", content: "Peter/designer please improve the design so that core functionality is accessible.") };
+        }
+    }
+    return reply;
+});
+
+Agent designer = new ChatCompletionAgent(name: "Peter", instructions: "You're a UI/UX designer ....", ...);
+
+Agent engineer = new OpenAIAssistant(name: "Roy", instructions: "You are an engineer with front-end skills ...");
+
+AgentCollaborationChat chat = new(admin: projectManager, participants: new[] { designer, engineer });
+
+var result = await chat.StartConversationAsync(new[] { new AgentMessage(role: "user", content: "collaborate on a new user experience for the 'Add item' feature.") });
+```
+Some scenarios can be implemented without filters, using just plugins. However, this may require extra hops to LLMs and prompt tuning to get those scenarios working.
+
+Filter examples:
+- PostProcess: Accepts an agent and a delegate capable of modifying or replacing messages returned by the agent. It calls the agent and then calls the delegate with the agent's response messages. Finally, it returns the modified messages to the caller.
+- PreProcess: Accepts an agent and a delegate capable of modifying or replacing messages to be passed to the agent. It calls the callback to handle the messages and passes the modified messages to the agent. It returns the agent's result to the caller.
+
+Each of these filters can be implemented as a decorator, which makes it possible to build agent pipelines/chains:
+```C#
+var agent = new ChatCompletionAgent(...)
+    .PostProcess((m) => { Console.WriteLine($"Agent response: {m.Content}"); return m; })
+    .PreProcess(m => { Console.WriteLine($"User input: {m.Content}"); return m; });
+
+await agent.InvokeAsync(...);
+
+```
+
+SK, today, already has the concept of filters for [prompts](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/IPromptFilter.cs) and [functions](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Filters/Function/IFunctionFilter.cs). Ideally, the same approach should be taken for Agent filters.
+
+## Decision Outcome
+
+Chosen option: "{title of option 1}", because
+{justification. e.g., only option, which meets k.o. criterion decision driver | which resolves force {force} | … | comes out best (see below)}.
+
diff --git a/docs/decisions/0033-kernel-filters.md b/docs/decisions/0033-kernel-filters.md
new file mode 100644
index 000000000000..ede793684986
--- /dev/null
+++ b/docs/decisions/0033-kernel-filters.md
@@ -0,0 +1,150 @@
+---
+# These are optional elements. Feel free to remove any of them.
+status: accepted
+contact: dmytrostruk
+date: 2023-01-23
+deciders: sergeymenshykh, markwallace, rbarreto, stephentoub, dmytrostruk
+---
+
+# Kernel Filters
+
+## Context and Problem Statement
+
+The current way of intercepting events during function execution works as expected using Kernel Events and event handlers. Example:
+
+```csharp
+ILogger logger = loggerFactory.CreateLogger("MyLogger");
+
+var kernel = Kernel.CreateBuilder()
+    .AddOpenAIChatCompletion(
+        modelId: TestConfiguration.OpenAI.ChatModelId,
+        apiKey: TestConfiguration.OpenAI.ApiKey)
+    .Build();
+
+void MyInvokingHandler(object? sender, FunctionInvokingEventArgs e)
+{
+    logger.LogInformation("Invoking: {FunctionName}", e.Function.Name);
+}
+
+void MyInvokedHandler(object? sender, FunctionInvokedEventArgs e)
+{
+    if (e.Result.Metadata is not null && e.Result.Metadata.ContainsKey("Usage"))
+    {
+        logger.LogInformation("Token usage: {TokenUsage}", e.Result.Metadata?["Usage"]?.AsJson());
+    }
+}
+
+kernel.FunctionInvoking += MyInvokingHandler;
+kernel.FunctionInvoked += MyInvokedHandler;
+
+var result = await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.");
+```
+
+There are a couple of problems with this approach:
+
+1. Event handlers do not support dependency injection. It's hard to get access to a specific service registered in the application, unless the handler is defined in the same scope where that service is available. This limits where in the solution the handler can be defined (e.g. if a developer wants to use `ILoggerFactory` in a handler, the handler should be defined in a place where an `ILoggerFactory` instance is available).
+2. It's not clear at what point of the application lifetime the handler should be attached to the kernel. It's also not clear whether the developer needs to detach it at some point.
+3. The mechanism of events and event handlers may not be familiar to .NET developers who haven't worked with events previously.
+
+## Decision Drivers
+
+1. Dependency injection for handlers should be supported to easily access services registered within the application.
+2. There should not be any limitations on where handlers are defined within the solution, whether it's Startup.cs or a separate file.
+3. There should be a clear way of registering and removing handlers at a specific point of the application runtime.
+4. The mechanism of receiving and processing events in the Kernel should be easy and common in the .NET ecosystem.
+5. The new approach should support the same functionality that is available with Kernel Events - cancelling function execution, changing kernel arguments, changing the rendered prompt before sending it to AI, etc.
+
+## Decision Outcome
+
+Introduce Kernel Filters - an approach to receiving Kernel events similar to action filters in ASP.NET.
+
+Two new abstractions will be used across Semantic Kernel, and developers will have to implement these abstractions in a way that covers their needs.
+
+For function-related events: `IFunctionFilter`
+
+```csharp
+public interface IFunctionFilter
+{
+    void OnFunctionInvoking(FunctionInvokingContext context);
+
+    void OnFunctionInvoked(FunctionInvokedContext context);
+}
+```
+
+For prompt-related events: `IPromptFilter`
+
+```csharp
+public interface IPromptFilter
+{
+    void OnPromptRendering(PromptRenderingContext context);
+
+    void OnPromptRendered(PromptRenderedContext context);
+}
+```
+
+The new approach will allow developers to define filters in separate classes and easily inject the required services to process kernel events correctly:
+
+MyFunctionFilter.cs - a filter with the same logic as the event handlers presented above:
+
+```csharp
+public sealed class MyFunctionFilter : IFunctionFilter
+{
+    private readonly ILogger _logger;
+
+    public MyFunctionFilter(ILoggerFactory loggerFactory)
+    {
+        this._logger = loggerFactory.CreateLogger("MyLogger");
+    }
+
+    public void OnFunctionInvoking(FunctionInvokingContext context)
+    {
+        this._logger.LogInformation("Invoking {FunctionName}", context.Function.Name);
+    }
+
+    public void OnFunctionInvoked(FunctionInvokedContext context)
+    {
+        var metadata = context.Result.Metadata;
+
+        if (metadata is not null && metadata.ContainsKey("Usage"))
+        {
+            this._logger.LogInformation("Token usage: {TokenUsage}", metadata["Usage"]?.AsJson());
+        }
+    }
+}
+```
+
+As soon as a new filter is defined, it's easy to configure it to be used in the Kernel using dependency injection (pre-construction) or to add the filter after Kernel initialization (post-construction):
+
+```csharp
+IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
+kernelBuilder.AddOpenAIChatCompletion(
+    modelId: TestConfiguration.OpenAI.ChatModelId,
+    apiKey: TestConfiguration.OpenAI.ApiKey);
+
+// Adding a filter with DI (pre-construction)
+kernelBuilder.Services.AddSingleton<IFunctionFilter, MyFunctionFilter>();
+
+Kernel kernel = kernelBuilder.Build();
+
+// Adding a filter after Kernel initialization (post-construction)
+// kernel.FunctionFilters.Add(new MyAwesomeFilter());
+
+var result = await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.");
+```
+
+It's also possible to configure multiple filters, which will be triggered in order of registration:
+
+```csharp
+// Filter type names below are illustrative.
+kernelBuilder.Services.AddSingleton<IFunctionFilter, FunctionFilter1>();
+kernelBuilder.Services.AddSingleton<IFunctionFilter, FunctionFilter2>();
+kernelBuilder.Services.AddSingleton<IFunctionFilter, FunctionFilter3>();
+```
+
+And it's possible to change the order of filter execution at runtime or remove a specific filter if needed:
+
+```csharp
+kernel.FunctionFilters.Insert(0, new InitialFilter());
+kernel.FunctionFilters.RemoveAt(1);
+```
diff --git a/docs/decisions/0034-rag-in-sk.md b/docs/decisions/0034-rag-in-sk.md
new file mode 100644
index 000000000000..f94757179176
--- /dev/null
+++ b/docs/decisions/0034-rag-in-sk.md
@@ -0,0 +1,299 @@
+---
+# These are optional elements. Feel free to remove any of them.
+status: proposed
+contact: dmytrostruk
+date: 2023-01-29
+deciders: sergeymenshykh, markwallace, rbarreto, dmytrostruk
+---
+
+# Retrieval-Augmented Generation (RAG) in Semantic Kernel
+
+## Context and Problem Statement
+
+### General information
+
+There are several ways to use the RAG pattern in Semantic Kernel (SK). Some of the approaches already exist in SK, and some of them could be added in the future for a more diverse development experience.
+
+The purpose of this ADR is to describe problematic areas of the memory-related functionality in SK, demonstrate how to achieve RAG in the current version of SK, and propose a new design of the public API for RAG.
+ +Considered options, that are presented in this ADR, do not contradict each other and can be supported all at the same time. The decision which option to support will be based on different factors including priority, actual requirement for specific functionality and general feedback. + +### Vector DB integrations - Connectors + +There are 12 [vector DB connectors](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Connectors) (also known as `memory connectors`) implemented at the moment, and it may be unclear for developers how to use them. It's possible to call connector methods directly or use it via `TextMemoryPlugin` from [Plugins.Memory](https://www.nuget.org/packages/Microsoft.SemanticKernel.Plugins.Memory) NuGet package (prompt example: `{{recall 'company budget by year'}} What is my budget for 2024?`) + +Each connector has unique implementation, some of them rely on already existing .NET SDK from specific vector DB provider, and some of them have implemented functionality to use REST API of vector DB provider. + +Ideally, each connector should be always up-to-date and support new functionality. For some connectors maintenance cost is low, since there are no breaking changes included in new features or vector DB provides .NET SDK which is relatively easy to re-use. For other connectors maintenance cost is high, since some of them are still in `alpha` or `beta` development stage, breaking changes can be included or .NET SDK is not provided, which makes it harder to update. + +### IMemoryStore interface + +Each memory connector implements `IMemoryStore` interface with methods like `CreateCollectionAsync`, `GetNearestMatchesAsync` etc., so it can be used as part of `TextMemoryPlugin`. + +By implementing the same interface, each integration is aligned, which makes it possible to use different vector DBs at runtime. At the same time it is disadvantage, because each vector DB can work differently, and it becomes harder to fit all integrations into already existing abstraction. For example, method `CreateCollectionAsync` from `IMemoryStore` is used when application tries to add new record to vector DB to the collection, which doesn't exist, so before insert operation, it creates new collection. In case of [Pinecone](https://www.pinecone.io/) vector DB, this scenario is not supported, because Pinecone index creation is an asynchronous process - API service will return 201 Created HTTP response with following property in response body (index is not ready for usage): + +```json +{ + // Other properties... + "status": { + "ready": false, + "state": "Initializing" + } +} +``` + +In this case, it's impossible to insert a record to database immediately, so HTTP polling or similar mechanism should be implemented to cover this scenario. + +### MemoryRecord as storage schema + +`IMemoryStore` interface uses `MemoryRecord` class as storage schema in vector DB. This means that `MemoryRecord` properties should be aligned to all possible connectors. As soon as developers will use this schema in their databases, any changes to schema may break the application, which is not a flexible approach. + +`MemoryRecord` contains property `ReadOnlyMemory Embedding` for embeddings and `MemoryRecordMetadata Metadata` for embeddings metadata. `MemoryRecordMetadata` contains properties like: + +- `string Id` - unique identifier. +- `string Text` - data-related text. +- `string Description` - optional title describing the content. 
+- `string AdditionalMetadata` - field for saving custom metadata with a record. + +Since `MemoryRecord` and `MemoryRecordMetadata` are not sealed classes, it should be possible to extend them and add more properties as needed. Although, current approach still forces developers to have specific base schema in their vector DBs, which ideally should be avoided. Developers should have the ability to work with any schema of their choice, which will cover their business scenarios (similarly to Code First approach in Entity Framework). + +### TextMemoryPlugin + +TextMemoryPlugin contains 4 Kernel functions: + +- `Retrieve` - returns concrete record from DB by key. +- `Recall` - performs vector search and returns multiple records based on relevance. +- `Save` - saves record in vector DB. +- `Remove` - removes record from vector DB. + +All functions can be called directly from prompt. Moreover, as soon as these functions are registered in Kernel and Function Calling is enabled, LLM may decide to call specific function to achieve provided goal. + +`Retrieve` and `Recall` functions are useful to provide some context to LLM and ask a question based on data, but functions `Save` and `Remove` perform some manipulations with data in vector DB, which could be unpredicted or sometimes even dangerous (there should be no situations when LLM decides to remove some records, which shouldn't be deleted). + +## Decision Drivers + +1. All manipulations with data in Semantic Kernel should be safe. +2. There should be a clear way(s) how to use RAG pattern in Semantic Kernel. +3. Abstractions should not block developers from using vector DB of their choice with functionality, that cannot be achieved with provided interfaces or data types. + +## Out of scope + +Some of the RAG-related frameworks contain functionality to support full cycle of RAG pattern: + +1. **Read** data from specific resource (e.g. Wikipedia, OneDrive, local PDF file). +2. **Split** data in multiple chunks using specific logic. +3. **Generate** embeddings from data. +4. **Store** data to preferred vector DB. +5. **Search** data in preferred vector DB based on user query. +6. **Ask** LLM a question based on provided data. + +As for now, Semantic Kernel has following experimental features: + +- `TextChunker` class to **split** data in chunks. +- `ITextEmbeddingGenerationService` abstraction and implementations to **generate** embeddings using OpenAI and HuggingFace models. +- Memory connectors to **store** and **search** data. + +Since these features are experimental, they may be deprecated in the future if the decisions for RAG pattern won't require to provide and maintain listed abstractions, classes and connectors in Semantic Kernel. + +Tools for data **reading** is out of scope as for now. + +## Considered Options + +### Option 1 [Supported] - Prompt concatenation + +This option allows to manually construct a prompt with data, so LLM can respond to query based on provided context. It can be achieved by using manual string concatenation or by using prompt template and Kernel arguments. Developers are responsible for integration with vector DB of their choice, data search and prompt construction to send it to LLM. + +This approach doesn't include any memory connectors in Semantic Kernel out-of-the-box, but at the same time it gives an opportunity for developers to handle their data in the way that works for them the best. 
+ +String concatenation: + +```csharp +var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion("model-id", "api-key") + .Build(); + +var builder = new StringBuilder(); + +// User is responsible for searching the data in a way of their choice, this is an example how it could look like. +var data = await this._vectorDB.SearchAsync("Company budget by year"); + +builder.AppendLine(data); +builder.AppendLine("What is my budget for 2024?"); + +var result = await kernel.InvokePromptAsync(builder.ToString()); +``` + +Prompt template and Kernel arguments: + +```csharp +var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion("model-id", "api-key") + .Build(); + +// User is responsible for searching the data in a way of their choice, this is an example how it could look like. +var data = await this._vectorDB.SearchAsync("Company budget by year"); + +var arguments = new KernelArguments { ["budgetByYear"] = data }; + +var result = await kernel.InvokePromptAsync("{{budgetByYear}} What is my budget for 2024?", arguments); +``` + +### Option 2 [Supported] - Memory as Plugin + +This approach is similar to Option 1, but data search step is part of prompt rendering process. Following list contains possible plugins to use for data search: + +- [ChatGPT Retrieval Plugin](https://github.com/openai/chatgpt-retrieval-plugin) - this plugin should be hosted as a separate service. It has integration with various [vector databases](https://github.com/openai/chatgpt-retrieval-plugin?tab=readme-ov-file#choosing-a-vector-database). +- [SemanticKernel.Plugins.Memory.TextMemoryPlugin](https://www.nuget.org/packages/Microsoft.SemanticKernel.Plugins.Memory) - Semantic Kernel solution, which supports various [vector databases](https://learn.microsoft.com/en-us/semantic-kernel/memories/vector-db#available-connectors-to-vector-databases). +- Custom user plugin. + +ChatGPT Retrieval Plugin: + +```csharp +var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion("model-id", "api-key") + .Build(); + +// Import ChatGPT Retrieval Plugin using OpenAPI specification +// https://github.com/openai/chatgpt-retrieval-plugin/blob/main/.well-known/openapi.yaml +await kernel.ImportPluginFromOpenApiAsync("ChatGPTRetrievalPlugin", openApi!, executionParameters: new(authCallback: async (request, cancellationToken) => +{ + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "chat-gpt-retrieval-plugin-token"); +})); + +const string Query = "What is my budget for 2024?"; +const string Prompt = "{{ChatGPTRetrievalPlugin.query_query_post queries=$queries}} {{$query}}"; + +var arguments = new KernelArguments +{ + ["query"] = Query, + ["queries"] = JsonSerializer.Serialize(new List { new { query = Query, top_k = 1 } }), +}; + +var result = await kernel.InvokePromptAsync(Prompt, arguments); +``` + +TextMemoryPlugin: + +```csharp +var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion("model-id", "api-key") + .Build(); + +// NOTE: If the decision will be to continue support memory-related public API, then it should be revisited. +// It should be up-to-date with new Semantic Kernel patterns. +// Example: instead of `WithChromaMemoryStore`, it should be `AddChromaMemoryStore`. 
+var memory = new MemoryBuilder() + .WithChromaMemoryStore("https://chroma-endpoint") + .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", "api-key") + .Build(); + +kernel.ImportPluginFromObject(new TextMemoryPlugin(memory)); + +var result = await kernel.InvokePromptAsync("{{recall 'Company budget by year'}} What is my budget for 2024?"); +``` + +Custom user plugin: + +```csharp +public class MyDataPlugin +{ + [KernelFunction("search")] + public async Task SearchAsync(string query) + { + // Make a call to vector DB and return results. + // Here developer can use already existing .NET SDK from specific vector DB provider. + // It's also possible to re-use Semantic Kernel memory connector directly here: + // new ChromaMemoryStore(...).GetNearestMatchAsync(...) + } +} + +var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion("model-id", "api-key") + .Build(); + +kernel.ImportPluginFromType(); + +var result = await kernel.InvokePromptAsync("{{search 'Company budget by year'}} What is my budget for 2024?"); +``` + +The reason why custom user plugin is more flexible than `TextMemoryPlugin` is because `TextMemoryPlugin` requires all vector DBs to implement `IMemoryStore` interface with disadvantages described above, while custom user plugin can be implemented in a way of developer's choice. There won't be any restrictions on DB record schema or requirement to implement specific interface. + +### Option 3 [Partially supported] - Prompt concatenation using Prompt Filter + +This option is similar to Option 1, but prompt concatenation will happen on Prompt Filter level: + +Prompt filter: + +```csharp +public sealed class MyPromptFilter : IPromptFilter +{ + public void OnPromptRendering(PromptRenderingContext context) + { + // Handling of prompt rendering event... + } + + public void OnPromptRendered(PromptRenderedContext context) + { + var data = "some data"; + var builder = new StringBuilder(); + + builder.AppendLine(data); + builder.AppendLine(context.RenderedPrompt); + + // Override rendered prompt before sending it to AI and include data + context.RenderedPrompt = builder.ToString(); + } +} +``` + +Usage: + +```csharp +var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion("model-id", "api-key") + .Build(); + +kernel.PromptFilters.Add(new MyPromptFilter()); + +var result = await kernel.InvokePromptAsync("What is my budget for 2024?"); +``` + +From the usage perspective, prompt will contain just user query without additional data. The data will be added to the prompt behind the scenes. + +The reason why this approach is **partially supported** is because a call to vector DB most probably will be an asynchronous, but current Kernel filters don't support asynchronous scenarios. So, in order to support asynchronous calls, new type of filters should be added to Kernel: `IAsyncFunctionFilter` and `IAsyncPromptFilter`. They will be the same as current `IFunctionFilter` and `IPromptFilter` but with async methods. + +### Option 4 [Proposal] - Memory as part of PromptExecutionSettings + +This proposal is another possible way how to implement RAG pattern in SK, on top of already existing approaches described above. Similarly to `TextMemoryPlugin`, this approach will require abstraction layer and each vector DB integration will be required to implement specific interface (it could be existing `IMemoryStore` or completely new one) to be compatible with SK. As described in _Context and Problem Statement_ section, the abstraction layer has its advantages and disadvantages. 
+
+User code will look like this:
+
+```csharp
+var kernel = Kernel.CreateBuilder()
+    .AddOpenAIChatCompletion("model-id", "api-key")
+    .Build();
+
+var executionSettings = new OpenAIPromptExecutionSettings
+{
+    Temperature = 0.8,
+    MemoryConfig = new()
+    {
+        // This service could also be registered using DI with a specific lifetime
+        Memory = new ChromaMemoryStore("https://chroma-endpoint"),
+        MinRelevanceScore = 0.8,
+        Limit = 3
+    }
+};
+
+var function = KernelFunctionFactory.CreateFromPrompt("What is my budget for 2024?", executionSettings);
+
+var result = await kernel.InvokeAsync(function);
+```
+
+Data search and prompt concatenation will happen behind the scenes in the `KernelFunctionFromPrompt` class.
+
+## Decision Outcome
+
+The temporary decision is to provide more examples of how to use memory in Semantic Kernel as a plugin.
+
+The final decision will be made based on upcoming memory-related requirements.
diff --git a/docs/decisions/adr-short-template.md b/docs/decisions/adr-short-template.md
index 9b88da98e7b3..bd8b10491f55 100644
--- a/docs/decisions/adr-short-template.md
+++ b/docs/decisions/adr-short-template.md
@@ -1,31 +1,34 @@
 ---
 # These are optional elements. Feel free to remove any of them.
 status: {proposed | rejected | accepted | deprecated | … | superseded by [ADR-0001](0001-madr-architecture-decisions.md)}
+contact: {person proposing the ADR}
 date: {YYYY-MM-DD when the decision was last updated}
 deciders: {list everyone involved in the decision}
 consulted: {list everyone whose opinions are sought (typically subject-matter experts); and with whom there is a two-way communication}
 informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication}
 ---
+
 # {short title of solved problem and solution}

 ## Context and Problem Statement

 {Describe the context and problem statement, e.g., in free form using two to three sentences or in the form of an illustrative story.
- You may want to articulate the problem in form of a question and add links to collaboration boards or issue management systems.}
+You may want to articulate the problem in form of a question and add links to collaboration boards or issue management systems.}
+
 ## Decision Drivers

-* {decision driver 1, e.g., a force, facing concern, …}
-* {decision driver 2, e.g., a force, facing concern, …}
-* …
+- {decision driver 1, e.g., a force, facing concern, …}
+- {decision driver 2, e.g., a force, facing concern, …}
+- …

 ## Considered Options

-* {title of option 1}
-* {title of option 2}
-* {title of option 3}
-* …
+- {title of option 1}
+- {title of option 2}
+- {title of option 3}
+- …

 ## Decision Outcome
diff --git a/docs/decisions/adr-template.md b/docs/decisions/adr-template.md
index 0ab8c17b58b8..a96551338a6e 100644
--- a/docs/decisions/adr-template.md
+++ b/docs/decisions/adr-template.md
@@ -1,31 +1,34 @@
 ---
 # These are optional elements. Feel free to remove any of them.
status: {proposed | rejected | accepted | deprecated | … | superseded by [ADR-0001](0001-madr-architecture-decisions.md)} +contact: {person proposing the ADR} date: {YYYY-MM-DD when the decision was last updated} deciders: {list everyone involved in the decision} consulted: {list everyone whose opinions are sought (typically subject-matter experts); and with whom there is a two-way communication} informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication} --- + # {short title of solved problem and solution} ## Context and Problem Statement {Describe the context and problem statement, e.g., in free form using two to three sentences or in the form of an illustrative story. - You may want to articulate the problem in form of a question and add links to collaboration boards or issue management systems.} +You may want to articulate the problem in form of a question and add links to collaboration boards or issue management systems.} + ## Decision Drivers -* {decision driver 1, e.g., a force, facing concern, …} -* {decision driver 2, e.g., a force, facing concern, …} -* … +- {decision driver 1, e.g., a force, facing concern, …} +- {decision driver 2, e.g., a force, facing concern, …} +- … ## Considered Options -* {title of option 1} -* {title of option 2} -* {title of option 3} -* … +- {title of option 1} +- {title of option 2} +- {title of option 3} +- … ## Decision Outcome @@ -33,47 +36,52 @@ Chosen option: "{title of option 1}", because {justification. e.g., only option, which meets k.o. criterion decision driver | which resolves force {force} | … | comes out best (see below)}. + ### Consequences -* Good, because {positive consequence, e.g., improvement of one or more desired qualities, …} -* Bad, because {negative consequence, e.g., compromising one or more desired qualities, …} -* … +- Good, because {positive consequence, e.g., improvement of one or more desired qualities, …} +- Bad, because {negative consequence, e.g., compromising one or more desired qualities, …} +- … + ## Validation {describe how the implementation of/compliance with the ADR is validated. E.g., by a review or an ArchUnit test} + ## Pros and Cons of the Options ### {title of option 1} + {example | description | pointer to more information | …} -* Good, because {argument a} -* Good, because {argument b} +- Good, because {argument a} +- Good, because {argument b} -* Neutral, because {argument c} -* Bad, because {argument d} -* … +- Neutral, because {argument c} +- Bad, because {argument d} +- … ### {title of other option} {example | description | pointer to more information | …} -* Good, because {argument a} -* Good, because {argument b} -* Neutral, because {argument c} -* Bad, because {argument d} -* … +- Good, because {argument a} +- Good, because {argument b} +- Neutral, because {argument c} +- Bad, because {argument d} +- … + ## More Information {You might want to provide additional evidence/confidence for the decision outcome here and/or - document the team agreement on the decision and/or - define when this decision when and how the decision should be realized and if/when it should be re-visited and/or - how the decision is validated. - Links to other decisions and resources might appear here as well.} +document the team agreement on the decision and/or +define when this decision when and how the decision should be realized and if/when it should be re-visited and/or +how the decision is validated. 
+Links to other decisions and resources might appear here as well.} diff --git a/docs/decisions/diagrams/chat-text-models.mmd b/docs/decisions/diagrams/chat-text-models.mmd new file mode 100644 index 000000000000..5725602ca818 --- /dev/null +++ b/docs/decisions/diagrams/chat-text-models.mmd @@ -0,0 +1,125 @@ +--- +title: Chat & Text Models +--- +classDiagram + %% Use https://mermaid.live/ to preview this diagram. The VS Code extension does not handle namespaces. + direction LR + + namespace Microsoft_SemanticKernel { + class KernelContent { + <> + +InnerContent : Object + +ModelId : String + +Metadata : IDictionary + +string(modelContent : KernelContent) + } + + class StreamingKernelContent { + <> + +ChoiceIndex : Integer + +InnerContent : Object + +Metadata : IDictionary + +ToString() + +ToByteArray() + +string(modelContent : StreamingKernelContent) + } + + class TextContent { + +Text : String + +Encoding : Encoding + +ToString() + } + + class StreamingTextContent { + +Text : String + +Encoding : Encoding + +ToString() + +ToByteArray() + } + + class ChatMessageContent { + +Role : AuthorRole + +Content : String + +Items : ChatMessageContentItemCollection + +Encoding : Encoding + +ToString() + } + + class StreamingChatMessageContent { + +Content : String + +Role : AuthorRole + +Encoding : Encoding + +ToString() + +ToByteArray() + } + + class ImageContent { + +Uri : Uri + +ToString() + } + } + + namespace Microsoft_SemanticKernel_ChatCompletion { + class ChatMessageContentItemCollection { + +Count + +Add(item: KernelContent) + } + + class ChatHistory { + +AddMessage(chatMessageContent : ChatMessageContent) + +AddMessage(authorRole : AuthorRole, content : string, encoding : Encoding, metadata : IDictionary) + +AddUserMessage(content : string) + +AddAssistantMessage(content : string) + +AddSystemMessage(content : string) + } + } + + namespace Microsoft_SemanticKernel_Connectors_OpenAI { + class OpenAIChatMessageContent { + +FunctionCall : FunctionCall + +Name : Name + +GetOpenAIFunctionResponse() + } + + class AzureOpenAIWithDataChatMessageContent { + +ToolContent : String + } + + class OpenAIStreamingTextContent { + +ToByteArray() + +ToString() + } + + class OpenAIStreamingChatMessageContent { + +Name : String + +FunctionName : String + +FunctionArgument : String + +ToByteArray() + +ToString() + +GetOpenAIStreamingFunctionResponse(fullContent : OpenAIStreamingChatMessageContent[]) + } + + class AzureOpenAIWithDataStreamingChatMessageContent { + +FunctionName : String + +FunctionArgument : String + -IsValidMessage(message : ChatWithDataStreamingMessage) + } + + class OpenAIChatHistory { + + } + } + + KernelContent <|-- TextContent + KernelContent <|-- ImageContent + KernelContent <|-- ChatMessageContent + KernelContent *-- ChatMessageContentItemCollection + ChatMessageContent <|-- OpenAIChatMessageContent + ChatMessageContent <|-- AzureOpenAIWithDataChatMessageContent + StreamingKernelContent <|-- StreamingTextContent + StreamingTextContent <|-- OpenAIStreamingTextContent + StreamingKernelContent <|-- StreamingChatMessageContent + StreamingChatMessageContent <|-- OpenAIStreamingChatMessageContent + StreamingChatMessageContent <|-- AzureOpenAIWithDataStreamingChatMessageContent + ChatHistory <|-- OpenAIChatHistory + ChatMessageContent o-- ChatMessageContentItemCollection diff --git a/docs/decisions/diagrams/chat-text-models.png b/docs/decisions/diagrams/chat-text-models.png new file mode 100644 index 000000000000..f2b6b0eb0637 Binary files /dev/null and 
b/docs/decisions/diagrams/chat-text-models.png differ diff --git a/docs/decisions/diagrams/git-flow.png b/docs/decisions/diagrams/git-flow.png new file mode 100644 index 000000000000..366fece8a5dd Binary files /dev/null and b/docs/decisions/diagrams/git-flow.png differ diff --git a/docs/decisions/diagrams/git-hub-flow.png b/docs/decisions/diagrams/git-hub-flow.png new file mode 100644 index 000000000000..d85118995668 Binary files /dev/null and b/docs/decisions/diagrams/git-hub-flow.png differ diff --git a/docs/decisions/diagrams/net-pr-status-checks.png b/docs/decisions/diagrams/net-pr-status-checks.png new file mode 100644 index 000000000000..d1d878390944 Binary files /dev/null and b/docs/decisions/diagrams/net-pr-status-checks.png differ diff --git a/docs/decisions/diagrams/open-ai-assistant-api-objects.png b/docs/decisions/diagrams/open-ai-assistant-api-objects.png new file mode 100644 index 000000000000..4869d961df63 Binary files /dev/null and b/docs/decisions/diagrams/open-ai-assistant-api-objects.png differ diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index e58ac9af5617..d6a12595f942 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -5,68 +5,81 @@ true - - - - - + + + + + + + + + + + + - - - - - + + + + + - - + + + + - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + - + - - - - + + + + - - - - - + + + + + - - - + + - - + + - + - - - + + + - - - + + + + all runtime; build; native; contentfiles; analyzers; buildtransitive @@ -76,12 +89,12 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive @@ -91,17 +104,17 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive diff --git a/dotnet/README.md b/dotnet/README.md index 28ca20228fb7..86eeff863735 100644 --- a/dotnet/README.md +++ b/dotnet/README.md @@ -14,7 +14,7 @@ First, let's create a new project, targeting .NET 6 or newer, and add the `Microsoft.SemanticKernel` nuget package to your project from the command prompt in Visual Studio: - dotnet add package Microsoft.SemanticKernel --prerelease + dotnet add package Microsoft.SemanticKernel # Running prompts with input parameters @@ -22,17 +22,17 @@ Copy and paste the following code into your project, with your Azure OpenAI key ```csharp using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; -var builder = new KernelBuilder(); +var builder = Kernel.CreateBuilder(); -builder.WithAzureChatCompletionService( +builder.AddAzureOpenAIChatCompletion( "gpt-35-turbo", // Azure OpenAI Deployment Name "https://contoso.openai.azure.com/", // Azure OpenAI Endpoint "...your Azure OpenAI Key..."); // Azure OpenAI Key // Alternative using OpenAI -//builder.WithOpenAIChatCompletionService( +//builder.AddOpenAIChatCompletion( // "gpt-3.5-turbo", // OpenAI Model name // "...your OpenAI API Key..."); // OpenAI API Key @@ -42,7 +42,7 @@ var prompt = @"{{$input}} One line TLDR with the fewest words."; -var summarize = kernel.CreateSemanticFunction(prompt, requestSettings: new OpenAIRequestSettings { MaxTokens = 100 }); +var summarize = 
kernel.CreateFunctionFromPrompt(prompt, executionSettings: new OpenAIPromptExecutionSettings { MaxTokens = 100 }); string text1 = @" 1st Law of Thermodynamics - Energy cannot be created or destroyed. @@ -54,49 +54,15 @@ string text2 = @" 2. The acceleration of an object depends on the mass of the object and the amount of force applied. 3. Whenever one object exerts a force on another object, the second object exerts an equal and opposite on the first."; -Console.WriteLine(await kernel.RunAsync(text1, summarize)); +Console.WriteLine(await kernel.InvokeAsync(summarize, new() { ["input"] = text1 })); -Console.WriteLine(await kernel.RunAsync(text2, summarize)); +Console.WriteLine(await kernel.InvokeAsync(summarize, new() { ["input"] = text2 })); // Output: // Energy conserved, entropy increases, zero entropy at 0K. // Objects move in response to forces. ``` -# Prompt chaining - -The previous code shows how to invoke individual semantic functions, but you can -also chain functions (aka prompt chaining) to process the initial input with multiple -operations. - -The following code for example, translates an initial text to math symbols and -then generates a summary: - -```csharp -string translationPrompt = @"{{$input}} - -Translate the text to math."; - -string summarizePrompt = @"{{$input}} - -Give me a TLDR with the fewest words."; - -var translator = kernel.CreateSemanticFunction(translationPrompt, requestSettings: new OpenAIRequestSettings { MaxTokens = 200 }); -var summarize = kernel.CreateSemanticFunction(summarizePrompt, requestSettings: new OpenAIRequestSettings { MaxTokens = 100 }); - -string inputText = @" -1st Law of Thermodynamics - Energy cannot be created or destroyed. -2nd Law of Thermodynamics - For a spontaneous process, the entropy of the universe increases. -3rd Law of Thermodynamics - A perfect crystal at zero Kelvin has zero entropy."; - -// Run two prompts in sequence (prompt chaining) -var output = await kernel.RunAsync(inputText, translator, summarize); - -Console.WriteLine(output); - -// Output: ΔE = 0, ΔSuniv > 0, S = 0 at 0K. -``` - # Semantic Kernel Notebooks The repository contains also a few C# Jupyter notebooks that demonstrates @@ -109,11 +75,12 @@ requirements and setup instructions. 2. [Loading and configuring Semantic Kernel](./notebooks/01-basic-loading-the-kernel.ipynb) 3. [Running AI prompts from file](./notebooks/02-running-prompts-from-file.ipynb) 4. [Creating Semantic Functions at runtime (i.e. inline functions)](./notebooks/03-semantic-function-inline.ipynb) -5. [Using Context Variables to Build a Chat Experience](./notebooks/04-context-variables-chat.ipynb) +5. [Using Kernel Arguments to Build a Chat Experience](./notebooks/04-kernel-arguments-chat.ipynb) 6. [Creating and Executing Plans](./notebooks/05-using-the-planner.ipynb) 7. [Building Memory with Embeddings](./notebooks/06-memory-and-embeddings.ipynb) -8. [Creating images with DALL-E 2](./notebooks/07-DALL-E-2.ipynb) -9. [Chatting with ChatGPT and Images](./notebooks/08-chatGPT-with-DALL-E-2.ipynb) +8. [Creating images with DALL-E 3](./notebooks/07-DALL-E-3.ipynb) +9. [Chatting with ChatGPT and Images](./notebooks/08-chatGPT-with-DALL-E-3.ipynb) +10. [BingSearch using Kernel](./notebooks/10-BingSearch-using-kernel.ipynb) # Nuget packages @@ -133,21 +100,20 @@ Packages included in **Microsoft.SemanticKernel**: used by the core and other SK components. 1. 
**Microsoft.SemanticKernel.Core**: contains the core logic of SK, such as prompt engineering, semantic memory and semantic functions definition and orchestration. -1. **Microsoft.SemanticKernel.Connectors.AI.OpenAI**: connectors to OpenAI and Azure - OpenAI, allowing to run semantic functions, chats, image generation with GPT3, - GPT3.5, GPT4, DALL-E2. +1. **Microsoft.SemanticKernel.Connectors.OpenAI**: connectors to OpenAI and Azure + OpenAI, allowing to run semantic functions, chats, text to image with GPT3, + GPT3.5, GPT4, DALL-E3. Other SK packages available at nuget.org: -1. **Microsoft.SemanticKernel.Connectors.Memory.Qdrant**: Qdrant connector for +1. **Microsoft.SemanticKernel.Connectors.Qdrant**: Qdrant connector for plugins and semantic memory. -2. **Microsoft.SemanticKernel.Connectors.Memory.Sqlite**: SQLite connector for +2. **Microsoft.SemanticKernel.Connectors.Sqlite**: SQLite connector for plugins and semantic memory 3. **Microsoft.SemanticKernel.Plugins.Document**: Document Plugin: Word processing, OpenXML, etc. 4. **Microsoft.SemanticKernel.Plugins.MsGraph**: Microsoft Graph Plugin: access your tenant data, schedule meetings, send emails, etc. -5. **Microsoft.SemanticKernel.Plugins.OpenAPI**: OpenAPI Plugin. +5. **Microsoft.SemanticKernel.Plugins.OpenApi**: OpenAPI Plugin. 6. **Microsoft.SemanticKernel.Plugins.Web**: Web Plugin: search the web, download files, etc. -7. **Microsoft.SemanticKernel.Reliability.Polly**: Extension for http resiliency. diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 86b0ceb3d3a5..701b487d0e31 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -1,3 +1,4 @@ + Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 17 VisualStudioVersion = 17.4.33213.308 @@ -7,18 +8,9 @@ EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "samples", "samples", "{FA3720F1-C99A-49B2-9577-A940257098BF}" - ProjectSection(SolutionItems) = preProject - ..\samples\dotnet\Directory.Build.props = ..\samples\dotnet\Directory.Build.props - ..\samples\dotnet\Directory.Build.targets = ..\samples\dotnet\Directory.Build.targets - ..\samples\dotnet\Directory.Packages.props = ..\samples\dotnet\Directory.Packages.props - EndProjectSection EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KernelSyntaxExamples", "samples\KernelSyntaxExamples\KernelSyntaxExamples.csproj", "{47C6F821-5103-431F-B3B8-A2868A68BB78}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MsGraphPluginsExample", "..\samples\dotnet\MsGraphPluginsExample\MsGraphPluginsExample.csproj", "{3EB61E99-C39B-4620-9482-F8DA18E48525}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KernelHttpServer", "..\samples\dotnet\KernelHttpServer\KernelHttpServer.csproj", "{34A7F1EF-D243-4160-A413-D713FEABCD94}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "IntegrationTests", "src\IntegrationTests\IntegrationTests.csproj", "{E4B777A1-28E1-41BE-96AE-7F3EC61FD5D4}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Plugins.Document", "src\Plugins\Plugins.Document\Plugins.Document.csproj", "{F94D1938-9DB7-4B24-9FF3-166DDFD96330}" @@ -32,6 +24,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution ..\.editorconfig = ..\.editorconfig ..\.gitignore = ..\.gitignore Directory.Build.props = Directory.Build.props + Directory.Build.targets = 
Directory.Build.targets Directory.Packages.props = Directory.Packages.props ..\.github\workflows\dotnet-format.yml = ..\.github\workflows\dotnet-format.yml ..\nuget.config = ..\nuget.config @@ -51,7 +44,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "nuget", "nuget", "{F4243136 nuget\NUGET.md = nuget\NUGET.md EndProjectSection EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.OpenAPI", "src\Functions\Functions.OpenAPI\Functions.OpenAPI.csproj", "{F2A1F81E-700E-4C0E-B021-B9EF29AA20BD}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.OpenApi", "src\Functions\Functions.OpenApi\Functions.OpenApi.csproj", "{F2A1F81E-700E-4C0E-B021-B9EF29AA20BD}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "connectors", "connectors", "{0247C2C9-86C3-45BA-8873-28B0948EDC0C}" EndProject @@ -67,7 +60,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.Redis", " EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.Chroma", "src\Connectors\Connectors.Memory.Chroma\Connectors.Memory.Chroma.csproj", "{185E0CE8-C2DA-4E4C-A491-E8EB40316315}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AI.OpenAI", "src\Connectors\Connectors.AI.OpenAI\Connectors.AI.OpenAI.csproj", "{AFA81EB7-F869-467D-8A90-744305D80AAC}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.OpenAI", "src\Connectors\Connectors.OpenAI\Connectors.OpenAI.csproj", "{AFA81EB7-F869-467D-8A90-744305D80AAC}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SemanticKernel.Abstractions", "src\SemanticKernel.Abstractions\SemanticKernel.Abstractions.csproj", "{627742DB-1E52-468A-99BD-6FF1A542D25B}" EndProject @@ -77,26 +70,23 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "extensions", "extensions", EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Extensions.UnitTests", "src\Extensions\Extensions.UnitTests\Extensions.UnitTests.csproj", "{F51017A9-15C8-472D-893C-080046D710A6}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.AzureCognitiveSearch", "src\Connectors\Connectors.Memory.AzureCognitiveSearch\Connectors.Memory.AzureCognitiveSearch.csproj", "{EC3BB6D1-2FB2-4702-84C6-F791DE533ED4}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.AzureAISearch", "src\Connectors\Connectors.Memory.AzureAISearch\Connectors.Memory.AzureAISearch.csproj", "{EC3BB6D1-2FB2-4702-84C6-F791DE533ED4}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.Pinecone", "src\Connectors\Connectors.Memory.Pinecone\Connectors.Memory.Pinecone.csproj", "{4D226C2F-AE9F-4EFB-AF2D-45C8FE5CB34E}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.Grpc", "src\Functions\Functions.Grpc\Functions.Grpc.csproj", "{E52F805C-794A-4CA9-B684-DFF358B18820}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AI.HuggingFace", "src\Connectors\Connectors.AI.HuggingFace\Connectors.AI.HuggingFace.csproj", "{136823BE-8665-4D57-87E0-EF41535539E2}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.HuggingFace", "src\Connectors\Connectors.HuggingFace\Connectors.HuggingFace.csproj", "{136823BE-8665-4D57-87E0-EF41535539E2}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "InternalUtilities", "InternalUtilities", "{4D3DAE63-41C6-4E1C-A35A-E77BDFC40675}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.Weaviate", 
"src\Connectors\Connectors.Memory.Weaviate\Connectors.Memory.Weaviate.csproj", "{6AAB0620-33A1-4A98-A63B-6560B9BA47A4}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "OpenApiPluginsExample", "..\samples\dotnet\OpenApiPluginsExample\OpenApiPluginsExample.csproj", "{4D91A3E0-C404-495B-AD4A-411C4E83CF54}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.DuckDB", "src\Connectors\Connectors.Memory.DuckDB\Connectors.Memory.DuckDB.csproj", "{50FAE231-6F24-4779-9D02-12ABBC9A49E2}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{5C246969-D794-4EC3-8E8F-F90D4D166420}" ProjectSection(SolutionItems) = preProject src\InternalUtilities\test\AssertExtensions.cs = src\InternalUtilities\test\AssertExtensions.cs - src\InternalUtilities\test\FunctionHelpers.cs = src\InternalUtilities\test\FunctionHelpers.cs src\InternalUtilities\test\TestInternalUtilities.props = src\InternalUtilities\test\TestInternalUtilities.props EndProjectSection EndProject @@ -109,7 +99,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Diagnostics", "Diagnostics" ProjectSection(SolutionItems) = preProject src\InternalUtilities\src\Diagnostics\CompilerServicesAttributes.cs = src\InternalUtilities\src\Diagnostics\CompilerServicesAttributes.cs src\InternalUtilities\src\Diagnostics\ExceptionExtensions.cs = src\InternalUtilities\src\Diagnostics\ExceptionExtensions.cs - src\InternalUtilities\src\Diagnostics\HttpStatusCodeType.cs = src\InternalUtilities\src\Diagnostics\HttpStatusCodeType.cs + src\InternalUtilities\src\Diagnostics\ExperimentalAttribute.cs = src\InternalUtilities\src\Diagnostics\ExperimentalAttribute.cs + src\InternalUtilities\src\Diagnostics\IsExternalInit.cs = src\InternalUtilities\src\Diagnostics\IsExternalInit.cs src\InternalUtilities\src\Diagnostics\NullableAttributes.cs = src\InternalUtilities\src\Diagnostics\NullableAttributes.cs src\InternalUtilities\src\Diagnostics\Verify.cs = src\InternalUtilities\src\Diagnostics\Verify.cs EndProjectSection @@ -119,51 +110,115 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Linq", "Linq", "{B00AD427-0 src\InternalUtilities\src\Linq\AsyncEnumerable.cs = src\InternalUtilities\src\Linq\AsyncEnumerable.cs EndProjectSection EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{DB950192-30F1-48B1-88D7-F43FECCA1A1C}" - ProjectSection(SolutionItems) = preProject - src\InternalUtilities\src\Text\Json.cs = src\InternalUtilities\src\Text\Json.cs - src\InternalUtilities\src\Text\StringExtensions.cs = src\InternalUtilities\src\Text\StringExtensions.cs - EndProjectSection -EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Http", "Http", "{1C19D805-3573-4477-BF07-40180FCDE1BD}" ProjectSection(SolutionItems) = preProject + src\InternalUtilities\src\Http\HttpClientExtensions.cs = src\InternalUtilities\src\Http\HttpClientExtensions.cs src\InternalUtilities\src\Http\HttpClientProvider.cs = src\InternalUtilities\src\Http\HttpClientProvider.cs - src\InternalUtilities\src\Http\NonDisposableHttpClientHandler.cs = src\InternalUtilities\src\Http\NonDisposableHttpClientHandler.cs + src\InternalUtilities\src\Http\HttpContentExtensions.cs = src\InternalUtilities\src\Http\HttpContentExtensions.cs + src\InternalUtilities\src\Http\HttpHeaderValues.cs = src\InternalUtilities\src\Http\HttpHeaderValues.cs + src\InternalUtilities\src\Http\HttpRequest.cs = src\InternalUtilities\src\Http\HttpRequest.cs EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") 
= "System", "System", "{3CDE10B2-AE8F-4FC4-8D55-92D4AD32E144}" ProjectSection(SolutionItems) = preProject src\InternalUtilities\src\System\EnvExtensions.cs = src\InternalUtilities\src\System\EnvExtensions.cs + src\InternalUtilities\src\System\InternalTypeConverter.cs = src\InternalUtilities\src\System\InternalTypeConverter.cs + src\InternalUtilities\src\System\NonNullCollection.cs = src\InternalUtilities\src\System\NonNullCollection.cs + src\InternalUtilities\src\System\TypeConverterFactory.cs = src\InternalUtilities\src\System\TypeConverterFactory.cs EndProjectSection EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Plugins.Core", "src\Plugins\Plugins.Core\Plugins.Core.csproj", "{0D0C4DAD-E6BC-4504-AE3A-EEA4E35920C1}" +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Type", "Type", "{E85EA4D0-BB7E-4DFD-882F-A76EB8C0B8FF}" + ProjectSection(SolutionItems) = preProject + src\InternalUtilities\src\Type\TypeExtensions.cs = src\InternalUtilities\src\Type\TypeExtensions.cs + EndProjectSection EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NCalcPlugins", "samples\NCalcPlugins\NCalcPlugins.csproj", "{E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Plugins.Core", "src\Plugins\Plugins.Core\Plugins.Core.csproj", "{0D0C4DAD-E6BC-4504-AE3A-EEA4E35920C1}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ApplicationInsightsExample", "samples\ApplicationInsightsExample\ApplicationInsightsExample.csproj", "{C754950A-E16C-4F96-9CC7-9328E361B5AF}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TelemetryExample", "samples\TelemetryExample\TelemetryExample.csproj", "{C754950A-E16C-4F96-9CC7-9328E361B5AF}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.Kusto", "src\Connectors\Connectors.Memory.Kusto\Connectors.Memory.Kusto.csproj", "{E07608CC-D710-4655-BB9E-D22CF3CDD193}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TemplateEngine.Basic", "src\Extensions\TemplateEngine.Basic\TemplateEngine.Basic.csproj", "{10E4B697-D4E8-468D-872D-49670FD150FB}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Reliability.Polly", "src\Extensions\Reliability.Polly\Reliability.Polly.csproj", "{D4540A0F-98E3-4E70-9093-1948AE5B2AAD}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Reliability.Basic", "src\Extensions\Reliability.Basic\Reliability.Basic.csproj", "{3DC4DBD8-20A5-4937-B4F5-BB5E24E7A567}" -EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "plugins", "plugins", "{D6D598DF-C17C-46F4-B2B9-CDE82E2DE132}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Plugins.UnitTests", "src\Plugins\Plugins.UnitTests\Plugins.UnitTests.csproj", "{5CB78CE4-895B-4A14-98AA-716A37DEEBB1}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "planners", "planners", "{A21FAC7C-0C09-4EAD-843B-926ACEF73C80}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Planners.Core", "src\Planners\Planners.Core\Planners.Core.csproj", "{F224B869-FA0E-4EEE-A6BF-C2D61FF8E731}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Planners.Core.UnitTests", "src\Planners\Planners.Core.UnitTests\Planners.Core.UnitTests.csproj", "{CC77DCFA-A419-4202-A98A-868CDF457792}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.Milvus", "src\Connectors\Connectors.Memory.Milvus\Connectors.Memory.Milvus.csproj", "{8B754E80-7A97-4585-8D7E-1D588FA5F727}" EndProject 
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Plugins.Memory", "src\Plugins\Plugins.Memory\Plugins.Memory.csproj", "{E91365A1-8B01-4AB8-825F-67E3515EADCD}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "experimental", "experimental", "{A2357CF8-3BB9-45A1-93F1-B366C9B63658}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Planners.OpenAI", "src\Planners\Planners.OpenAI\Planners.OpenAI.csproj", "{348BBF45-23B4-4599-83A6-8AE1795227FB}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Memory.MongoDB", "src\Connectors\Connectors.Memory.MongoDB\Connectors.Memory.MongoDB.csproj", "{6009CC87-32F1-4282-88BB-8E5A7BA12925}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PromptTemplates.Handlebars", "src\Extensions\PromptTemplates.Handlebars\PromptTemplates.Handlebars.csproj", "{B0646036-0C50-4F66-B479-ADA9C1166816}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.Yaml", "src\Functions\Functions.Yaml\Functions.Yaml.csproj", "{4AD4E731-16E7-4A0E-B403-6C96459F989B}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.Markdown", "src\Functions\Functions.Markdown\Functions.Markdown.csproj", "{E576E260-4030-4C4C-B207-CA3B684E9669}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Planners.Handlebars", "src\Planners\Planners.Handlebars\Planners.Handlebars.csproj", "{352DAE69-E31C-44E3-A35F-E0342A955869}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "planning", "planning", "{A7DAB812-81CF-4931-B38C-468A3261C4C8}" + ProjectSection(SolutionItems) = preProject + src\InternalUtilities\planning\PlannerInstrumentation.cs = src\InternalUtilities\planning\PlannerInstrumentation.cs + src\InternalUtilities\planning\PlannerOptions.cs = src\InternalUtilities\planning\PlannerOptions.cs + src\InternalUtilities\planning\PlanningUtilities.props = src\InternalUtilities\planning\PlanningUtilities.props + src\InternalUtilities\planning\SemanticMemoryConfig.cs = src\InternalUtilities\planning\SemanticMemoryConfig.cs + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Extensions", "Extensions", "{3F4E0DC5-2241-4EF2-9F69-E7EC7834D349}" + ProjectSection(SolutionItems) = preProject + src\InternalUtilities\planning\Extensions\ChatHistoryExtensions.cs = src\InternalUtilities\planning\Extensions\ChatHistoryExtensions.cs + src\InternalUtilities\planning\Extensions\KernelFunctionMetadataExtensions.cs = src\InternalUtilities\planning\Extensions\KernelFunctionMetadataExtensions.cs + src\InternalUtilities\planning\Extensions\ReadOnlyFunctionCollectionPlannerExtensions.cs = src\InternalUtilities\planning\Extensions\ReadOnlyFunctionCollectionPlannerExtensions.cs + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Schema", "Schema", "{873281F6-9C31-4641-98AE-F1D2844DE9F0}" + ProjectSection(SolutionItems) = preProject + src\InternalUtilities\planning\Schema\JsonSchemaFunctionContent.cs = src\InternalUtilities\planning\Schema\JsonSchemaFunctionContent.cs + src\InternalUtilities\planning\Schema\JsonSchemaFunctionParameters.cs = src\InternalUtilities\planning\Schema\JsonSchemaFunctionParameters.cs + src\InternalUtilities\planning\Schema\JsonSchemaFunctionResponse.cs = src\InternalUtilities\planning\Schema\JsonSchemaFunctionResponse.cs + src\InternalUtilities\planning\Schema\JsonSchemaFunctionView.cs = src\InternalUtilities\planning\Schema\JsonSchemaFunctionView.cs + 
src\InternalUtilities\planning\Schema\JsonSchemaResponse.cs = src\InternalUtilities\planning\Schema\JsonSchemaResponse.cs + EndProjectSection +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Planners.Handlebars.UnitTests", "src\Planners\Planners.Handlebars.UnitTests\Planners.Handlebars.UnitTests.csproj", "{3FC4A81B-8ABE-473F-BC7C-6F4885775534}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AI", "AI", "{1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Memory", "Memory", "{24503383-A8C4-4255-9998-28D70FE8E99A}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Agents", "src\Experimental\Agents\Experimental.Agents.csproj", "{5438D1E3-E03D-444B-BBBA-478F93161AA8}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Agents.UnitTests", "src\Experimental\Agents.UnitTests\Experimental.Agents.UnitTests.csproj", "{4AD80279-9AC1-476F-8103-E6CD5E4FD525}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Orchestration.Flow", "src\Experimental\Orchestration.Flow\Experimental.Orchestration.Flow.csproj", "{B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Orchestration.Flow.IntegrationTests", "src\Experimental\Orchestration.Flow.IntegrationTests\Experimental.Orchestration.Flow.IntegrationTests.csproj", "{3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Orchestration.Flow.UnitTests", "src\Experimental\Orchestration.Flow.UnitTests\Experimental.Orchestration.Flow.UnitTests.csproj", "{731CC542-8BE9-42D4-967D-99206EC2B310}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DocumentationExamples", "samples\DocumentationExamples\DocumentationExamples.csproj", "{A8E0D3B2-49D7-4DF6-BF91-B234C1C5E25D}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CreateChatGptPlugin", "samples\CreateChatGptPlugin\Solution\CreateChatGptPlugin.csproj", "{87AB5AF5-5783-4372-9789-664895E0A2FF}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.OpenApi.Extensions", "src\Functions\Functions.OpenApi.Extensions\Functions.OpenApi.Extensions.csproj", "{95CAA25F-A0DE-4A5B-92BA-7D56C0E822A8}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{EB2C141A-AE5F-4080-8790-13EB16323CEF}" + ProjectSection(SolutionItems) = preProject + src\InternalUtilities\src\Text\JsonOptionsCache.cs = src\InternalUtilities\src\Text\JsonOptionsCache.cs + src\InternalUtilities\src\Text\ReadOnlyMemoryConverter.cs = src\InternalUtilities\src\Text\ReadOnlyMemoryConverter.cs + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Linq", "Linq", "{607DD6FA-FA0D-45E6-80BA-22A373609E89}" + ProjectSection(SolutionItems) = preProject + src\InternalUtilities\test\Linq\AsyncEnumerable.cs = src\InternalUtilities\test\Linq\AsyncEnumerable.cs + EndProjectSection +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -182,16 +237,6 @@ Global {47C6F821-5103-431F-B3B8-A2868A68BB78}.Publish|Any CPU.ActiveCfg = Release|Any CPU {47C6F821-5103-431F-B3B8-A2868A68BB78}.Release|Any CPU.ActiveCfg = Release|Any CPU {47C6F821-5103-431F-B3B8-A2868A68BB78}.Release|Any CPU.Build.0 = Release|Any CPU - {3EB61E99-C39B-4620-9482-F8DA18E48525}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - 
{3EB61E99-C39B-4620-9482-F8DA18E48525}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3EB61E99-C39B-4620-9482-F8DA18E48525}.Publish|Any CPU.ActiveCfg = Release|Any CPU - {3EB61E99-C39B-4620-9482-F8DA18E48525}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3EB61E99-C39B-4620-9482-F8DA18E48525}.Release|Any CPU.Build.0 = Release|Any CPU - {34A7F1EF-D243-4160-A413-D713FEABCD94}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {34A7F1EF-D243-4160-A413-D713FEABCD94}.Debug|Any CPU.Build.0 = Debug|Any CPU - {34A7F1EF-D243-4160-A413-D713FEABCD94}.Publish|Any CPU.ActiveCfg = Release|Any CPU - {34A7F1EF-D243-4160-A413-D713FEABCD94}.Release|Any CPU.ActiveCfg = Release|Any CPU - {34A7F1EF-D243-4160-A413-D713FEABCD94}.Release|Any CPU.Build.0 = Release|Any CPU {E4B777A1-28E1-41BE-96AE-7F3EC61FD5D4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E4B777A1-28E1-41BE-96AE-7F3EC61FD5D4}.Debug|Any CPU.Build.0 = Debug|Any CPU {E4B777A1-28E1-41BE-96AE-7F3EC61FD5D4}.Publish|Any CPU.ActiveCfg = Release|Any CPU @@ -324,11 +369,6 @@ Global {6AAB0620-33A1-4A98-A63B-6560B9BA47A4}.Publish|Any CPU.Build.0 = Publish|Any CPU {6AAB0620-33A1-4A98-A63B-6560B9BA47A4}.Release|Any CPU.ActiveCfg = Release|Any CPU {6AAB0620-33A1-4A98-A63B-6560B9BA47A4}.Release|Any CPU.Build.0 = Release|Any CPU - {4D91A3E0-C404-495B-AD4A-411C4E83CF54}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4D91A3E0-C404-495B-AD4A-411C4E83CF54}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4D91A3E0-C404-495B-AD4A-411C4E83CF54}.Publish|Any CPU.ActiveCfg = Release|Any CPU - {4D91A3E0-C404-495B-AD4A-411C4E83CF54}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4D91A3E0-C404-495B-AD4A-411C4E83CF54}.Release|Any CPU.Build.0 = Release|Any CPU {50FAE231-6F24-4779-9D02-12ABBC9A49E2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {50FAE231-6F24-4779-9D02-12ABBC9A49E2}.Debug|Any CPU.Build.0 = Debug|Any CPU {50FAE231-6F24-4779-9D02-12ABBC9A49E2}.Publish|Any CPU.ActiveCfg = Publish|Any CPU @@ -341,11 +381,6 @@ Global {0D0C4DAD-E6BC-4504-AE3A-EEA4E35920C1}.Publish|Any CPU.Build.0 = Publish|Any CPU {0D0C4DAD-E6BC-4504-AE3A-EEA4E35920C1}.Release|Any CPU.ActiveCfg = Release|Any CPU {0D0C4DAD-E6BC-4504-AE3A-EEA4E35920C1}.Release|Any CPU.Build.0 = Release|Any CPU - {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}.Publish|Any CPU.ActiveCfg = Release|Any CPU - {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA}.Release|Any CPU.Build.0 = Release|Any CPU {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Debug|Any CPU.Build.0 = Debug|Any CPU {C754950A-E16C-4F96-9CC7-9328E361B5AF}.Publish|Any CPU.ActiveCfg = Release|Any CPU @@ -357,42 +392,12 @@ Global {E07608CC-D710-4655-BB9E-D22CF3CDD193}.Publish|Any CPU.Build.0 = Publish|Any CPU {E07608CC-D710-4655-BB9E-D22CF3CDD193}.Release|Any CPU.ActiveCfg = Release|Any CPU {E07608CC-D710-4655-BB9E-D22CF3CDD193}.Release|Any CPU.Build.0 = Release|Any CPU - {10E4B697-D4E8-468D-872D-49670FD150FB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {10E4B697-D4E8-468D-872D-49670FD150FB}.Debug|Any CPU.Build.0 = Debug|Any CPU - {10E4B697-D4E8-468D-872D-49670FD150FB}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {10E4B697-D4E8-468D-872D-49670FD150FB}.Publish|Any CPU.Build.0 = Publish|Any CPU - {10E4B697-D4E8-468D-872D-49670FD150FB}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{10E4B697-D4E8-468D-872D-49670FD150FB}.Release|Any CPU.Build.0 = Release|Any CPU - {D4540A0F-98E3-4E70-9093-1948AE5B2AAD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D4540A0F-98E3-4E70-9093-1948AE5B2AAD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D4540A0F-98E3-4E70-9093-1948AE5B2AAD}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {D4540A0F-98E3-4E70-9093-1948AE5B2AAD}.Publish|Any CPU.Build.0 = Publish|Any CPU - {D4540A0F-98E3-4E70-9093-1948AE5B2AAD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D4540A0F-98E3-4E70-9093-1948AE5B2AAD}.Release|Any CPU.Build.0 = Release|Any CPU - {3DC4DBD8-20A5-4937-B4F5-BB5E24E7A567}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3DC4DBD8-20A5-4937-B4F5-BB5E24E7A567}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3DC4DBD8-20A5-4937-B4F5-BB5E24E7A567}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {3DC4DBD8-20A5-4937-B4F5-BB5E24E7A567}.Publish|Any CPU.Build.0 = Publish|Any CPU - {3DC4DBD8-20A5-4937-B4F5-BB5E24E7A567}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3DC4DBD8-20A5-4937-B4F5-BB5E24E7A567}.Release|Any CPU.Build.0 = Release|Any CPU {5CB78CE4-895B-4A14-98AA-716A37DEEBB1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {5CB78CE4-895B-4A14-98AA-716A37DEEBB1}.Debug|Any CPU.Build.0 = Debug|Any CPU {5CB78CE4-895B-4A14-98AA-716A37DEEBB1}.Publish|Any CPU.ActiveCfg = Debug|Any CPU {5CB78CE4-895B-4A14-98AA-716A37DEEBB1}.Publish|Any CPU.Build.0 = Debug|Any CPU {5CB78CE4-895B-4A14-98AA-716A37DEEBB1}.Release|Any CPU.ActiveCfg = Release|Any CPU {5CB78CE4-895B-4A14-98AA-716A37DEEBB1}.Release|Any CPU.Build.0 = Release|Any CPU - {F224B869-FA0E-4EEE-A6BF-C2D61FF8E731}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {F224B869-FA0E-4EEE-A6BF-C2D61FF8E731}.Debug|Any CPU.Build.0 = Debug|Any CPU - {F224B869-FA0E-4EEE-A6BF-C2D61FF8E731}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {F224B869-FA0E-4EEE-A6BF-C2D61FF8E731}.Publish|Any CPU.Build.0 = Publish|Any CPU - {F224B869-FA0E-4EEE-A6BF-C2D61FF8E731}.Release|Any CPU.ActiveCfg = Release|Any CPU - {F224B869-FA0E-4EEE-A6BF-C2D61FF8E731}.Release|Any CPU.Build.0 = Release|Any CPU - {CC77DCFA-A419-4202-A98A-868CDF457792}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CC77DCFA-A419-4202-A98A-868CDF457792}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CC77DCFA-A419-4202-A98A-868CDF457792}.Publish|Any CPU.ActiveCfg = Release|Any CPU - {CC77DCFA-A419-4202-A98A-868CDF457792}.Publish|Any CPU.Build.0 = Release|Any CPU - {CC77DCFA-A419-4202-A98A-868CDF457792}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CC77DCFA-A419-4202-A98A-868CDF457792}.Release|Any CPU.Build.0 = Release|Any CPU {8B754E80-7A97-4585-8D7E-1D588FA5F727}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {8B754E80-7A97-4585-8D7E-1D588FA5F727}.Debug|Any CPU.Build.0 = Debug|Any CPU {8B754E80-7A97-4585-8D7E-1D588FA5F727}.Publish|Any CPU.ActiveCfg = Debug|Any CPU @@ -401,10 +406,100 @@ Global {8B754E80-7A97-4585-8D7E-1D588FA5F727}.Release|Any CPU.Build.0 = Release|Any CPU {E91365A1-8B01-4AB8-825F-67E3515EADCD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E91365A1-8B01-4AB8-825F-67E3515EADCD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E91365A1-8B01-4AB8-825F-67E3515EADCD}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {E91365A1-8B01-4AB8-825F-67E3515EADCD}.Publish|Any CPU.Build.0 = Debug|Any CPU + {E91365A1-8B01-4AB8-825F-67E3515EADCD}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {E91365A1-8B01-4AB8-825F-67E3515EADCD}.Publish|Any CPU.Build.0 = Publish|Any CPU {E91365A1-8B01-4AB8-825F-67E3515EADCD}.Release|Any CPU.ActiveCfg = Release|Any CPU {E91365A1-8B01-4AB8-825F-67E3515EADCD}.Release|Any CPU.Build.0 = Release|Any CPU + 
{348BBF45-23B4-4599-83A6-8AE1795227FB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {348BBF45-23B4-4599-83A6-8AE1795227FB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {348BBF45-23B4-4599-83A6-8AE1795227FB}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {348BBF45-23B4-4599-83A6-8AE1795227FB}.Publish|Any CPU.Build.0 = Publish|Any CPU + {348BBF45-23B4-4599-83A6-8AE1795227FB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {348BBF45-23B4-4599-83A6-8AE1795227FB}.Release|Any CPU.Build.0 = Release|Any CPU + {6009CC87-32F1-4282-88BB-8E5A7BA12925}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6009CC87-32F1-4282-88BB-8E5A7BA12925}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6009CC87-32F1-4282-88BB-8E5A7BA12925}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {6009CC87-32F1-4282-88BB-8E5A7BA12925}.Publish|Any CPU.Build.0 = Publish|Any CPU + {6009CC87-32F1-4282-88BB-8E5A7BA12925}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6009CC87-32F1-4282-88BB-8E5A7BA12925}.Release|Any CPU.Build.0 = Release|Any CPU + {B0646036-0C50-4F66-B479-ADA9C1166816}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B0646036-0C50-4F66-B479-ADA9C1166816}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B0646036-0C50-4F66-B479-ADA9C1166816}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {B0646036-0C50-4F66-B479-ADA9C1166816}.Publish|Any CPU.Build.0 = Publish|Any CPU + {B0646036-0C50-4F66-B479-ADA9C1166816}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B0646036-0C50-4F66-B479-ADA9C1166816}.Release|Any CPU.Build.0 = Release|Any CPU + {4AD4E731-16E7-4A0E-B403-6C96459F989B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4AD4E731-16E7-4A0E-B403-6C96459F989B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4AD4E731-16E7-4A0E-B403-6C96459F989B}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {4AD4E731-16E7-4A0E-B403-6C96459F989B}.Publish|Any CPU.Build.0 = Publish|Any CPU + {4AD4E731-16E7-4A0E-B403-6C96459F989B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4AD4E731-16E7-4A0E-B403-6C96459F989B}.Release|Any CPU.Build.0 = Release|Any CPU + {E576E260-4030-4C4C-B207-CA3B684E9669}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E576E260-4030-4C4C-B207-CA3B684E9669}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E576E260-4030-4C4C-B207-CA3B684E9669}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {E576E260-4030-4C4C-B207-CA3B684E9669}.Publish|Any CPU.Build.0 = Publish|Any CPU + {E576E260-4030-4C4C-B207-CA3B684E9669}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E576E260-4030-4C4C-B207-CA3B684E9669}.Release|Any CPU.Build.0 = Release|Any CPU + {352DAE69-E31C-44E3-A35F-E0342A955869}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {352DAE69-E31C-44E3-A35F-E0342A955869}.Debug|Any CPU.Build.0 = Debug|Any CPU + {352DAE69-E31C-44E3-A35F-E0342A955869}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {352DAE69-E31C-44E3-A35F-E0342A955869}.Publish|Any CPU.Build.0 = Publish|Any CPU + {352DAE69-E31C-44E3-A35F-E0342A955869}.Release|Any CPU.ActiveCfg = Release|Any CPU + {352DAE69-E31C-44E3-A35F-E0342A955869}.Release|Any CPU.Build.0 = Release|Any CPU + {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Publish|Any CPU.Build.0 = Debug|Any CPU + {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Release|Any CPU.Build.0 = Release|Any CPU + {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{5438D1E3-E03D-444B-BBBA-478F93161AA8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Publish|Any CPU.Build.0 = Publish|Any CPU + {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Release|Any CPU.Build.0 = Release|Any CPU + {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Publish|Any CPU.Build.0 = Debug|Any CPU + {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Release|Any CPU.Build.0 = Release|Any CPU + {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Publish|Any CPU.Build.0 = Publish|Any CPU + {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Release|Any CPU.Build.0 = Release|Any CPU + {3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3}.Publish|Any CPU.Build.0 = Debug|Any CPU + {3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3}.Release|Any CPU.Build.0 = Release|Any CPU + {731CC542-8BE9-42D4-967D-99206EC2B310}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {731CC542-8BE9-42D4-967D-99206EC2B310}.Debug|Any CPU.Build.0 = Debug|Any CPU + {731CC542-8BE9-42D4-967D-99206EC2B310}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {731CC542-8BE9-42D4-967D-99206EC2B310}.Publish|Any CPU.Build.0 = Debug|Any CPU + {731CC542-8BE9-42D4-967D-99206EC2B310}.Release|Any CPU.ActiveCfg = Release|Any CPU + {731CC542-8BE9-42D4-967D-99206EC2B310}.Release|Any CPU.Build.0 = Release|Any CPU + {A8E0D3B2-49D7-4DF6-BF91-B234C1C5E25D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A8E0D3B2-49D7-4DF6-BF91-B234C1C5E25D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A8E0D3B2-49D7-4DF6-BF91-B234C1C5E25D}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {A8E0D3B2-49D7-4DF6-BF91-B234C1C5E25D}.Publish|Any CPU.Build.0 = Debug|Any CPU + {A8E0D3B2-49D7-4DF6-BF91-B234C1C5E25D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A8E0D3B2-49D7-4DF6-BF91-B234C1C5E25D}.Release|Any CPU.Build.0 = Release|Any CPU + {87AB5AF5-5783-4372-9789-664895E0A2FF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {87AB5AF5-5783-4372-9789-664895E0A2FF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {87AB5AF5-5783-4372-9789-664895E0A2FF}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {87AB5AF5-5783-4372-9789-664895E0A2FF}.Publish|Any CPU.Build.0 = Debug|Any CPU + {87AB5AF5-5783-4372-9789-664895E0A2FF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {87AB5AF5-5783-4372-9789-664895E0A2FF}.Release|Any CPU.Build.0 = Release|Any CPU + {95CAA25F-A0DE-4A5B-92BA-7D56C0E822A8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {95CAA25F-A0DE-4A5B-92BA-7D56C0E822A8}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{95CAA25F-A0DE-4A5B-92BA-7D56C0E822A8}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {95CAA25F-A0DE-4A5B-92BA-7D56C0E822A8}.Publish|Any CPU.Build.0 = Publish|Any CPU + {95CAA25F-A0DE-4A5B-92BA-7D56C0E822A8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {95CAA25F-A0DE-4A5B-92BA-7D56C0E822A8}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -412,8 +507,6 @@ Global GlobalSection(NestedProjects) = preSolution {A284C7EB-2248-4A75-B112-F5DCDE65410D} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {47C6F821-5103-431F-B3B8-A2868A68BB78} = {FA3720F1-C99A-49B2-9577-A940257098BF} - {3EB61E99-C39B-4620-9482-F8DA18E48525} = {FA3720F1-C99A-49B2-9577-A940257098BF} - {34A7F1EF-D243-4160-A413-D713FEABCD94} = {FA3720F1-C99A-49B2-9577-A940257098BF} {E4B777A1-28E1-41BE-96AE-7F3EC61FD5D4} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {F94D1938-9DB7-4B24-9FF3-166DDFD96330} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} {689A5041-BAE7-448F-9BDC-4672E96249AA} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} @@ -425,45 +518,61 @@ Global {F2A1F81E-700E-4C0E-B021-B9EF29AA20BD} = {9ECD1AA0-75B3-4E25-B0B5-9F0945B64974} {0247C2C9-86C3-45BA-8873-28B0948EDC0C} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {EB3FC57F-E591-4C88-BCD5-B6A1BC635168} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {5DEBAA62-F117-496A-8778-FED3604B70E2} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {EC004F12-2F60-4EDD-B3CD-3A504900D929} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {C9F957FA-A70F-4A6D-8F95-23FCD7F4FB87} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {3720F5ED-FB4D-485E-8A93-CDE60DEF0805} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {185E0CE8-C2DA-4E4C-A491-E8EB40316315} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {AFA81EB7-F869-467D-8A90-744305D80AAC} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {5DEBAA62-F117-496A-8778-FED3604B70E2} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {EC004F12-2F60-4EDD-B3CD-3A504900D929} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {C9F957FA-A70F-4A6D-8F95-23FCD7F4FB87} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {3720F5ED-FB4D-485E-8A93-CDE60DEF0805} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {185E0CE8-C2DA-4E4C-A491-E8EB40316315} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {AFA81EB7-F869-467D-8A90-744305D80AAC} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} {627742DB-1E52-468A-99BD-6FF1A542D25B} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {E3299033-EB81-4C4C-BCD9-E8DC40937969} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {078F96B4-09E1-4E0E-B214-F71A4F4BF633} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {F51017A9-15C8-472D-893C-080046D710A6} = {078F96B4-09E1-4E0E-B214-F71A4F4BF633} - {EC3BB6D1-2FB2-4702-84C6-F791DE533ED4} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {4D226C2F-AE9F-4EFB-AF2D-45C8FE5CB34E} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {EC3BB6D1-2FB2-4702-84C6-F791DE533ED4} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {4D226C2F-AE9F-4EFB-AF2D-45C8FE5CB34E} = {24503383-A8C4-4255-9998-28D70FE8E99A} {E52F805C-794A-4CA9-B684-DFF358B18820} = {9ECD1AA0-75B3-4E25-B0B5-9F0945B64974} - {136823BE-8665-4D57-87E0-EF41535539E2} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {136823BE-8665-4D57-87E0-EF41535539E2} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} {4D3DAE63-41C6-4E1C-A35A-E77BDFC40675} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} - {6AAB0620-33A1-4A98-A63B-6560B9BA47A4} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {4D91A3E0-C404-495B-AD4A-411C4E83CF54} = {FA3720F1-C99A-49B2-9577-A940257098BF} - {50FAE231-6F24-4779-9D02-12ABBC9A49E2} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + 
{6AAB0620-33A1-4A98-A63B-6560B9BA47A4} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {50FAE231-6F24-4779-9D02-12ABBC9A49E2} = {24503383-A8C4-4255-9998-28D70FE8E99A} {5C246969-D794-4EC3-8E8F-F90D4D166420} = {4D3DAE63-41C6-4E1C-A35A-E77BDFC40675} {958AD708-F048-4FAF-94ED-D2F2B92748B9} = {4D3DAE63-41C6-4E1C-A35A-E77BDFC40675} {29E7D971-1308-4171-9872-E8E4669A1134} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} {B00AD427-0047-4850-BEF9-BA8237EA9D8B} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} - {DB950192-30F1-48B1-88D7-F43FECCA1A1C} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} {1C19D805-3573-4477-BF07-40180FCDE1BD} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} {3CDE10B2-AE8F-4FC4-8D55-92D4AD32E144} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} + {E85EA4D0-BB7E-4DFD-882F-A76EB8C0B8FF} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} {0D0C4DAD-E6BC-4504-AE3A-EEA4E35920C1} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} - {E6EDAB8F-3406-4DBF-9AAB-DF40DC2CA0FA} = {FA3720F1-C99A-49B2-9577-A940257098BF} {C754950A-E16C-4F96-9CC7-9328E361B5AF} = {FA3720F1-C99A-49B2-9577-A940257098BF} - {E07608CC-D710-4655-BB9E-D22CF3CDD193} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {10E4B697-D4E8-468D-872D-49670FD150FB} = {078F96B4-09E1-4E0E-B214-F71A4F4BF633} - {D4540A0F-98E3-4E70-9093-1948AE5B2AAD} = {078F96B4-09E1-4E0E-B214-F71A4F4BF633} - {3DC4DBD8-20A5-4937-B4F5-BB5E24E7A567} = {078F96B4-09E1-4E0E-B214-F71A4F4BF633} + {E07608CC-D710-4655-BB9E-D22CF3CDD193} = {24503383-A8C4-4255-9998-28D70FE8E99A} {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} {5CB78CE4-895B-4A14-98AA-716A37DEEBB1} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} {A21FAC7C-0C09-4EAD-843B-926ACEF73C80} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} - {F224B869-FA0E-4EEE-A6BF-C2D61FF8E731} = {A21FAC7C-0C09-4EAD-843B-926ACEF73C80} - {CC77DCFA-A419-4202-A98A-868CDF457792} = {A21FAC7C-0C09-4EAD-843B-926ACEF73C80} - {8B754E80-7A97-4585-8D7E-1D588FA5F727} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {8B754E80-7A97-4585-8D7E-1D588FA5F727} = {24503383-A8C4-4255-9998-28D70FE8E99A} {E91365A1-8B01-4AB8-825F-67E3515EADCD} = {D6D598DF-C17C-46F4-B2B9-CDE82E2DE132} + {A2357CF8-3BB9-45A1-93F1-B366C9B63658} = {831DDCA2-7D2C-4C31-80DB-6BDB3E1F7AE0} + {348BBF45-23B4-4599-83A6-8AE1795227FB} = {A21FAC7C-0C09-4EAD-843B-926ACEF73C80} + {6009CC87-32F1-4282-88BB-8E5A7BA12925} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {B0646036-0C50-4F66-B479-ADA9C1166816} = {078F96B4-09E1-4E0E-B214-F71A4F4BF633} + {4AD4E731-16E7-4A0E-B403-6C96459F989B} = {9ECD1AA0-75B3-4E25-B0B5-9F0945B64974} + {E576E260-4030-4C4C-B207-CA3B684E9669} = {9ECD1AA0-75B3-4E25-B0B5-9F0945B64974} + {352DAE69-E31C-44E3-A35F-E0342A955869} = {A21FAC7C-0C09-4EAD-843B-926ACEF73C80} + {A7DAB812-81CF-4931-B38C-468A3261C4C8} = {4D3DAE63-41C6-4E1C-A35A-E77BDFC40675} + {3F4E0DC5-2241-4EF2-9F69-E7EC7834D349} = {A7DAB812-81CF-4931-B38C-468A3261C4C8} + {873281F6-9C31-4641-98AE-F1D2844DE9F0} = {A7DAB812-81CF-4931-B38C-468A3261C4C8} + {3FC4A81B-8ABE-473F-BC7C-6F4885775534} = {A21FAC7C-0C09-4EAD-843B-926ACEF73C80} + {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {24503383-A8C4-4255-9998-28D70FE8E99A} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {5438D1E3-E03D-444B-BBBA-478F93161AA8} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} + {4AD80279-9AC1-476F-8103-E6CD5E4FD525} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} + {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} + {3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} + {731CC542-8BE9-42D4-967D-99206EC2B310} = 
{A2357CF8-3BB9-45A1-93F1-B366C9B63658} + {A8E0D3B2-49D7-4DF6-BF91-B234C1C5E25D} = {FA3720F1-C99A-49B2-9577-A940257098BF} + {87AB5AF5-5783-4372-9789-664895E0A2FF} = {FA3720F1-C99A-49B2-9577-A940257098BF} + {95CAA25F-A0DE-4A5B-92BA-7D56C0E822A8} = {9ECD1AA0-75B3-4E25-B0B5-9F0945B64974} + {EB2C141A-AE5F-4080-8790-13EB16323CEF} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} + {607DD6FA-FA0D-45E6-80BA-22A373609E89} = {5C246969-D794-4EC3-8E8F-F90D4D166420} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git a/dotnet/SK-dotnet.sln.DotSettings b/dotnet/SK-dotnet.sln.DotSettings index 78893c5aab58..e0c9ed70c24e 100644 --- a/dotnet/SK-dotnet.sln.DotSettings +++ b/dotnet/SK-dotnet.sln.DotSettings @@ -135,6 +135,8 @@ 2 False True + True + Console True True True @@ -227,6 +229,7 @@ public void It$SOMENAME$() True True True + True True True \ No newline at end of file diff --git a/dotnet/SK-dotnet.v3.ncrunchsolution b/dotnet/SK-dotnet.v3.ncrunchsolution new file mode 100644 index 000000000000..46ef15ac141c --- /dev/null +++ b/dotnet/SK-dotnet.v3.ncrunchsolution @@ -0,0 +1,5 @@ + + + True + + \ No newline at end of file diff --git a/dotnet/code-coverage.ps1 b/dotnet/code-coverage.ps1 new file mode 100644 index 000000000000..108dbdffa776 --- /dev/null +++ b/dotnet/code-coverage.ps1 @@ -0,0 +1,60 @@ +# This script is for local use to analyze code coverage in more detail using HTML report. + +Param( + [switch]$ProdPackagesOnly = $false +) + +# Generate a timestamp for the current date and time +$timestamp = Get-Date -Format "yyyyMMdd-HHmmss" + +# Define paths +$scriptPath = Get-Item -Path $PSScriptRoot +$coverageOutputPath = Join-Path $scriptPath "TestResults\Coverage\$timestamp" +$reportOutputPath = Join-Path $scriptPath "TestResults\Reports\$timestamp" + +# Create output directories +New-Item -ItemType Directory -Force -Path $coverageOutputPath +New-Item -ItemType Directory -Force -Path $reportOutputPath + +# Find tests for projects ending with 'UnitTests.csproj' +$testProjects = Get-ChildItem $scriptPath -Filter "*UnitTests.csproj" -Recurse + +foreach ($project in $testProjects) { + $testProjectPath = $project.FullName + Write-Host "Running tests for project: $($testProjectPath)" + + # Run tests + dotnet test $testProjectPath ` + --collect:"XPlat Code Coverage" ` + --results-directory:$coverageOutputPath ` + +} + +# Install required tools +& dotnet tool install -g coverlet.console +& dotnet tool install -g dotnet-reportgenerator-globaltool + +# Generate HTML report +if ($ProdPackagesOnly) { + $assemblies = @( + "+Microsoft.SemanticKernel.Abstractions", + "+Microsoft.SemanticKernel.Core", + "+Microsoft.SemanticKernel.PromptTemplates.Handlebars", + "+Microsoft.SemanticKernel.Connectors.OpenAI", + "+Microsoft.SemanticKernel.Yaml" + ) + + $assemblyFilters = $assemblies -join ";" + + # Generate report for production assemblies only + & reportgenerator -reports:"$coverageOutputPath/**/coverage.cobertura.xml" -targetdir:$reportOutputPath -reporttypes:Html -assemblyfilters:$assemblyFilters +} +else { + & reportgenerator -reports:"$coverageOutputPath/**/coverage.cobertura.xml" -targetdir:$reportOutputPath -reporttypes:Html +} + +Write-Host "Code coverage report generated at: $reportOutputPath" + +# Open report +$reportIndexHtml = Join-Path $reportOutputPath "index.html" +Invoke-Item -Path $reportIndexHtml diff --git a/dotnet/docs/EXPERIMENTS.md b/dotnet/docs/EXPERIMENTS.md new file mode 100644 index 000000000000..50f99e702de2 --- /dev/null 
+++ b/dotnet/docs/EXPERIMENTS.md @@ -0,0 +1,67 @@ +# Experiments + +The following capabilities are marked experimental in the .NET SDK. Once the APIs for these features are stable, the experimental attribute will be removed. In the meantime, these features are subject to change. + +You can use the following diagnostic IDs to ignore warnings or errors for a particular experimental feature. For example, to ignore warnings for the embedding services, add `SKEXP0001` to your list of ignored warnings in your .NET project file as well as the ID for the embedding service you want to use. For example: + +```xml + <PropertyGroup> + <NoWarn>SKEXP0001,SKEXP0011</NoWarn> + </PropertyGroup> +``` + +## Core + +- SKEXP0001: Embedding services +- SKEXP0002: Image services +- SKEXP0003: Memory connectors +- SKEXP0004: Kernel Filters + +## OpenAI and Azure OpenAI services + +- SKEXP0010: Azure OpenAI with your data service +- SKEXP0011: OpenAI embedding service +- SKEXP0012: OpenAI image service +- SKEXP0013: OpenAI parameters +- SKEXP0014: OpenAI chat history extension + +## Memory connectors + +- SKEXP0020: Hugging Face AI connector +- SKEXP0021: Azure AI Search memory connector +- SKEXP0022: Chroma memory connector +- SKEXP0023: DuckDB memory connector +- SKEXP0024: Kusto memory connector +- SKEXP0025: Milvus memory connector +- SKEXP0026: Qdrant memory connector +- SKEXP0027: Redis memory connector +- SKEXP0028: Sqlite memory connector +- SKEXP0029: Weaviate memory connector +- SKEXP0030: MongoDB memory connector +- SKEXP0031: Pinecone memory connector +- SKEXP0032: Postgres memory connector + +## Functions + +- SKEXP0040: GRPC functions +- SKEXP0041: Markdown functions +- SKEXP0042: OpenAPI functions + +## Out-of-the-box plugins + +- SKEXP0050: Core plugins +- SKEXP0051: Document plugins +- SKEXP0052: Memory plugins +- SKEXP0053: Microsoft 365 plugins +- SKEXP0054: Web plugins +- SKEXP0055: Text chunker plugin + +## Planners + +- SKEXP0060: Handlebars planner +- SKEXP0061: OpenAI Stepwise planner + +## Experiments + +- SKEXP0101: Experiment with Assistants +- SKEXP0102: Experiment with Flow Orchestration diff --git a/dotnet/docs/TELEMETRY.md b/dotnet/docs/TELEMETRY.md index a031ffb26a1f..e88b47a03069 --- a/dotnet/docs/TELEMETRY.md +++ b/dotnet/docs/TELEMETRY.md @@ -3,7 +3,7 @@ Telemetry in Semantic Kernel (SK) .NET implementation includes _logging_, _metering_ and _tracing_. The code is instrumented using native .NET instrumentation tools, which means that it's possible to use different monitoring platforms (e.g. Application Insights, Prometheus, Grafana etc.). -Code example using Application Insights can be found [here](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/ApplicationInsightsExample/Program.cs). +Code example using Application Insights can be found [here](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/TelemetryExample). ## Logging @@ -18,15 +18,16 @@ Log levels used in SK: - Prompt (template and rendered version) for AI to create a plan - Created plan with function arguments (arguments may contain sensitive data) - Prompt (template and rendered version) for AI to execute a function + - Arguments to functions (arguments may contain sensitive data) - Debug - contains more detailed messages without sensitive data. Can be enabled in production environments. - Information (default) - log level that is enabled by default and provides information about general flow of the application.
Contains following data: - AI model used to create a plan - Plan creation status (Success/Failed) - - Plan creation execution time (in milliseconds) + - Plan creation execution time (in seconds) - Created plan without function arguments - AI model used to execute a function - Function execution status (Success/Failed) - - Function execution time (in milliseconds) + - Function execution time (in seconds) - Warning - includes information about unusual events that don't cause the application to fail. - Error - used for logging exception details. @@ -38,85 +39,67 @@ Enable logging for Kernel instance: var kernel = new KernelBuilder().WithLoggerFactory(loggerFactory); ``` -Enable logging for Planner instance (_metering_ and _tracing_ will be enabled as well): - -```csharp -var planner = new SequentialPlanner(kernel, plannerConfig).WithInstrumentation(loggerFactory); -``` +All kernel functions and planners will be instrumented. It includes _logs_, _metering_ and _tracing_. ### Log Filtering Configuration Log filtering configuration has been refined to strike a balance between visibility and relevance: ```csharp +// Add OpenTelemetry as a logging provider +builder.AddOpenTelemetry(options => +{ + options.AddAzureMonitorLogExporter(options => options.ConnectionString = connectionString); + // Format log messages. This is default to false. + options.IncludeFormattedMessage = true; +}); builder.AddFilter("Microsoft", LogLevel.Warning); builder.AddFilter("Microsoft.SemanticKernel", LogLevel.Critical); builder.AddFilter("Microsoft.SemanticKernel.Reliability", LogLevel.Information); ``` +> Read more at: https://github.com/open-telemetry/opentelemetry-dotnet/blob/main/docs/logs/customizing-the-sdk/README.md + ## Metering Metering is implemented with `Meter` class from `System.Diagnostics.Metrics` namespace. Available meters: -- _Microsoft.SemanticKernel.Planning.Action.InstrumentedActionPlanner_ - captures metrics for `ActionPlanner`. List of metrics: - - `SK.ActionPlanner.CreatePlan.ExecutionTime` - execution time of plan creation (in milliseconds) -- _Microsoft.SemanticKernel.Planning.Sequential.InstrumentedSequentialPlanner_ - captures metrics for `SequentialPlanner`. List of metrics: - - `SK.SequentialPlanner.CreatePlan.ExecutionTime` - execution time of plan creation (in milliseconds) -- _Microsoft.SemanticKernel.Planning.Stepwise.StepwisePlanner_ - captures metrics for `StepwisePlanner`. List of metrics: - - `SK.StepwisePlanner.CreatePlan.ExecutionTime` - execution time of plan creation (in milliseconds) -- _Microsoft.SemanticKernel.Planning.Plan_ - captures metrics for `Plan`. List of metrics: - - `SK.Plan.Execution.ExecutionTime` - plan execution time (in milliseconds) - - `SK.Plan.Execution.ExecutionTotal` - total number of plan executions - - `SK.Plan.Execution.ExecutionSuccess` - number of successful plan executions - - `SK.Plan.Execution.ExecutionFailure` - number of failed plan executions -- _Microsoft.SemanticKernel.SKFunction_ - captures metrics for `SKFunction`. List of metrics: - - `SK..ExecutionTime` - function execution time (in milliseconds) - - `SK..ExecutionTotal` - total number of function executions - - `SK..ExecutionSuccess` - number of successful function executions - - `SK..ExecutionFailure` - number of failed function executions -- _Microsoft.SemanticKernel.Connectors.AI.OpenAI_ - captures metrics for OpenAI functionality. List of metrics: - - `SK.Connectors.OpenAI.PromptTokens` - number of prompt tokens used. 
- - `SK.Connectors.OpenAI.CompletionTokens` - number of completion tokens used. - - `SK.Connectors.OpenAI.TotalTokens` - total number of tokens used. - -### Examples - -Depending on monitoring tool, there are different ways how to subscribe to available meters. Following example shows how to subscribe to available meters and export metrics to Application Insights using `MeterListener`: +- _Microsoft.SemanticKernel.Planning_ - contains all metrics related to planning. List of metrics: + - `semantic_kernel.planning.create_plan.duration` (Histogram) - execution time of plan creation (in seconds) + - `semantic_kernel.planning.invoke_plan.duration` (Histogram) - execution time of plan execution (in seconds) +- _Microsoft.SemanticKernel_ - captures metrics for `KernelFunction`. List of metrics: + - `semantic_kernel.function.invocation.duration` (Histogram) - function execution time (in seconds) + - `semantic_kernel.function.streaming.duration` (Histogram) - function streaming execution time (in seconds) + - `semantic_kernel.function.invocation.token_usage.prompt` (Histogram) - number of prompt tokens used (only for `KernelFunctionFromPrompt`) + - `semantic_kernel.function.invocation.token_usage.completion` (Histogram) - number of completion tokens used (only for `KernelFunctionFromPrompt`) +- _Microsoft.SemanticKernel.Connectors.OpenAI_ - captures metrics for OpenAI functionality. List of metrics: + - `semantic_kernel.connectors.openai.tokens.prompt` (Counter) - number of prompt tokens used. + - `semantic_kernel.connectors.openai.tokens.completion` (Counter) - number of completion tokens used. + - `semantic_kernel.connectors.openai.tokens.total` (Counter) - total number of tokens used. + +Measurements are associated with tags that allow the data to be categorized for analysis: ```csharp -var meterListener = new MeterListener(); - -meterListener.InstrumentPublished = (instrument, listener) => -{ - if (instrument.Meter.Name.StartsWith("Microsoft.SemanticKernel", StringComparison.Ordinal)) - { - listener.EnableMeasurementEvents(instrument); - } -}; - -// Set callback to specific numeric type - double. -meterListener.SetMeasurementEventCallback((instrument, measurement, tags, state) => -{ - // Export to Application Insights using telemetry client instance - telemetryClient.GetMetric(instrument.Name).TrackValue(measurement); -}); - -meterListener.Start(); +TagList tags = new() { { "semantic_kernel.function.name", this.Name } }; +s_invocationDuration.Record(duration.TotalSeconds, in tags); ``` -It's possible to control for what meters to subscribe. For example, following condition will allow to subscribe to all meters in Semantic Kernel: +### [Examples](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/TelemetryExample/Program.cs) + +Depending on the monitoring tool, there are different ways to subscribe to available meters. The following example shows how to subscribe to available meters and export metrics to Application Insights using `OpenTelemetry.Sdk`: ```csharp -instrument.Meter.Name.StartsWith("Microsoft.SemanticKernel", StringComparison.Ordinal) +using var meterProvider = Sdk.CreateMeterProviderBuilder() + .AddMeter("Microsoft.SemanticKernel*") + .AddAzureMonitorMetricExporter(options => options.ConnectionString = connectionString) + .Build(); ``` -It's also possible to subscribe to specific meter.
Following condition will allow to subscribe to meter for `SKFunction` only: +> Read more at: https://learn.microsoft.com/en-us/azure/azure-monitor/app/opentelemetry-enable?tabs=net -```csharp -instrument.Meter.Name.Equals("Microsoft.SemanticKernel.SKFunction", StringComparison.Ordinal) -``` +> Read more at: https://github.com/open-telemetry/opentelemetry-dotnet/blob/main/docs/metrics/customizing-the-sdk/README.md ## Tracing @@ -124,33 +107,18 @@ Tracing is implemented with `Activity` class from `System.Diagnostics` namespace Available activity sources: -- _Microsoft.SemanticKernel.Planning.Action.InstrumentedActionPlanner_ - creates activities for `ActionPlanner`. -- _Microsoft.SemanticKernel.Planning.Sequential.InstrumentedSequentialPlanner_ - creates activities for `SequentialPlanner`. -- _Microsoft.SemanticKernel.Planning.Stepwise.StepwisePlanner_ - creates activities for `StepwisePlanner`. -- _Microsoft.SemanticKernel.Planning.Plan_ - creates activities for `Plan`. -- _Microsoft.SemanticKernel.SKFunction_ - creates activities for `SKFunction`. +- _Microsoft.SemanticKernel.Planning_ - creates activities for all planners. +- _Microsoft.SemanticKernel_ - creates activities for `KernelFunction`. ### Examples -Subscribe to available activity sources using `ActivityListener`: +Subscribe to available activity sources using `OpenTelemetry.Sdk`: ```csharp -var activityListener = new ActivityListener(); - -activityListener.ShouldListenTo = - activitySource => activitySource.Name.StartsWith("Microsoft.SemanticKernel", StringComparison.Ordinal); - -ActivitySource.AddActivityListener(activityListener); +using var traceProvider = Sdk.CreateTracerProviderBuilder() + .AddSource("Microsoft.SemanticKernel*") + .AddAzureMonitorTraceExporter(options => options.ConnectionString = connectionString) + .Build(); ``` -Following condition will allow to subscribe to all activity sources in Semantic Kernel: - -```csharp -activitySource.Name.StartsWith("Microsoft.SemanticKernel", StringComparison.Ordinal) -``` - -It's also possible to subscribe to specific activity source. Following condition will allow to subscribe to activity source for `SKFunction` only: - -```csharp -activitySource.Name.Equals("Microsoft.SemanticKernel.SKFunction", StringComparison.Ordinal) -``` +> Read more at: https://github.com/open-telemetry/opentelemetry-dotnet/blob/main/docs/trace/customizing-the-sdk/README.md diff --git a/dotnet/notebooks/00-getting-started.ipynb b/dotnet/notebooks/00-getting-started.ipynb index 597e68c3a3ed..f850d4d20190 100644 --- a/dotnet/notebooks/00-getting-started.ipynb +++ b/dotnet/notebooks/00-getting-started.ipynb @@ -8,7 +8,7 @@ "#### Watch the Getting Started Quick Start [Video](https://aka.ms/SK-Getting-Started-Notebook)\n", "\n", "> [!IMPORTANT]\n", - "> You will need an [.Net 7 SDK](https://dotnet.microsoft.com/en-us/download) and [Polyglot](https://marketplace.visualstudio.com/items?itemName=ms-dotnettools.dotnet-interactive-vscode) to get started with this notebook using .Net Interactive" + "> You will need an [.NET 8 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0) and [Polyglot](https://marketplace.visualstudio.com/items?itemName=ms-dotnettools.dotnet-interactive-vscode) to get started with this notebook using .NET Interactive." 
] }, { @@ -61,7 +61,7 @@ "outputs": [], "source": [ "// Import Semantic Kernel\n", - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"" + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"" ] }, { @@ -86,9 +86,10 @@ "outputs": [], "source": [ "using Microsoft.SemanticKernel;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", "\n", "//Create Kernel builder\n", - "var builder = new KernelBuilder();" + "var builder = Kernel.CreateBuilder();" ] }, { @@ -108,11 +109,11 @@ "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", "if (useAzureOpenAI)\n", - " builder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", "else\n", - " builder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", "\n", - "IKernel kernel = builder.Build();" + "var kernel = builder.Build();" ] }, { @@ -136,33 +137,20 @@ }, "outputs": [], "source": [ - "// Load the Plugins Directory\n", - "var pluginsDirectory = Path.Combine(System.IO.Directory.GetCurrentDirectory(), \"..\", \"..\", \"samples\", \"plugins\");\n", + "// FunPlugin directory path\n", + "var funPluginDirectoryPath = Path.Combine(System.IO.Directory.GetCurrentDirectory(), \"..\", \"..\", \"samples\", \"plugins\", \"FunPlugin\");\n", "\n", "// Load the FunPlugin from the Plugins Directory\n", - "var funPluginFunctions = kernel.ImportSemanticFunctionsFromDirectory(pluginsDirectory, \"FunPlugin\");\n", + "var funPluginFunctions = kernel.ImportPluginFromPromptDirectory(funPluginDirectoryPath);\n", + "\n", + "// Construct arguments\n", + "var arguments = new KernelArguments() { [\"input\"] = \"time travel to dinosaur age\" };\n", "\n", "// Run the Function called Joke\n", - "var result = await kernel.RunAsync(\"time travel to dinosaur age\", funPluginFunctions[\"Joke\"]);\n", - "var resultString = result.GetValue();\n", + "var result = await kernel.InvokeAsync(funPluginFunctions[\"Joke\"], arguments);\n", "\n", "// Return the result to the Notebook\n", - "Console.WriteLine(resultString);" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**Next Steps**: You know the basics, let's try this in a sample app so you can learn the core concepts!\n", - "\n", - "Sample app learning examples:\n", - "- [Simple chat summary](../../samples/apps/chat-summary-webapp-react/README.md) (**Recommended**) – learn how basic semantic functions can be added to an app\n", - "- [Book creator](../../samples/apps/book-creator-webapp-react/README.md) – learn how Planner and chaining of semantic functions can be used in your app\n", - "- [Authentication and APIs](../../samples/dotnet/MsGraphPluginsExample/README.md) – learn how to connect to external API's with authentication while using Semantic Kernel\n", - "- [GitHub repository Q&A](../../samples/apps/github-qna-webapp-react/README.md) - Use embeddings and memory to store and query your data\n", - "- [Copilot Chat](../../samples/apps/copilot-chat-app/README.md) – Build your own chatbot based on Semantic Kernel" + "Console.WriteLine(result);" ] } ], diff --git a/dotnet/notebooks/01-basic-loading-the-kernel.ipynb b/dotnet/notebooks/01-basic-loading-the-kernel.ipynb index 01b143634017..a5f6d01dc289 100644 --- a/dotnet/notebooks/01-basic-loading-the-kernel.ipynb +++ b/dotnet/notebooks/01-basic-loading-the-kernel.ipynb @@ -32,7 +32,7 @@ }, "outputs": [], "source": [ - "#r \"nuget: 
Microsoft.SemanticKernel, 1.0.0-beta1\"" + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"" ] }, { @@ -40,7 +40,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "After adding the nuget package, you can instantiate the kernel in a few ways, depending on your use case.\n" + "After adding the nuget package, you can instantiate the kernel:\n" ] }, { @@ -60,36 +60,19 @@ "outputs": [], "source": [ "using Microsoft.SemanticKernel;\n", - "\n", - "// Simple instance\n", - "IKernel kernel_1 = KernelBuilder.Create();" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ "using Microsoft.Extensions.Logging;\n", "using Microsoft.Extensions.Logging.Abstractions;\n", + "using Microsoft.Extensions.DependencyInjection;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", "\n", "// Inject your logger \n", "// see Microsoft.Extensions.Logging.ILogger @ https://learn.microsoft.com/dotnet/core/extensions/logging\n", "ILoggerFactory myLoggerFactory = NullLoggerFactory.Instance;\n", - "IKernel kernel_2 = new KernelBuilder()\n", - " .WithLoggerFactory(myLoggerFactory)\n", - " .Build();" + "\n", + "var builder = Kernel.CreateBuilder();\n", + "builder.Services.AddSingleton(myLoggerFactory);\n", + "\n", + "var kernel = builder.Build();" ] }, { @@ -120,14 +103,14 @@ }, "outputs": [], "source": [ - "Kernel.Builder\n", - ".WithAzureChatCompletionService(\n", + "Kernel.CreateBuilder()\n", + ".AddAzureOpenAIChatCompletion(\n", " \"my-finetuned-model\", // Azure OpenAI *Deployment Name*\n", " \"https://contoso.openai.azure.com/\", // Azure OpenAI *Endpoint*\n", " \"...your Azure OpenAI Key...\", // Azure OpenAI *Key*\n", " serviceId: \"Azure_curie\" // alias used in the prompt templates' config.json\n", ")\n", - ".WithOpenAIChatCompletionService(\n", + ".AddOpenAIChatCompletion(\n", " \"gpt-3.5-turbo\", // OpenAI Model Name\n", " \"...your OpenAI API Key...\", // OpenAI API key\n", " \"...your OpenAI Org ID...\", // *optional* OpenAI Organization ID\n", @@ -144,35 +127,7 @@ "is also the \"**default**\" used in these scenarios:\n", "\n", "* a prompt configuration doesn't specify which AI backend to use\n", - "* a prompt configuration requires a backend unknown to the kernel\n", - "\n", - "The default can be set programmatically:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "Kernel.Builder\n", - ".WithOpenAIChatCompletionService(\n", - " \"gpt-3.5-turbo\", // OpenAI Model Name\n", - " \"...your OpenAI API Key...\", // OpenAI API key\n", - " \"...your OpenAI Org ID...\", // *optional* OpenAI Organization ID\n", - " \"OpenAI_davinci\", // alias used in the prompt templates' config.json\n", - " true // This flag specifies that this service is the default one.\n", - ");" + "* a prompt configuration requires a backend unknown to the kernel" ] }, { diff --git a/dotnet/notebooks/02-running-prompts-from-file.ipynb b/dotnet/notebooks/02-running-prompts-from-file.ipynb index dadfe2466a53..0a23abb9e88a 100644 --- a/dotnet/notebooks/02-running-prompts-from-file.ipynb +++ b/dotnet/notebooks/02-running-prompts-from-file.ipynb @@ -57,11 +57,13 @@ "{\n", " \"schema\": 
1,\n", " \"description\": \"Generate a funny joke\",\n", - " \"models\": [\n", + " \"execution_settings\": [\n", " {\n", - " \"max_tokens\": 500,\n", - " \"temperature\": 0.5,\n", - " \"top_p\": 0.5\n", + " \"max_tokens\": 1000,\n", + " \"temperature\": 0.9,\n", + " \"top_p\": 0.0,\n", + " \"presence_penalty\": 0.0,\n", + " \"frequency_penalty\": 0.0\n", " }\n", " ]\n", "}\n", @@ -91,22 +93,24 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", "\n", "#!import config/Settings.cs\n", "\n", "using Microsoft.SemanticKernel;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", "\n", - "var builder = new KernelBuilder();\n", + "var builder = Kernel.CreateBuilder();\n", "\n", "// Configure AI backend used by the kernel\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", + "\n", "if (useAzureOpenAI)\n", - " builder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", "else\n", - " builder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", "\n", - "IKernel kernel = builder.Build();" + "var kernel = builder.Build();" ] }, { @@ -130,10 +134,11 @@ }, "outputs": [], "source": [ - "// note: using plugins from the repo\n", - "var pluginsDirectory = Path.Combine(System.IO.Directory.GetCurrentDirectory(), \"..\", \"..\", \"samples\", \"plugins\");\n", + "// FunPlugin directory path\n", + "var funPluginDirectoryPath = Path.Combine(System.IO.Directory.GetCurrentDirectory(), \"..\", \"..\", \"samples\", \"plugins\", \"FunPlugin\");\n", "\n", - "var funPluginFunctions = kernel.ImportSemanticFunctionsFromDirectory(pluginsDirectory, \"FunPlugin\");" + "// Load the FunPlugin from the Plugins Directory\n", + "var funPluginFunctions = kernel.ImportPluginFromPromptDirectory(funPluginDirectoryPath);" ] }, { @@ -157,10 +162,14 @@ }, "outputs": [], "source": [ - "var result = await kernel.RunAsync(\"time travel to dinosaur age\", funPluginFunctions[\"Joke\"]);\n", - "var resultString = result.GetValue();\n", + "// Construct arguments\n", + "var arguments = new KernelArguments() { [\"input\"] = \"time travel to dinosaur age\" };\n", + "\n", + "// Run the Function called Joke\n", + "var result = await kernel.InvokeAsync(funPluginFunctions[\"Joke\"], arguments);\n", "\n", - "Console.WriteLine(resultString);" + "// Return the result to the Notebook\n", + "Console.WriteLine(result);" ] }, { diff --git a/dotnet/notebooks/03-semantic-function-inline.ipynb b/dotnet/notebooks/03-semantic-function-inline.ipynb index d5326783be7b..133bcf8ee21c 100644 --- a/dotnet/notebooks/03-semantic-function-inline.ipynb +++ b/dotnet/notebooks/03-semantic-function-inline.ipynb @@ -27,7 +27,7 @@ "For now we'll use only the `{{$input}}` variable, and see more complex templates later.\n", "\n", "Almost all semantic function prompts have a reference to `{{$input}}`, which is the default way\n", - "a user can import content from the context variables." + "a user can import content from the kernel arguments." 
] }, { @@ -51,24 +51,26 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", "\n", "#!import config/Settings.cs\n", "\n", "using Microsoft.SemanticKernel;\n", - "using Microsoft.SemanticKernel.SemanticFunctions;\n", - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI;\n", + "using Microsoft.SemanticKernel.Connectors.OpenAI;\n", + "using Microsoft.SemanticKernel.TemplateEngine;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", "\n", - "var builder = new KernelBuilder();\n", + "var builder = Kernel.CreateBuilder();\n", "\n", - "// Configure AI backend used by the kernel\n", + "// Configure AI service credentials used by the kernel\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", + "\n", "if (useAzureOpenAI)\n", - " builder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", "else\n", - " builder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", "\n", - "IKernel kernel = builder.Build();" + "var kernel = builder.Build();" ] }, { @@ -120,15 +122,12 @@ }, "outputs": [], "source": [ - "var aiRequestSettings = new OpenAIRequestSettings \n", + "var executionSettings = new OpenAIPromptExecutionSettings \n", "{\n", " MaxTokens = 2000,\n", " Temperature = 0.2,\n", " TopP = 0.5\n", - "};\n", - "\n", - "var promptConfig = new PromptTemplateConfig();\n", - "promptConfig.ModelSettings.Add(aiRequestSettings);" + "};" ] }, { @@ -137,7 +136,9 @@ "metadata": {}, "source": [ "The following code prepares an instance of the template, passing in the TXT and configuration above, \n", - "and a couple of other parameters (how to render the TXT and how the template can access other functions)." + "and a couple of other parameters (how to render the TXT and how the template can access other functions).\n", + "\n", + "This allows to see the prompt before it's sent to AI." ] }, { @@ -153,11 +154,14 @@ }, "outputs": [], "source": [ - "var promptTemplate = new PromptTemplate(\n", - " skPrompt, // Prompt template defined in natural language\n", - " promptConfig, // Prompt configuration\n", - " kernel // SK instance\n", - ");" + "var promptTemplateConfig = new PromptTemplateConfig(skPrompt);\n", + "\n", + "var promptTemplateFactory = new KernelPromptTemplateFactory();\n", + "var promptTemplate = promptTemplateFactory.Create(promptTemplateConfig);\n", + "\n", + "var renderedPrompt = await promptTemplate.RenderAsync(kernel);\n", + "\n", + "Console.WriteLine(renderedPrompt);" ] }, { @@ -181,9 +185,7 @@ }, "outputs": [], "source": [ - "var functionConfig = new SemanticFunctionConfig(promptConfig, promptTemplate);\n", - "\n", - "var summaryFunction = kernel.RegisterSemanticFunction(\"MyPlugin\", \"Summary\", functionConfig);" + "var summaryFunction = kernel.CreateFunctionFromPrompt(skPrompt, executionSettings);" ] }, { @@ -241,10 +243,9 @@ }, "outputs": [], "source": [ - "var summaryResult = await kernel.RunAsync(input, summaryFunction);\n", - "var summary = summaryResult.GetValue();\n", + "var summaryResult = await kernel.InvokeAsync(summaryFunction, new() { [\"input\"] = input });\n", "\n", - "Console.WriteLine(summary);" + "Console.WriteLine(summaryResult);" ] }, { @@ -255,7 +256,7 @@ "The code above shows all the steps, to understand how the function is composed step by step. 
However, the kernel\n", "includes also some helpers to achieve the same more concisely.\n", "\n", - "The same function above can be created with less code:" + "The same function above can be executed with less code:" ] }, { @@ -277,12 +278,9 @@ "Summarize the content above.\n", "\"\"\";\n", "\n", - "var summaryFunction = kernel.CreateSemanticFunction(skPrompt, requestSettings: new OpenAIRequestSettings { MaxTokens = 2000, Temperature = 0.2, TopP = 0.5 });\n", + "var result = await kernel.InvokePromptAsync(skPrompt, new() { [\"input\"] = input });\n", "\n", - "var summaryResult = await kernel.RunAsync(input, summaryFunction);\n", - "var summary = summaryResult.GetValue();\n", - "\n", - "Console.WriteLine(summary);" + "Console.WriteLine(result);" ] }, { @@ -309,17 +307,6 @@ }, "outputs": [], "source": [ - "var builder = new KernelBuilder();\n", - "\n", - "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", - "\n", - "if (useAzureOpenAI)\n", - " builder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", - "else\n", - " builder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", - "\n", - "var kernel = builder.Build();\n", - "\n", "string skPrompt = @\"\n", "{{$input}}\n", "\n", @@ -337,14 +324,9 @@ " does not conflict with the First or Second Law.\n", "\";\n", "\n", - "var tldrFunction = kernel.CreateSemanticFunction(skPrompt, requestSettings: new OpenAIRequestSettings { MaxTokens = 2000, Temperature = 0.2, TopP = 0.5 });\n", - "\n", - "var summaryResult = await kernel.RunAsync(textToSummarize, tldrFunction);\n", - "var summary = summaryResult.GetValue();\n", - "\n", - "Console.WriteLine(summary);\n", + "var result = await kernel.InvokePromptAsync(skPrompt, new() { [\"input\"] = textToSummarize });\n", "\n", - "// Output => Robots must not harm humans." + "Console.WriteLine(result);" ] } ], diff --git a/dotnet/notebooks/04-context-variables-chat.ipynb b/dotnet/notebooks/04-context-variables-chat.ipynb deleted file mode 100644 index f5c33f746a9d..000000000000 --- a/dotnet/notebooks/04-context-variables-chat.ipynb +++ /dev/null @@ -1,390 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Creating a basic chat experience with context variables\n", - "\n", - "In this example, we show how you can build a simple chat bot by sending and updating context with your requests. \n", - "\n", - "We introduce the Context Variables object which in this demo functions similarly as a key-value store that you can use when running the kernel.\n", - "\n", - "The context is local (i.e. in your computer's RAM) and not persisted anywhere beyond the life of this Jupyter session.\n", - "\n", - "In future examples, we will show how to persist the context on disk so that you can bring it into your applications. \n", - "\n", - "In this chat scenario, as the user talks back and forth with the bot, the context gets populated with the history of the conversation. During each new run of the kernel, the context can provide the AI with its variables' content. 
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", - "#!import config/Settings.cs\n", - "\n", - "using Microsoft.SemanticKernel;\n", - "using Microsoft.SemanticKernel.SemanticFunctions;\n", - "using Microsoft.SemanticKernel.Orchestration;\n", - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI;\n", - "\n", - "var builder = new KernelBuilder();\n", - "\n", - "// Configure AI backend used by the kernel\n", - "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", - "if (useAzureOpenAI)\n", - " builder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", - "else\n", - " builder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", - "\n", - "IKernel kernel = builder.Build();" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's define a prompt outlining a dialogue chat bot." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "const string skPrompt = @\"\n", - "ChatBot can have a conversation with you about any topic.\n", - "It can give explicit instructions or say 'I don't know' if it does not have an answer.\n", - "\n", - "{{$history}}\n", - "User: {{$userInput}}\n", - "ChatBot:\";\n", - "\n", - "var aiRequestSettings = new OpenAIRequestSettings \n", - "{\n", - " MaxTokens = 2000,\n", - " Temperature = 0.7,\n", - " TopP = 0.5\n", - "};\n", - "\n", - "var promptConfig = new PromptTemplateConfig();\n", - "promptConfig.ModelSettings.Add(aiRequestSettings);" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Register your semantic function" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var promptTemplate = new PromptTemplate(skPrompt, promptConfig, kernel);\n", - "var functionConfig = new SemanticFunctionConfig(promptConfig, promptTemplate);\n", - "var chatFunction = kernel.RegisterSemanticFunction(\"ChatBot\", \"Chat\", functionConfig);" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Initialize your context" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var context = kernel.CreateNewContext();\n", - "\n", - "var history = \"\";\n", - "context.Variables[\"history\"] = history;" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Chat with the Bot" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, 
- "outputs": [], - "source": [ - "var userInput = \"Hi, I'm looking for book suggestions\";\n", - "context.Variables[\"userInput\"] = userInput;\n", - "\n", - "var bot_answer = await chatFunction.InvokeAsync(context);" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Update the history with the output and set this as the new input value for the next request" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "history += $\"\\nUser: {userInput}\\nMelody: {bot_answer.GetValue()}\\n\";\n", - "context.Variables.Update(history);\n", - "\n", - "Console.WriteLine(context);" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Keep Chatting!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "Func Chat = async (string input) => {\n", - " // Save new message in the context variables\n", - " context.Variables[\"userInput\"] = input;\n", - "\n", - " // Process the user message and get an answer\n", - " var answer = await chatFunction.InvokeAsync(context);\n", - "\n", - " // Append the new interaction to the chat history\n", - " history += $\"\\nUser: {input}\\nMelody: {answer.GetValue()}\\n\"; \n", - " context.Variables[\"history\"] = history;\n", - " \n", - " // Show the response\n", - " Console.WriteLine(context);\n", - "};" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "await Chat(\"I would like a non-fiction book suggestion about Greece history. Please only list one book.\");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "await Chat(\"that sounds interesting, what are some of the topics I will learn about?\");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "await Chat(\"Which topic from the ones you listed do you think most people find interesting?\");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "await Chat(\"could you list some more books I could read about the topic(s) you mentioned?\");" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "After chatting for a while, we have built a growing history, which we are attaching to each prompt and which contains the full conversation. 
Let's take a look!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "Console.WriteLine(context.Variables[\"history\"]);" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".NET (C#)", - "language": "C#", - "name": ".net-csharp" - }, - "language_info": { - "file_extension": ".cs", - "mimetype": "text/x-csharp", - "name": "C#", - "pygments_lexer": "csharp", - "version": "11.0" - }, - "polyglot_notebook": { - "kernelInfo": { - "defaultKernelName": "csharp", - "items": [ - { - "aliases": [], - "name": "csharp" - } - ] - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/dotnet/notebooks/04-kernel-arguments-chat.ipynb b/dotnet/notebooks/04-kernel-arguments-chat.ipynb new file mode 100644 index 000000000000..bcd9748763d7 --- /dev/null +++ b/dotnet/notebooks/04-kernel-arguments-chat.ipynb @@ -0,0 +1,384 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Creating a basic chat experience with kernel arguments\n", + "\n", + "In this example, we show how you can build a simple chat bot by sending and updating arguments with your requests. \n", + "\n", + "We introduce the Kernel Arguments object which in this demo functions similarly as a key-value store that you can use when running the kernel. \n", + "\n", + "In this chat scenario, as the user talks back and forth with the bot, the arguments get populated with the history of the conversation. During each new run of the kernel, the arguments will be provided to the AI with content. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", + "#!import config/Settings.cs\n", + "\n", + "using Microsoft.SemanticKernel;\n", + "using Microsoft.SemanticKernel.Connectors.OpenAI;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", + "\n", + "var builder = Kernel.CreateBuilder();\n", + "\n", + "// Configure AI service credentials used by the kernel\n", + "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", + "\n", + "if (useAzureOpenAI)\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", + "else\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", + "\n", + "var kernel = builder.Build();" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's define a prompt outlining a dialogue chat bot." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "const string skPrompt = @\"\n", + "ChatBot can have a conversation with you about any topic.\n", + "It can give explicit instructions or say 'I don't know' if it does not have an answer.\n", + "\n", + "{{$history}}\n", + "User: {{$userInput}}\n", + "ChatBot:\";\n", + "\n", + "var executionSettings = new OpenAIPromptExecutionSettings \n", + "{\n", + " MaxTokens = 2000,\n", + " Temperature = 0.7,\n", + " TopP = 0.5\n", + "};" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Register your semantic function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var chatFunction = kernel.CreateFunctionFromPrompt(skPrompt, executionSettings);" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Initialize your arguments" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var history = \"\";\n", + "var arguments = new KernelArguments()\n", + "{\n", + " [\"history\"] = history\n", + "};" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Chat with the Bot" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var userInput = \"Hi, I'm looking for book suggestions\";\n", + "arguments[\"userInput\"] = userInput;\n", + "\n", + "var bot_answer = await chatFunction.InvokeAsync(kernel, arguments);" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Update the history with the output and set this as the new input value for the next request" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "history += $\"\\nUser: {userInput}\\nAI: {bot_answer}\\n\";\n", + "arguments[\"history\"] = history;\n", + "\n", + "Console.WriteLine(history);" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Keep Chatting!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "Func Chat = async (string input) => {\n", + " // Save new message in the arguments\n", + " arguments[\"userInput\"] = input;\n", + "\n", + " // Process the user message and get an answer\n", + " var answer = await chatFunction.InvokeAsync(kernel, arguments);\n", + "\n", + " // Append the new interaction to the chat history\n", + " var result = $\"\\nUser: {input}\\nAI: {answer}\\n\";\n", + " history += result;\n", + "\n", + " arguments[\"history\"] = history;\n", + " \n", + " // Show the response\n", + " Console.WriteLine(result);\n", + "};" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "await Chat(\"I would like a non-fiction book suggestion about Greece history. Please only list one book.\");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "await Chat(\"that sounds interesting, what are some of the topics I will learn about?\");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "await Chat(\"Which topic from the ones you listed do you think most people find interesting?\");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "await Chat(\"could you list some more books I could read about the topic(s) you mentioned?\");" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "After chatting for a while, we have built a growing history, which we are attaching to each prompt and which contains the full conversation. Let's take a look!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "Console.WriteLine(history);" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".NET (C#)", + "language": "C#", + "name": ".net-csharp" + }, + "language_info": { + "file_extension": ".cs", + "mimetype": "text/x-csharp", + "name": "C#", + "pygments_lexer": "csharp", + "version": "11.0" + }, + "polyglot_notebook": { + "kernelInfo": { + "defaultKernelName": "csharp", + "items": [ + { + "aliases": [], + "name": "csharp" + } + ] + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/dotnet/notebooks/05-using-the-planner.ipynb b/dotnet/notebooks/05-using-the-planner.ipynb index 652884a33773..51e3b057ae71 100644 --- a/dotnet/notebooks/05-using-the-planner.ipynb +++ b/dotnet/notebooks/05-using-the-planner.ipynb @@ -25,27 +25,25 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Planners.Handlebars, 1.0.1-preview\"\n", "\n", "#!import config/Settings.cs\n", "#!import config/Utils.cs\n", "\n", "using Microsoft.SemanticKernel;\n", - "using Microsoft.SemanticKernel.Plugins.Core;\n", - "using Microsoft.SemanticKernel.Orchestration;\n", - "using Microsoft.SemanticKernel.Planning;\n", - "using Microsoft.SemanticKernel.Planners;\n", - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI;\n", + "using Microsoft.SemanticKernel.Connectors.OpenAI;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", "\n", - "var builder = new KernelBuilder();\n", + "var builder = Kernel.CreateBuilder();\n", "\n", "// Configure AI backend used by the kernel\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", "if (useAzureOpenAI)\n", - " builder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", "else\n", - " builder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", "\n", "var kernel = builder.Build();" ] @@ -55,8 +53,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Setting Up the Planner\n", - "The planner is located in the `Microsoft.SemanticKernel.Planners.Core` package and requires Orchestration" + "### Setting Up Handlebars Planner\n", + "Handlebars Planner is located in the `Microsoft.SemanticKernel.Planning.Handlebars` package." ] }, { @@ -72,8 +70,11 @@ }, "outputs": [], "source": [ - "// Load native plugin into the kernel registry, sharing its functions with prompt templates\n", - "var planner = new SequentialPlanner(kernel);" + "using Microsoft.SemanticKernel.Planning.Handlebars;\n", + "\n", + "#pragma warning disable SKEXP0060\n", + "\n", + "var planner = new HandlebarsPlanner();" ] }, { @@ -82,7 +83,7 @@ "metadata": {}, "source": [ "### Providing plugins to the planner\n", - "The planner needs to know what plugins are available to it. Here we'll give it access to the `SummarizePlugin` and `WriterPlugin` we have defined on disk." + "The planner needs to know what plugins are available to it. Here we'll import the `SummarizePlugin` and `WriterPlugin` we have defined on disk." 
] }, { @@ -99,8 +100,9 @@ "outputs": [], "source": [ "var pluginsDirectory = Path.Combine(System.IO.Directory.GetCurrentDirectory(), \"..\", \"..\", \"samples\", \"plugins\");\n", - "kernel.ImportSemanticFunctionsFromDirectory(pluginsDirectory, \"SummarizePlugin\");\n", - "kernel.ImportSemanticFunctionsFromDirectory(pluginsDirectory, \"WriterPlugin\");" + "\n", + "kernel.ImportPluginFromPromptDirectory(Path.Combine(pluginsDirectory, \"SummarizePlugin\"));\n", + "kernel.ImportPluginFromPromptDirectory(Path.Combine(pluginsDirectory, \"WriterPlugin\"));" ] }, { @@ -124,11 +126,13 @@ }, "outputs": [], "source": [ + "#pragma warning disable SKEXP0060\n", + "\n", "var ask = \"Tomorrow is Valentine's day. I need to come up with a few date ideas. My significant other likes poems so write them in the form of a poem.\";\n", - "var originalPlan = await planner.CreatePlanAsync(ask);\n", + "var originalPlan = await planner.CreatePlanAsync(kernel, ask);\n", "\n", "Console.WriteLine(\"Original plan:\\n\");\n", - "Console.WriteLine(JsonSerializer.Serialize(originalPlan, new JsonSerializerOptions { WriteIndented = true }));" + "Console.WriteLine(originalPlan);" ] }, { @@ -138,9 +142,7 @@ "source": [ "As you can see in the above plan, the Planner has taken the user's ask and converted it into a Plan object detailing how the AI would go about solving this task.\n", "\n", - "It makes use of the plugins that the Kernel has available to it and determines which functions to call in order to fulfill the user's ask.\n", - "\n", - "The output of each step of the plan gets set as `setContextVariable` which makes it available as `input` to the next plugin." + "It makes use of the plugins that the Kernel has available to it and determines which functions to call in order to fulfill the user's ask." ] }, { @@ -170,7 +172,15 @@ "\n", "Rewrite the above in the style of Shakespeare.\n", "\"\"\";\n", - "var shakespeareFunction = kernel.CreateSemanticFunction(skPrompt, \"Shakespeare\", \"ShakespearePlugin\", requestSettings: new OpenAIRequestSettings { MaxTokens = 2000, Temperature = 0.2, TopP = 0.5 });" + "\n", + "var executionSettings = new OpenAIPromptExecutionSettings \n", + "{\n", + " MaxTokens = 2000,\n", + " Temperature = 0.7,\n", + " TopP = 0.5\n", + "};\n", + "\n", + "var shakespeareFunction = kernel.CreateFunctionFromPrompt(skPrompt, executionSettings, \"Shakespeare\");" ] }, { @@ -194,13 +204,15 @@ }, "outputs": [], "source": [ + "#pragma warning disable SKEXP0060\n", + "\n", "var ask = @\"Tomorrow is Valentine's day. I need to come up with a few date ideas.\n", "She likes Shakespeare so write using his style. 
Write them in the form of a poem.\";\n", "\n", - "var newPlan = await planner.CreatePlanAsync(ask);\n", + "var newPlan = await planner.CreatePlanAsync(kernel, ask);\n", "\n", "Console.WriteLine(\"Updated plan:\\n\");\n", - "Console.WriteLine(JsonSerializer.Serialize(newPlan, new JsonSerializerOptions { WriteIndented = true }));" + "Console.WriteLine(newPlan);" ] }, { @@ -226,10 +238,12 @@ }, "outputs": [], "source": [ - "var originalPlanResult = await kernel.RunAsync(originalPlan);\n", + "#pragma warning disable SKEXP0060\n", + "\n", + "var originalPlanResult = await originalPlan.InvokeAsync(kernel, new KernelArguments());\n", "\n", "Console.WriteLine(\"Original Plan results:\\n\");\n", - "Console.WriteLine(Utils.WordWrap(originalPlanResult.GetValue(), 100));" + "Console.WriteLine(Utils.WordWrap(originalPlanResult.ToString(), 100));" ] }, { @@ -253,10 +267,12 @@ }, "outputs": [], "source": [ - "var newPlanResult = await kernel.RunAsync(newPlan);\n", + "#pragma warning disable SKEXP0060\n", + "\n", + "var newPlanResult = await newPlan.InvokeAsync(kernel, new KernelArguments());\n", "\n", "Console.WriteLine(\"New Plan results:\\n\");\n", - "Console.WriteLine(newPlanResult.GetValue());" + "Console.WriteLine(newPlanResult);" ] } ], diff --git a/dotnet/notebooks/06-memory-and-embeddings.ipynb b/dotnet/notebooks/06-memory-and-embeddings.ipynb index 69be621eb614..c6dc727cca24 100644 --- a/dotnet/notebooks/06-memory-and-embeddings.ipynb +++ b/dotnet/notebooks/06-memory-and-embeddings.ipynb @@ -10,11 +10,11 @@ "So far, we've mostly been treating the kernel as a stateless orchestration engine.\n", "We send text into a model API and receive text out. \n", "\n", - "In a [previous notebook](04-context-variables-chat.ipynb), we used `context variables` to pass in additional\n", - "text into prompts to enrich them with more context. This allowed us to create a basic chat experience. \n", + "In a [previous notebook](04-kernel-arguments-chat.ipynb), we used `kernel arguments` to pass in additional\n", + "text into prompts to enrich them with more data. This allowed us to create a basic chat experience. \n", "\n", - "However, if you solely relied on context variables, you would quickly realize that eventually your prompt\n", - "would grow so large that you would run into a the model's token limit. What we need is a way to persist state\n", + "However, if you solely relied on kernel arguments, you would quickly realize that eventually your prompt\n", + "would grow so large that you would run into the model's token limit. What we need is a way to persist state\n", "and build both short-term and long-term memory to empower even more intelligent applications. \n", "\n", "To do this, we dive into the key concept of `Semantic Memory` in the Semantic Kernel. 
" @@ -33,26 +33,26 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Plugins.Memory, 1.0.1-alpha\"\n", "#r \"nuget: System.Linq.Async, 6.0.1\"\n", "\n", "#!import config/Settings.cs\n", "\n", "using Microsoft.SemanticKernel;\n", - "using Microsoft.SemanticKernel.SemanticFunctions;\n", - "using Microsoft.SemanticKernel.Orchestration;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", "\n", - "var kernelBuilder = new KernelBuilder();\n", + "var builder = Kernel.CreateBuilder();\n", "\n", - "// Configure AI backend used by the kernel\n", + "// Configure AI service credentials used by the kernel\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", "if (useAzureOpenAI)\n", - " kernelBuilder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", "else\n", - " kernelBuilder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", "\n", - "var kernel = kernelBuilder.Build();" + "var kernel = builder.Build();" ] }, { @@ -84,8 +84,11 @@ }, "outputs": [], "source": [ - "using Microsoft.SemanticKernel.Plugins.Memory;\n", - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI;\n", + "using Microsoft.SemanticKernel.Memory;\n", + "using Microsoft.SemanticKernel.Connectors.OpenAI;\n", + "\n", + "// Memory functionality is experimental\n", + "#pragma warning disable SKEXP0003, SKEXP0011, SKEXP0052\n", "\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", @@ -93,11 +96,15 @@ "\n", "if (useAzureOpenAI)\n", "{\n", - " memoryBuilder.WithAzureTextEmbeddingGenerationService(\"text-embedding-ada-002\", azureEndpoint, apiKey);\n", + " memoryBuilder.WithAzureOpenAITextEmbeddingGeneration(\n", + " \"text-embedding-ada-002\",\n", + " azureEndpoint, \n", + " apiKey,\n", + " \"model-id\");\n", "}\n", "else\n", "{\n", - " memoryBuilder.WithOpenAITextEmbeddingGenerationService(\"text-embedding-ada-002\", apiKey);\n", + " memoryBuilder.WithOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey);\n", "}\n", "\n", "memoryBuilder.WithMemoryStore(new VolatileMemoryStore());\n", @@ -197,7 +204,7 @@ "metadata": {}, "source": [ "Let's now revisit our chat sample from the [previous notebook](04-context-variables-chat.ipynb).\n", - "If you remember, we used context variables to fill the prompt with a `history` that continuously got populated as we chatted with the bot. Let's add also memory to it!" + "If you remember, we used kernel arguments to fill the prompt with a `history` that continuously got populated as we chatted with the bot. Let's add also memory to it!" 
] }, { @@ -224,8 +231,12 @@ }, "outputs": [], "source": [ + "using Microsoft.SemanticKernel.Plugins.Memory;\n", + "\n", + "#pragma warning disable SKEXP0052\n", + "\n", "// TextMemoryPlugin provides the \"recall\" function\n", - "kernel.ImportFunctions(new TextMemoryPlugin(memory));" + "kernel.ImportPluginFromObject(new TextMemoryPlugin(memory));" ] }, { @@ -257,7 +268,7 @@ "User: {{$userInput}}\n", "ChatBot: \";\n", "\n", - "var chatFunction = kernel.CreateSemanticFunction(skPrompt, requestSettings: new OpenAIRequestSettings { MaxTokens = 200, Temperature = 0.8 });" + "var chatFunction = kernel.CreateFunctionFromPrompt(skPrompt, new OpenAIPromptExecutionSettings { MaxTokens = 200, Temperature = 0.8 });" ] }, { @@ -281,16 +292,19 @@ }, "outputs": [], "source": [ - "var context = kernel.CreateNewContext();\n", + "#pragma warning disable SKEXP0052\n", + "\n", + "var arguments = new KernelArguments();\n", "\n", - "context.Variables[\"fact1\"] = \"what is my name?\";\n", - "context.Variables[\"fact2\"] = \"where do I live?\";\n", - "context.Variables[\"fact3\"] = \"where is my family from?\";\n", - "context.Variables[\"fact4\"] = \"where have I travelled?\";\n", - "context.Variables[\"fact5\"] = \"what do I do for work?\";\n", + "arguments[\"fact1\"] = \"what is my name?\";\n", + "arguments[\"fact2\"] = \"where do I live?\";\n", + "arguments[\"fact3\"] = \"where is my family from?\";\n", + "arguments[\"fact4\"] = \"where have I travelled?\";\n", + "arguments[\"fact5\"] = \"what do I do for work?\";\n", "\n", - "context.Variables[TextMemoryPlugin.CollectionParam] = MemoryCollectionName;\n", - "context.Variables[TextMemoryPlugin.RelevanceParam] = \"0.8\";" + "arguments[TextMemoryPlugin.CollectionParam] = MemoryCollectionName;\n", + "arguments[TextMemoryPlugin.LimitParam] = \"2\";\n", + "arguments[TextMemoryPlugin.RelevanceParam] = \"0.8\";" ] }, { @@ -315,20 +329,22 @@ "outputs": [], "source": [ "var history = \"\";\n", - "context.Variables[\"history\"] = history;\n", + "arguments[\"history\"] = history;\n", "Func Chat = async (string input) => {\n", - " // Save new message in the context variables\n", - " context.Variables[\"userInput\"] = input;\n", + " // Save new message in the kernel arguments\n", + " arguments[\"userInput\"] = input;\n", "\n", " // Process the user message and get an answer\n", - " var answer = await chatFunction.InvokeAsync(context);\n", + " var answer = await chatFunction.InvokeAsync(kernel, arguments);\n", "\n", " // Append the new interaction to the chat history\n", - " history += $\"\\nUser: {input}\\nChatBot: {answer.GetValue()}\\n\";\n", - " context.Variables[\"history\"] = history;\n", + " var result = $\"\\nUser: {input}\\nChatBot: {answer}\\n\";\n", + "\n", + " history += result;\n", + " arguments[\"history\"] = history;\n", " \n", " // Show the bot response\n", - " Console.WriteLine(\"ChatBot: \" + context);\n", + " Console.WriteLine(result);\n", "};" ] }, @@ -419,10 +435,6 @@ " = \"Sample demonstrating how to create a chat plugin interfacing with ChatGPT\",\n", " [\"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Plugins/Plugins.Memory/VolatileMemoryStore.cs\"]\n", " = \"C# class that defines a volatile embedding store\",\n", - " [\"https://github.com/microsoft/semantic-kernel/tree/main/samples/dotnet/KernelHttpServer/README.md\"]\n", - " = \"README: How to set up a Semantic Kernel Service API using Azure Function Runtime v4\",\n", - " 
[\"https://github.com/microsoft/semantic-kernel/tree/main/samples/apps/chat-summary-webapp-react/README.md\"]\n", - " = \"README: README associated with a sample starter react-based chat summary webapp\",\n", "};" ] }, @@ -447,15 +459,22 @@ }, "outputs": [], "source": [ + "// Memory functionality is experimental\n", + "#pragma warning disable SKEXP0003, SKEXP0011, SKEXP0052\n", + "\n", "var memoryBuilder = new MemoryBuilder();\n", "\n", "if (useAzureOpenAI)\n", "{\n", - " memoryBuilder.WithAzureTextEmbeddingGenerationService(\"text-embedding-ada-002\", azureEndpoint, apiKey);\n", + " memoryBuilder.WithAzureOpenAITextEmbeddingGeneration(\n", + " \"text-embedding-ada-002\",\n", + " azureEndpoint, \n", + " apiKey,\n", + " \"model-id\");\n", "}\n", "else\n", "{\n", - " memoryBuilder.WithOpenAITextEmbeddingGenerationService(\"text-embedding-ada-002\", apiKey);\n", + " memoryBuilder.WithOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey);\n", "}\n", "\n", "memoryBuilder.WithMemoryStore(new VolatileMemoryStore());\n", diff --git a/dotnet/notebooks/07-DALL-E-2.ipynb b/dotnet/notebooks/07-DALL-E-2.ipynb deleted file mode 100644 index 8bdd42abde6e..000000000000 --- a/dotnet/notebooks/07-DALL-E-2.ipynb +++ /dev/null @@ -1,228 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Generating images with AI\n", - "\n", - "This notebook demonstrates how to use OpenAI DALL-E 2 to generate images, in combination with other LLM features like text and embedding generation.\n", - "\n", - "Here, we use Chat Completion to generate a random image description and DALL-E 2 to create an image from that description, showing the image inline.\n", - "\n", - "Lastly, the notebook asks the user to describe the image. The embedding of the user's description is compared to the original description, using Cosine Similarity, and returning a score from 0 to 1, where 1 means exact match." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "tags": [], - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Usual setup: importing Semantic Kernel SDK and SkiaSharp, used to display images inline.\n", - "\n", - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", - "#r \"nuget: SkiaSharp, 2.88.3\"\n", - "\n", - "#!import config/Settings.cs\n", - "#!import config/Utils.cs\n", - "#!import config/SkiaUtils.cs\n", - "\n", - "using Microsoft.SemanticKernel;\n", - "using Microsoft.SemanticKernel.AI.ImageGeneration; \n", - "using Microsoft.SemanticKernel.AI.Embeddings;\n", - "using Microsoft.SemanticKernel.AI.Embeddings.VectorOperations;\n", - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI;" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Setup, using three AI services: images, text, embedding\n", - "\n", - "The notebook uses:\n", - "\n", - "* **OpenAI Dall-E 2** to transform the image description into an image\n", - "* **text-embedding-ada-002** to compare your guess against the real image description\n", - "\n", - "**Note:**: For Azure OpenAI, your endpoint should have DALL-E API enabled." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Load OpenAI credentials from config/settings.json\n", - "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", - "\n", - "// Configure the three AI features: text embedding (using Ada), text completion (using DaVinci 3), image generation (DALL-E 2)\n", - "var builder = new KernelBuilder();\n", - "\n", - "if(useAzureOpenAI)\n", - "{\n", - " builder.WithAzureTextEmbeddingGenerationService(\"text-embedding-ada-002\", azureEndpoint, apiKey);\n", - " builder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", - " builder.WithAzureOpenAIImageGenerationService(azureEndpoint, apiKey);\n", - "}\n", - "else\n", - "{\n", - " builder.WithOpenAITextEmbeddingGenerationService(\"text-embedding-ada-002\", apiKey, orgId);\n", - " builder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", - " builder.WithOpenAIImageGenerationService(apiKey, orgId);\n", - "}\n", - " \n", - "var kernel = builder.Build();\n", - "\n", - "// Get AI service instance used to generate images\n", - "var dallE = kernel.GetService();\n", - "\n", - "// Get AI service instance used to extract embedding from a text\n", - "var textEmbedding = kernel.GetService();" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Generate a (random) image with DALL-E 2\n", - "\n", - "**genImgDescription** is a Semantic Function used to generate a random image description. \n", - "The function takes in input a random number to increase the diversity of its output.\n", - "\n", - "The random image description is then given to **Dall-E 2** asking to create an image." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "tags": [], - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Create a semantic function that generate a random image description.\n", - "var genImgDescription = kernel.CreateSemanticFunction(\n", - " \"Think about an artificial object correlated to number {{$input}}. \" +\n", - " \"Describe the image with one detailed sentence. The description cannot contain numbers.\", \n", - " requestSettings: new OpenAIRequestSettings { MaxTokens = 256, Temperature = 1 });\n", - "\n", - "var random = new Random().Next(0, 200);\n", - "var imageDescriptionResult = await kernel.RunAsync($\"{random}\", genImgDescription);\n", - "var imageDescription = imageDescriptionResult.GetValue();\n", - "\n", - "// Use DALL-E 2 to generate an image. 
OpenAI in this case returns a URL (though you can ask to return a base64 image)\n", - "var imageUrl = await dallE.GenerateImageAsync(imageDescription.Trim(), 512, 512);\n", - "\n", - "await SkiaUtils.ShowImage(imageUrl, 512, 512);" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Let's play a guessing game\n", - "\n", - "Try to guess what the image is about, describing the content.\n", - "\n", - "You'll get a score at the end 😉" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "tags": [], - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Prompt the user to guess what the image is\n", - "var guess = await InteractiveKernel.GetInputAsync(\"Describe the image in your words\");\n", - "\n", - "// Compare user guess with real description and calculate score\n", - "var origEmbedding = await textEmbedding.GenerateEmbeddingsAsync(new List { imageDescription } );\n", - "var guessEmbedding = await textEmbedding.GenerateEmbeddingsAsync(new List { guess } );\n", - "var similarity = origEmbedding.First().Span.CosineSimilarity(guessEmbedding.First().Span);\n", - "\n", - "Console.WriteLine($\"Your description:\\n{Utils.WordWrap(guess, 90)}\\n\");\n", - "Console.WriteLine($\"Real description:\\n{Utils.WordWrap(imageDescription.Trim(), 90)}\\n\");\n", - "Console.WriteLine($\"Score: {similarity:0.00}\\n\\n\");\n", - "\n", - "//Uncomment this line to see the URL provided by OpenAI\n", - "//Console.WriteLine(imageUrl);" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".NET (C#)", - "language": "C#", - "name": ".net-csharp" - }, - "language_info": { - "file_extension": ".cs", - "mimetype": "text/x-csharp", - "name": "C#", - "pygments_lexer": "csharp", - "version": "11.0" - }, - "polyglot_notebook": { - "kernelInfo": { - "defaultKernelName": "csharp", - "items": [ - { - "aliases": [], - "name": "csharp" - } - ] - } - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/dotnet/notebooks/07-DALL-E-3.ipynb b/dotnet/notebooks/07-DALL-E-3.ipynb new file mode 100644 index 000000000000..209342bcc584 --- /dev/null +++ b/dotnet/notebooks/07-DALL-E-3.ipynb @@ -0,0 +1,242 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Generating images with AI\n", + "\n", + "This notebook demonstrates how to use OpenAI DALL-E 3 to generate images, in combination with other LLM features like text and embedding generation.\n", + "\n", + "Here, we use Chat Completion to generate a random image description and DALL-E 3 to create an image from that description, showing the image inline.\n", + "\n", + "Lastly, the notebook asks the user to describe the image. The embedding of the user's description is compared to the original description, using Cosine Similarity, and returning a score from 0 to 1, where 1 means exact match." 
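As background for the scoring step described above, here is a tiny, self-contained sketch of the cosine-similarity call the updated notebook now relies on; it assumes only the `System.Numerics.Tensors` 8.0.0 package referenced in the next cell, and the sample vectors are made up.

```csharp
// Toy illustration of cosine similarity with System.Numerics.Tensors.
// Real embeddings (e.g. from text-embedding-ada-002) have 1536 dimensions, not 3.
using System;
using System.Numerics.Tensors;

float[] original = { 0.10f, 0.70f, 0.20f };
float[] guess    = { 0.15f, 0.65f, 0.25f };

// Returns a value in [-1, 1]; the closer to 1, the more similar the two vectors.
float score = TensorPrimitives.CosineSimilarity(original, guess);
Console.WriteLine($"Score: {score:0.00}");
```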
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "tags": [], + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// Usual setup: importing Semantic Kernel SDK and SkiaSharp, used to display images inline.\n", + "\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", + "#r \"nuget: System.Numerics.Tensors, 8.0.0\"\n", + "#r \"nuget: SkiaSharp, 2.88.3\"\n", + "\n", + "#!import config/Settings.cs\n", + "#!import config/Utils.cs\n", + "#!import config/SkiaUtils.cs\n", + "\n", + "using Microsoft.SemanticKernel;\n", + "using Microsoft.SemanticKernel.TextToImage;\n", + "using Microsoft.SemanticKernel.Embeddings;\n", + "using Microsoft.SemanticKernel.Connectors.OpenAI;\n", + "using System.Numerics.Tensors;" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Setup, using three AI services: images, text, embedding\n", + "\n", + "The notebook uses:\n", + "\n", + "* **OpenAI Dall-E 3** to transform the image description into an image\n", + "* **text-embedding-ada-002** to compare your guess against the real image description\n", + "\n", + "**Note:**: For Azure OpenAI, your endpoint should have DALL-E API enabled." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", + "\n", + "#pragma warning disable SKEXP0001, SKEXP0002, SKEXP0011, SKEXP0012\n", + "\n", + "// Load OpenAI credentials from config/settings.json\n", + "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", + "\n", + "// Configure the three AI features: text embedding (using Ada), chat completion, image generation (DALL-E 3)\n", + "var builder = Kernel.CreateBuilder();\n", + "\n", + "if(useAzureOpenAI)\n", + "{\n", + " builder.AddAzureOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAITextToImage(\"dall-e-3\", azureEndpoint, apiKey);\n", + "}\n", + "else\n", + "{\n", + " builder.AddOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", + " builder.AddOpenAITextToImage(apiKey, orgId);\n", + "}\n", + " \n", + "var kernel = builder.Build();\n", + "\n", + "// Get AI service instance used to generate images\n", + "var dallE = kernel.GetRequiredService();\n", + "\n", + "// Get AI service instance used to extract embedding from a text\n", + "var textEmbedding = kernel.GetRequiredService();" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Generate a (random) image with DALL-E 3\n", + "\n", + "**genImgDescription** is a Semantic Function used to generate a random image description. \n", + "The function takes in input a random number to increase the diversity of its output.\n", + "\n", + "The random image description is then given to **Dall-E 3** asking to create an image." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "tags": [], + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "#pragma warning disable SKEXP0002\n", + "\n", + "var prompt = @\"\n", + "Think about an artificial object correlated to number {{$input}}.\n", + "Describe the image with one detailed sentence. The description cannot contain numbers.\";\n", + "\n", + "var executionSettings = new OpenAIPromptExecutionSettings \n", + "{\n", + " MaxTokens = 256,\n", + " Temperature = 1\n", + "};\n", + "\n", + "// Create a semantic function that generate a random image description.\n", + "var genImgDescription = kernel.CreateFunctionFromPrompt(prompt, executionSettings);\n", + "\n", + "var random = new Random().Next(0, 200);\n", + "var imageDescriptionResult = await kernel.InvokeAsync(genImgDescription, new() { [\"input\"] = random });\n", + "var imageDescription = imageDescriptionResult.ToString();\n", + "\n", + "// Use DALL-E 3 to generate an image. OpenAI in this case returns a URL (though you can ask to return a base64 image)\n", + "var imageUrl = await dallE.GenerateImageAsync(imageDescription.Trim(), 1024, 1024);\n", + "\n", + "await SkiaUtils.ShowImage(imageUrl, 1024, 1024);" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Let's play a guessing game\n", + "\n", + "Try to guess what the image is about, describing the content.\n", + "\n", + "You'll get a score at the end 😉" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "tags": [], + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// Prompt the user to guess what the image is\n", + "var guess = await InteractiveKernel.GetInputAsync(\"Describe the image in your words\");\n", + "\n", + "// Compare user guess with real description and calculate score\n", + "var origEmbedding = await textEmbedding.GenerateEmbeddingsAsync(new List { imageDescription } );\n", + "var guessEmbedding = await textEmbedding.GenerateEmbeddingsAsync(new List { guess } );\n", + "var similarity = TensorPrimitives.CosineSimilarity(origEmbedding.First().Span, guessEmbedding.First().Span);\n", + "\n", + "Console.WriteLine($\"Your description:\\n{Utils.WordWrap(guess, 90)}\\n\");\n", + "Console.WriteLine($\"Real description:\\n{Utils.WordWrap(imageDescription.Trim(), 90)}\\n\");\n", + "Console.WriteLine($\"Score: {similarity:0.00}\\n\\n\");\n", + "\n", + "//Uncomment this line to see the URL provided by OpenAI\n", + "//Console.WriteLine(imageUrl);" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".NET (C#)", + "language": "C#", + "name": ".net-csharp" + }, + "language_info": { + "file_extension": ".cs", + "mimetype": "text/x-csharp", + "name": "C#", + "pygments_lexer": "csharp", + "version": "11.0" + }, + "polyglot_notebook": { + "kernelInfo": { + "defaultKernelName": "csharp", + "items": [ + { + "aliases": [], + "name": "csharp" + } + ] + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/dotnet/notebooks/08-chatGPT-with-DALL-E-2.ipynb b/dotnet/notebooks/08-chatGPT-with-DALL-E-2.ipynb deleted file mode 100644 index 532a7b640f89..000000000000 --- a/dotnet/notebooks/08-chatGPT-with-DALL-E-2.ipynb +++ /dev/null @@ -1,243 +0,0 @@ -{ - 
"cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Using ChatGPT with the Semantic Kernel featuring DALL-E 2\n", - "\n", - "This notebook shows how to make use of the new ChatCompletion API from OpenAI, popularized by ChatGPT. This API brings a new ChatML schema which is different from the TextCompletion API. While the text completion API expects input a prompt and returns a simple string, the chat completion API expects in input a Chat history and returns a new message:\n", - "\n", - "```\n", - "messages=[\n", - " { \"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n", - " { \"role\": \"user\", \"content\": \"Who won the world series in 2020?\"},\n", - " { \"role\": \"assistant\", \"content\": \"The Los Angeles Dodgers won the World Series in 2020.\"},\n", - " { \"role\": \"user\", \"content\": \"Where was it played?\"}\n", - "]\n", - "```\n", - "\n", - "Note that there are three message types:\n", - "\n", - "1. A System message is used to give instructions to the chat model, e.g. setting the context and the kind of conversation your app is offering.\n", - "2. User messages store the data received from the user of your app.\n", - "3. Assistant messages store the replies generated by the LLM model. \n", - "\n", - "Your app is responsible for adding information to the chat history and maintaining this object. The Chat Completion API is stateless, and returns only new messages, that your app can use, e.g. to execute commands, generate images, or simply continue the conversation." - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "When deciding between which one to use, know that ChatGPT models (i.e. gpt-3.5-turbo) are optimized for chat applications and have been fine-tuned for instruction-following and dialogue. As such, for creating semantic plugins with the Semantic Kernel, users may still find the TextCompletion model better suited for certain use cases.\n", - "\n", - "The code below shows how to setup SK with ChatGPT, how to manage the Chat history object, and to make things a little more interesting asks ChatGPT to reply with image descriptions instead so we can have a dialog using images, leveraging DALL-E 2 integration." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "tags": [], - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Usual setup: importing Semantic Kernel SDK and SkiaSharp, used to display images inline.\n", - "\n", - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", - "#r \"nuget: SkiaSharp, 2.88.3\"\n", - "\n", - "#!import config/Settings.cs\n", - "#!import config/Utils.cs\n", - "#!import config/SkiaUtils.cs\n", - "\n", - "using Microsoft.SemanticKernel;\n", - "using Microsoft.SemanticKernel.AI.ImageGeneration;\n", - "using Microsoft.SemanticKernel.AI.ChatCompletion;\n", - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI;" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The notebook uses:\n", - "\n", - "* **OpenAI ChatGPT** to chat with the user\n", - "* **OpenAI Dall-E 2** to transform messages into images" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Load OpenAI credentials from config/settings.json\n", - "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", - "\n", - "// Configure the two AI features: OpenAI Chat and DALL-E 2 for image generation\n", - "var builder = new KernelBuilder();\n", - "\n", - "if(useAzureOpenAI)\n", - "{\n", - " builder.WithAzureChatCompletionService(\"gpt-35-turbo\", azureEndpoint, apiKey);\n", - " builder.WithAzureOpenAIImageGenerationService(azureEndpoint, apiKey);\n", - "}\n", - "else\n", - "{\n", - " builder.WithOpenAIChatCompletionService(\"gpt-3.5-turbo\", apiKey, orgId);\n", - " builder.WithOpenAIImageGenerationService(apiKey, orgId);\n", - "}\n", - "\n", - "var kernel = builder.Build();\n", - "\n", - "// Get AI service instance used to generate images\n", - "var dallE = kernel.GetService();\n", - "\n", - "// Get AI service instance used to manage the user chat\n", - "var chatGPT = kernel.GetService();" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Chat configuration\n", - "\n", - "Before starting the chat, we create a new chat object with some instructions, which are included in the chat history. \n", - "\n", - "The instructions tell OpenAI what kind of chat we want to have, in this case we ask to reply with \"image descriptions\", so that we can chain ChatGPT with DALL-E 2." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "tags": [], - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion;\n", - "\n", - "var systemMessage = \"You're chatting with a user. 
Instead of replying directly to the user\"\n", - " + \" provide a description of a cartoonish image that expresses what you want to say.\"\n", - " + \" The user won't see your message, they will see only the image.\"\n", - " + \" Describe the image with details in one sentence.\";\n", - "\n", - "var chat = (OpenAIChatHistory)chatGPT.CreateNewChat(systemMessage);" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Let's chat\n", - "\n", - "Run the following code to start the chat. The chat consists of a loop with these main steps:\n", - "\n", - "1. Ask the user (you) for a message. The user enters a message. Add the user message into the Chat History object.\n", - "2. Send the chat object to AI asking to generate a response. Add the bot message into the Chat History object.\n", - "3. Show the answer to the user. In our case before showing the answer, generate an image and show that to the user too.\n", - "\n", - "*Note: to stop the chat in VS Code press ESC on the kyboard or the \"stop\" button on the left side.*" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "while (true)\n", - "{\n", - " // 1. Ask the user for a message. The user enters a message. Add the user message into the Chat History object.\n", - " var userMessage = await InteractiveKernel.GetInputAsync(\"Your message\");\n", - " Console.WriteLine($\"User: {userMessage}\");\n", - " chat.AddUserMessage(userMessage);\n", - "\n", - " // 2. Send the chat object to AI asking to generate a response. Add the bot message into the Chat History object.\n", - " string assistantReply = await chatGPT.GenerateMessageAsync(chat, new OpenAIRequestSettings());\n", - " chat.AddAssistantMessage(assistantReply);\n", - "\n", - " // 3. Show the reply as an image\n", - " Console.WriteLine($\"\\nBot:\");\n", - " var imageUrl = await dallE.GenerateImageAsync(assistantReply, 256, 256);\n", - " await SkiaUtils.ShowImage(imageUrl, 256, 256);\n", - " Console.WriteLine($\"[{assistantReply}]\\n\");\n", - "}" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".NET (C#)", - "language": "C#", - "name": ".net-csharp" - }, - "language_info": { - "file_extension": ".cs", - "mimetype": "text/x-csharp", - "name": "C#", - "pygments_lexer": "csharp", - "version": "11.0" - }, - "polyglot_notebook": { - "kernelInfo": { - "defaultKernelName": "csharp", - "items": [ - { - "aliases": [], - "name": "csharp" - } - ] - } - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/dotnet/notebooks/08-chatGPT-with-DALL-E-3.ipynb b/dotnet/notebooks/08-chatGPT-with-DALL-E-3.ipynb new file mode 100644 index 000000000000..132f663edea9 --- /dev/null +++ b/dotnet/notebooks/08-chatGPT-with-DALL-E-3.ipynb @@ -0,0 +1,247 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Using ChatGPT with the Semantic Kernel featuring DALL-E 3\n", + "\n", + "This notebook shows how to make use of the new ChatCompletion API from OpenAI, popularized by ChatGPT. This API brings a new ChatML schema which is different from the TextCompletion API. 
While the text completion API expects a prompt as input and returns a simple string, the chat completion API expects a chat history as input and returns a new message:\n", + "\n", + "```\n", + "messages=[\n", + " { \"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n", + " { \"role\": \"user\", \"content\": \"Who won the world series in 2020?\"},\n", + " { \"role\": \"assistant\", \"content\": \"The Los Angeles Dodgers won the World Series in 2020.\"},\n", + " { \"role\": \"user\", \"content\": \"Where was it played?\"}\n", + "]\n", + "```\n", + "\n", + "Note that there are three message types:\n", + "\n", + "1. A System message is used to give instructions to the chat model, e.g. setting the context and the kind of conversation your app is offering.\n", + "2. User messages store the data received from the user of your app.\n", + "3. Assistant messages store the replies generated by the LLM. \n", + "\n", + "Your app is responsible for adding information to the chat history and maintaining this object. The Chat Completion API is stateless and returns only new messages, which your app can use, e.g. to execute commands, generate images, or simply continue the conversation." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "When deciding which one to use, know that ChatGPT models (e.g. gpt-3.5-turbo) are optimized for chat applications and have been fine-tuned for instruction-following and dialogue. As such, for creating semantic plugins with the Semantic Kernel, users may still find the TextCompletion model better suited for certain use cases.\n", + "\n", + "The code below shows how to set up SK with ChatGPT, how to manage the Chat history object, and, to make things a little more interesting, asks ChatGPT to reply with image descriptions instead, so we can have a dialog using images, leveraging DALL-E 3 integration."
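For reference, the three roles above map directly onto the `ChatHistory` type that the rewritten notebook uses; a minimal sketch follows (the package and `using` directives are added in the next cell, and the example messages are the ones shown above).

```csharp
// Minimal sketch of building a chat history with system, user and assistant messages.
using Microsoft.SemanticKernel.ChatCompletion;

var messages = new ChatHistory("You are a helpful assistant.");   // system message
messages.AddUserMessage("Who won the world series in 2020?");     // user message
messages.AddAssistantMessage("The Los Angeles Dodgers won the World Series in 2020."); // assistant message
messages.AddUserMessage("Where was it played?");
```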
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "tags": [], + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// Usual setup: importing Semantic Kernel SDK and SkiaSharp, used to display images inline.\n", + "\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", + "#r \"nuget: SkiaSharp, 2.88.3\"\n", + "\n", + "#!import config/Settings.cs\n", + "#!import config/Utils.cs\n", + "#!import config/SkiaUtils.cs\n", + "\n", + "using Microsoft.SemanticKernel;\n", + "using Microsoft.SemanticKernel.TextToImage;\n", + "using Microsoft.SemanticKernel.ChatCompletion;\n", + "using Microsoft.SemanticKernel.Connectors.OpenAI;" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The notebook uses:\n", + "\n", + "* **OpenAI ChatGPT** to chat with the user\n", + "* **OpenAI Dall-E 3** to transform messages into images" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", + "\n", + "#pragma warning disable SKEXP0002, SKEXP0012\n", + "\n", + "// Load OpenAI credentials from config/settings.json\n", + "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", + "\n", + "// Configure the two AI features: OpenAI Chat and DALL-E 3 for image generation\n", + "var builder = Kernel.CreateBuilder();\n", + "\n", + "if(useAzureOpenAI)\n", + "{\n", + " builder.AddAzureOpenAIChatCompletion(\"gpt-35-turbo\", azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAITextToImage(\"dall-e-3\", azureEndpoint, apiKey);\n", + "}\n", + "else\n", + "{\n", + " builder.AddOpenAIChatCompletion(\"gpt-3.5-turbo\", apiKey, orgId);\n", + " builder.AddOpenAITextToImage(apiKey, orgId);\n", + "}\n", + "\n", + "var kernel = builder.Build();\n", + "\n", + "// Get AI service instance used to generate images\n", + "var dallE = kernel.GetRequiredService();\n", + "\n", + "// Get AI service instance used to manage the user chat\n", + "var chatGPT = kernel.GetRequiredService();" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Chat configuration\n", + "\n", + "Before starting the chat, we create a new chat object with some instructions, which are included in the chat history. \n", + "\n", + "The instructions tell OpenAI what kind of chat we want to have, in this case we ask to reply with \"image descriptions\", so that we can chain ChatGPT with DALL-E 3." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "tags": [], + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var systemMessage = \"You're chatting with a user. 
Instead of replying directly to the user\"\n", + " + \" provide a description of a cartoonish image that expresses what you want to say.\"\n", + " + \" The user won't see your message, they will see only the image.\"\n", + " + \" Describe the image with details in one sentence.\";\n", + "\n", + "var chat = new ChatHistory(systemMessage);" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Let's chat\n", + "\n", + "Run the following code to start the chat. The chat consists of a loop with these main steps:\n", + "\n", + "1. Ask the user (you) for a message. The user enters a message. Add the user message into the Chat History object.\n", + "2. Send the chat object to AI asking to generate a response. Add the bot message into the Chat History object.\n", + "3. Show the answer to the user. In our case, before showing the answer, generate an image and show that to the user too.\n", + "\n", + "*Note: to stop the chat in VS Code, press ESC on the keyboard or the \"stop\" button on the left side.*" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "#pragma warning disable SKEXP0002\n", + "\n", + "while (true)\n", + "{\n", + " // 1. Ask the user for a message. The user enters a message. Add the user message into the Chat History object.\n", + " var userMessage = await InteractiveKernel.GetInputAsync(\"Your message\");\n", + " Console.WriteLine($\"User: {userMessage}\");\n", + " chat.AddUserMessage(userMessage);\n", + "\n", + " // 2. Send the chat object to AI asking to generate a response. Add the bot message into the Chat History object.\n", + " var assistantReply = await chatGPT.GetChatMessageContentAsync(chat, new OpenAIPromptExecutionSettings());\n", + " chat.AddAssistantMessage(assistantReply.Content);\n", + "\n", + " // 3. Show the reply as an image\n", + " Console.WriteLine($\"\\nBot:\");\n", + " var imageUrl = await dallE.GenerateImageAsync(assistantReply.Content, 1024, 1024);\n", + " await SkiaUtils.ShowImage(imageUrl, 1024, 1024);\n", + " Console.WriteLine($\"[{assistantReply}]\\n\");\n", + "}" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".NET (C#)", + "language": "C#", + "name": ".net-csharp" + }, + "language_info": { + "file_extension": ".cs", + "mimetype": "text/x-csharp", + "name": "C#", + "pygments_lexer": "csharp", + "version": "11.0" + }, + "polyglot_notebook": { + "kernelInfo": { + "defaultKernelName": "csharp", + "items": [ + { + "aliases": [], + "name": "csharp" + } + ] + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/dotnet/notebooks/09-memory-with-chroma.ipynb b/dotnet/notebooks/09-memory-with-chroma.ipynb index 9e1257fd3121..65e7b67a0a94 100644 --- a/dotnet/notebooks/09-memory-with-chroma.ipynb +++ b/dotnet/notebooks/09-memory-with-chroma.ipynb @@ -9,10 +9,10 @@ "\n", "In this notebook, we show how to use [Chroma](https://www.trychroma.com/) with Semantic Kernel to create even more\n", "intelligent applications. We assume that you are already familiar with the concepts of Semantic Kernel\n", - "and memory. [Previously](04-context-variables-chat.ipynb), we have used `context variables` to pass\n", + "and memory. 
[Previously](04-kernel-arguments-chat.ipynb), we have used `kernel arguments` to pass\n", "additional text into prompts, enriching them with more context for a basic chat experience.\n", "\n", - "However, relying solely on context variables has its limitations, such as the model's token limit.\n", + "However, relying solely on kernel arguments has its limitations, such as the model's token limit.\n", "To overcome these limitations, we will use **SK Semantic Memory**, leveraging Chroma as a persistent\n", "Semantic Memory Storage.\n", "\n", @@ -38,8 +38,9 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", - "#r \"nuget: Microsoft.SemanticKernel.Connectors.Memory.Chroma, 1.0.0-beta1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Connectors.Chroma, 1.0.1-alpha\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Plugins.Memory, 1.0.1-alpha\"\n", "#r \"nuget: System.Linq.Async, 6.0.1\"\n", "\n", "#!import config/Settings.cs\n", @@ -49,21 +50,22 @@ "using System.Linq;\n", "using System.Threading.Tasks;\n", "using Microsoft.SemanticKernel;\n", - "using Microsoft.SemanticKernel.Connectors.Memory.Chroma;\n", + "using Microsoft.SemanticKernel.Connectors.Chroma;\n", "using Microsoft.SemanticKernel.Memory;\n", "using Microsoft.SemanticKernel.Plugins.Memory;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", "\n", - "var kernelBuilder = new KernelBuilder();\n", + "var builder = Kernel.CreateBuilder();\n", "\n", "// Configure AI backend used by the kernel\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", "if (useAzureOpenAI)\n", - " kernelBuilder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", "else\n", - " kernelBuilder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", "\n", - "var kernel = kernelBuilder.Build();" + "var kernel = builder.Build();" ] }, { @@ -98,7 +100,9 @@ }, "outputs": [], "source": [ - "using Microsoft.SemanticKernel.Connectors.AI.OpenAI;\n", + "#pragma warning disable SKEXP0003, SKEXP0011, SKEXP0022, SKEXP0052\n", + "\n", + "using Microsoft.SemanticKernel.Connectors.OpenAI;\n", "\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", @@ -106,11 +110,11 @@ "\n", "if (useAzureOpenAI)\n", "{\n", - " memoryBuilder.WithAzureTextEmbeddingGenerationService(\"text-embedding-ada-002\", azureEndpoint, apiKey);\n", + " memoryBuilder.WithAzureOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", azureEndpoint, apiKey, \"model-id\");\n", "}\n", "else\n", "{\n", - " memoryBuilder.WithOpenAITextEmbeddingGenerationService(\"text-embedding-ada-002\", apiKey);\n", + " memoryBuilder.WithOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey);\n", "}\n", "\n", "var chromaMemoryStore = new ChromaMemoryStore(\"http://127.0.0.1:8000\");\n", @@ -212,8 +216,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Let's now revisit our chat sample from the [previous notebook](04-context-variables-chat.ipynb).\n", - "If you remember, we used context variables to fill the prompt with a `history` that continuously got populated as we chatted with the bot. Let's add also memory to it!" 
+ "Let's now revisit our chat sample from the [previous notebook](04-kernel-arguments-chat.ipynb).\n", + "If you remember, we used kernel arguments to fill the prompt with a `history` that continuously got populated as we chatted with the bot. Let's add also memory to it!" ] }, { @@ -240,10 +244,10 @@ }, "outputs": [], "source": [ - "using Microsoft.SemanticKernel.Plugins.Memory;\n", + "#pragma warning disable SKEXP0052\n", "\n", "// TextMemoryPlugin provides the \"recall\" function\n", - "kernel.ImportFunctions(new TextMemoryPlugin(memory));" + "kernel.ImportPluginFromObject(new TextMemoryPlugin(memory));" ] }, { @@ -275,7 +279,7 @@ "User: {{$userInput}}\n", "ChatBot: \";\n", "\n", - "var chatFunction = kernel.CreateSemanticFunction(skPrompt, requestSettings: new OpenAIRequestSettings { MaxTokens = 200, Temperature = 0.8 });" + "var chatFunction = kernel.CreateFunctionFromPrompt(skPrompt, new OpenAIPromptExecutionSettings { MaxTokens = 200, Temperature = 0.8 });" ] }, { @@ -299,16 +303,19 @@ }, "outputs": [], "source": [ - "var context = kernel.CreateNewContext();\n", + "#pragma warning disable SKEXP0052\n", + "\n", + "var arguments = new KernelArguments();\n", "\n", - "context.Variables[\"fact1\"] = \"what is my name?\";\n", - "context.Variables[\"fact2\"] = \"where do I live?\";\n", - "context.Variables[\"fact3\"] = \"where is my family from?\";\n", - "context.Variables[\"fact4\"] = \"where have I travelled?\";\n", - "context.Variables[\"fact5\"] = \"what do I do for work?\";\n", + "arguments[\"fact1\"] = \"what is my name?\";\n", + "arguments[\"fact2\"] = \"where do I live?\";\n", + "arguments[\"fact3\"] = \"where is my family from?\";\n", + "arguments[\"fact4\"] = \"where have I travelled?\";\n", + "arguments[\"fact5\"] = \"what do I do for work?\";\n", "\n", - "context.Variables[TextMemoryPlugin.CollectionParam] = MemoryCollectionName;\n", - "context.Variables[TextMemoryPlugin.RelevanceParam] = \"0.6\";" + "arguments[TextMemoryPlugin.CollectionParam] = MemoryCollectionName;\n", + "arguments[TextMemoryPlugin.LimitParam] = \"2\";\n", + "arguments[TextMemoryPlugin.RelevanceParam] = \"0.8\";" ] }, { @@ -333,20 +340,22 @@ "outputs": [], "source": [ "var history = \"\";\n", - "context.Variables[\"history\"] = history;\n", + "arguments[\"history\"] = history;\n", "Func Chat = async (string input) => {\n", - " // Save new message in the context variables\n", - " context.Variables[\"userInput\"] = input;\n", + " // Save new message in the kernel arguments\n", + " arguments[\"userInput\"] = input;\n", "\n", " // Process the user message and get an answer\n", - " var answer = await chatFunction.InvokeAsync(context);\n", + " var answer = await chatFunction.InvokeAsync(kernel, arguments);\n", "\n", " // Append the new interaction to the chat history\n", - " history += $\"\\nUser: {input}\\nChatBot: {answer.GetValue()}\\n\";\n", - " context.Variables[\"history\"] = history;\n", + " var result = $\"\\nUser: {input}\\nChatBot: {answer}\\n\";\n", + "\n", + " history += result;\n", + " arguments[\"history\"] = history;\n", " \n", " // Show the bot response\n", - " Console.WriteLine(\"ChatBot: \" + context);\n", + " Console.WriteLine(result);\n", "};" ] }, @@ -437,10 +446,6 @@ " = \"Sample demonstrating how to create a chat plugin interfacing with ChatGPT\",\n", " [\"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Plugins/Plugins.Memory/VolatileMemoryStore.cs\"]\n", " = \"C# class that defines a volatile embedding store\",\n", - " 
[\"https://github.com/microsoft/semantic-kernel/tree/main/samples/dotnet/KernelHttpServer/README.md\"]\n", - " = \"README: How to set up a Semantic Kernel Service API using Azure Function Runtime v4\",\n", - " [\"https://github.com/microsoft/semantic-kernel/tree/main/samples/apps/chat-summary-webapp-react/README.md\"]\n", - " = \"README: README associated with a sample starter react-based chat summary webapp\",\n", "};" ] }, @@ -465,15 +470,17 @@ }, "outputs": [], "source": [ + "#pragma warning disable SKEXP0003, SKEXP0011, SKEXP0022, SKEXP0052\n", + "\n", "var memoryBuilder = new MemoryBuilder();\n", "\n", "if (useAzureOpenAI)\n", "{\n", - " memoryBuilder.WithAzureTextEmbeddingGenerationService(\"text-embedding-ada-002\", azureEndpoint, apiKey);\n", + " memoryBuilder.WithAzureOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", azureEndpoint, apiKey, \"model-id\");\n", "}\n", "else\n", "{\n", - " memoryBuilder.WithOpenAITextEmbeddingGenerationService(\"text-embedding-ada-002\", apiKey);\n", + " memoryBuilder.WithOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey);\n", "}\n", "\n", "var chromaMemoryStore = new ChromaMemoryStore(\"http://127.0.0.1:8000\");\n", diff --git a/dotnet/notebooks/10-BingSearch-using-kernel.ipynb b/dotnet/notebooks/10-BingSearch-using-kernel.ipynb index 17dbafa49f86..0f69120e665f 100644 --- a/dotnet/notebooks/10-BingSearch-using-kernel.ipynb +++ b/dotnet/notebooks/10-BingSearch-using-kernel.ipynb @@ -35,29 +35,28 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.0.0-beta1\"\n", - "#r \"nuget: Microsoft.SemanticKernel.Plugins.Web, 1.0.0-beta1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.0.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Plugins.Web, 1.0.1-alpha\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Plugins.Core, 1.0.1-alpha\"\n", "\n", "#!import config/Settings.cs\n", "#!import config/Utils.cs\n", "\n", "using Microsoft.SemanticKernel;\n", "using Microsoft.SemanticKernel.Plugins.Core;\n", - "using Microsoft.SemanticKernel.Orchestration;\n", - "using Microsoft.SemanticKernel.Planning;\n", - "using Microsoft.SemanticKernel.Planners;\n", "using Microsoft.SemanticKernel.TemplateEngine;\n", "using InteractiveKernel = Microsoft.DotNet.Interactive.Kernel;\n", + "using Kernel = Microsoft.SemanticKernel.Kernel;\n", "\n", - "var builder = new KernelBuilder();\n", + "var builder = Kernel.CreateBuilder();\n", "\n", "// Configure AI backend used by the kernel\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", "if (useAzureOpenAI)\n", - " builder.WithAzureChatCompletionService(model, azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(model, azureEndpoint, apiKey);\n", "else\n", - " builder.WithOpenAIChatCompletionService(model, apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(model, apiKey, orgId);\n", "\n", "var kernel = builder.Build();" ] @@ -134,19 +133,18 @@ }, "outputs": [], "source": [ - "private static async Task Example1Async(IKernel kernel)\n", + "private static async Task Example1Async(Microsoft.SemanticKernel.Kernel kernel)\n", "{\n", " Console.WriteLine(\"Example 1\");\n", "\n", " // Run \n", " var question = \"What is quantum tunnelling\";\n", - " var function = kernel.Functions.GetFunction(\"bing\", \"search\");\n", - " var bingResult = await kernel.RunAsync(question, function);\n", - " var bingResultString = bingResult.GetValue();\n", + " var function = kernel.Plugins[\"bing\"][\"search\"];\n", + " var bingResult = await 
kernel.InvokeAsync(function, new() { [\"query\"] = question });\n", "\n", " Console.WriteLine(question);\n", " Console.WriteLine(\"----\");\n", - " Console.WriteLine(bingResultString);\n", + " Console.WriteLine(bingResult);\n", " Console.WriteLine();\n", "\n", " /* OUTPUT:\n", @@ -175,29 +173,20 @@ }, "outputs": [], "source": [ - "private static async Task Example2Async(IKernel kernel)\n", + "private static async Task Example2Async(Microsoft.SemanticKernel.Kernel kernel)\n", "{\n", " Console.WriteLine(\"Example 2\");\n", "\n", " //The following function only works in interactive notebooks\n", " string question = await InteractiveKernel.GetInputAsync(\"Please ask your question\"); \n", "\n", - " var function = kernel.Functions.GetFunction(\"bing\", \"search\");\n", - " var bingResult = await kernel.RunAsync(question, function);\n", - " var bingResultString = bingResult.GetValue();\n", + " var function = kernel.Plugins[\"bing\"][\"search\"];\n", + " var bingResult = await kernel.InvokeAsync(function, new() { [\"query\"] = question });\n", "\n", - " Console.WriteLine(bingResultString);\n", + " Console.WriteLine(bingResult);\n", "}" ] }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Just uncomment the examples to run them in the following code:" - ] - }, { "cell_type": "code", "execution_count": null, @@ -211,13 +200,15 @@ }, "outputs": [], "source": [ + "#pragma warning disable SKEXP0054\n", + "\n", "// Load Bing plugin\n", "var bingConnector = new BingConnector(BING_KEY);\n", "\n", - "kernel.ImportFunctions(new WebSearchEnginePlugin(bingConnector), \"bing\");\n", + "kernel.ImportPluginFromObject(new WebSearchEnginePlugin(bingConnector), \"bing\");\n", "\n", - "//await Example1Async(kernel);\n", - "//await Example2Async(kernel);" + "await Example1Async(kernel);\n", + "await Example2Async(kernel);" ] } ], diff --git a/dotnet/notebooks/README.md b/dotnet/notebooks/README.md index b7df44d5b309..0f6eaf207608 100644 --- a/dotnet/notebooks/README.md +++ b/dotnet/notebooks/README.md @@ -5,10 +5,10 @@ the Semantic Kernel. The notebooks are organized in order of increasing complexi To run the notebooks, we recommend the following steps: -- [Install .NET 7](https://dotnet.microsoft.com/download/dotnet/7.0) +- [Install .NET 8](https://dotnet.microsoft.com/download/dotnet/8.0) - [Install Visual Studio Code (VS Code)](https://code.visualstudio.com) - Launch VS Code and [install the "Polyglot" extension](https://marketplace.visualstudio.com/items?itemName=ms-dotnettools.dotnet-interactive-vscode). - Min version required: v1.0.4102020 (Feb 2022). + Min version required: v1.0.4606021 (Dec 2023). The steps above should be sufficient, you can now **open all the C# notebooks in VS Code**. @@ -57,18 +57,20 @@ For a quick dive, look at the [getting started notebook](00-getting-started.ipyn 1. [Loading and configuring Semantic Kernel](01-basic-loading-the-kernel.ipynb) 2. [Running AI prompts from file](02-running-prompts-from-file.ipynb) 3. [Creating Semantic Functions at runtime (i.e. inline functions)](03-semantic-function-inline.ipynb) -4. [Using Context Variables to Build a Chat Experience](04-context-variables-chat.ipynb) +4. [Using Kernel Arguments to Build a Chat Experience](04-kernel-arguments-chat.ipynb) 5. [Creating and Executing Plans](05-using-the-planner.ipynb) 6. [Building Memory with Embeddings](06-memory-and-embeddings.ipynb) -7. [Creating images with DALL-E 2](07-DALL-E-2.ipynb) -8. [Chatting with ChatGPT and Images](08-chatGPT-with-DALL-E-2.ipynb) +7. 
[Creating images with DALL-E 3](07-DALL-E-3.ipynb) +8. [Chatting with ChatGPT and Images](08-chatGPT-with-DALL-E-3.ipynb) +9. [Building Semantic Memory with Chroma](09-memory-with-chroma.ipynb) +10. [BingSearch using Kernel](10-BingSearch-using-kernel.ipynb) # Run notebooks in the browser with JupyterLab You can run the notebooks also in the browser with JupyterLab. These steps should be sufficient to start: -Install Python 3, Pip and .NET 7 in your system, then: +Install Python 3, Pip and .NET 8 in your system, then: pip install jupyterlab dotnet tool install -g Microsoft.dotnet-interactive diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props index 1b1145588e47..25eda02953c0 100644 --- a/dotnet/nuget/nuget-package.props +++ b/dotnet/nuget/nuget-package.props @@ -1,11 +1,21 @@ - 1.0.0-beta4 + 1.3.1 + + $(VersionPrefix)-$(VersionSuffix) + $(VersionPrefix) Debug;Release;Publish true + + 1.3.0 + + $(NoWarn);CP0003 + + $(NoWarn);CP1002 + Microsoft Microsoft diff --git a/dotnet/samples/.editorconfig b/dotnet/samples/.editorconfig new file mode 100644 index 000000000000..7fb69b748935 --- /dev/null +++ b/dotnet/samples/.editorconfig @@ -0,0 +1,25 @@ +# Setting errors for SDK projects under samples folder +[*.cs] +dotnet_diagnostic.CA2007.severity = error # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = error # Use .ConfigureAwait(bool) +dotnet_diagnostic.IDE1006.severity = error # Naming rule violations +dotnet_diagnostic.RCS1110.severity = none # Declare type inside namespace +dotnet_diagnostic.CA2201.severity = none # Exception is not sufficiently specific +dotnet_diagnostic.CS1998.severity = none # Async method lacks 'await' operators and will run synchronously +dotnet_diagnostic.CA1851.severity = none # Possible multiple enumerations of 'IEnumerable' collection +dotnet_diagnostic.CA1819.severity = none # Properties should not return arrays +dotnet_diagnostic.CA1812.severity = none # Avoid uninstantiated internal classes +dotnet_diagnostic.VSTHRD002.severity = none # Avoid problematic synchronous waits +dotnet_diagnostic.CS1587.severity = none # XML comment is not placed on a valid language element +dotnet_diagnostic.CA1031.severity = none # Do not catch general exception types +dotnet_diagnostic.CA2000.severity = none # Dispose objects before losing scope +dotnet_diagnostic.RCS1110.severity = none # Declare type inside namespace +dotnet_diagnostic.CA5394.severity = none # Do not use insecure randomness + +# Resharper disabled rules: https://www.jetbrains.com/help/resharper/Reference__Code_Inspections_CSHARP.html#CodeSmell +resharper_condition_is_always_true_or_false_according_to_nullable_api_contract_highlighting = none # ConditionIsAlwaysTrueOrFalseAccordingToNullableAPIContract +resharper_inconsistent_naming_highlighting = none # InconsistentNaming +resharper_equal_expression_comparison_highlighting = none # EqualExpressionComparison +resharper_check_namespace_highlighting = none # CheckNamespace +resharper_arrange_object_creation_when_type_not_evident_highlighting = none # Disable "Arrange object creation when type is not evident" highlighting +resharper_arrange_this_qualifier_highlighting = none # Disable "Arrange 'this.' 
qualifier" highlighting \ No newline at end of file diff --git a/dotnet/samples/ApplicationInsightsExample/ApplicationInsightsExample.csproj b/dotnet/samples/ApplicationInsightsExample/ApplicationInsightsExample.csproj deleted file mode 100644 index 3957425441f5..000000000000 --- a/dotnet/samples/ApplicationInsightsExample/ApplicationInsightsExample.csproj +++ /dev/null @@ -1,31 +0,0 @@ - - - - net6.0 - LatestMajor - Exe - 10 - enable - disable - false - - CA1050;CA1707;CA2007;VSTHRD111 - 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 - - - - - - - - - - - - - - - - - - diff --git a/dotnet/samples/ApplicationInsightsExample/Program.cs b/dotnet/samples/ApplicationInsightsExample/Program.cs deleted file mode 100644 index 0432ef34cb83..000000000000 --- a/dotnet/samples/ApplicationInsightsExample/Program.cs +++ /dev/null @@ -1,236 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using System.Threading.Tasks; -using Microsoft.ApplicationInsights; -using Microsoft.ApplicationInsights.DataContracts; -using Microsoft.ApplicationInsights.Extensibility; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.ApplicationInsights; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Planners; -using Microsoft.SemanticKernel.Planning; -using Microsoft.SemanticKernel.Plugins.Core; -using Microsoft.SemanticKernel.Plugins.Web; -using Microsoft.SemanticKernel.Plugins.Web.Bing; -using NCalcPlugins; - -/// -/// Example of telemetry in Semantic Kernel using Application Insights within console application. -/// -public sealed class Program -{ - /// - /// Log level to be used by . - /// - /// - /// is set by default. - /// will enable logging with more detailed information, including sensitive data. Should not be used in production. - /// - private const LogLevel MinLogLevel = LogLevel.Information; - - /// - /// The main entry point for the application. - /// - /// A representing the asynchronous operation. - public static async Task Main() - { - var serviceProvider = GetServiceProvider(); - - var telemetryClient = serviceProvider.GetRequiredService(); - var loggerFactory = serviceProvider.GetRequiredService(); - - using var meterListener = new MeterListener(); - using var activityListener = new ActivityListener(); - - ConfigureMetering(meterListener, telemetryClient); - ConfigureTracing(activityListener, telemetryClient); - - var kernel = GetKernel(loggerFactory); - var planner = GetSequentialPlanner(kernel, loggerFactory); - - try - { - using var operation = telemetryClient.StartOperation("ApplicationInsights.Example"); - - Console.WriteLine("Operation/Trace ID:"); - Console.WriteLine(Activity.Current?.TraceId); - - var plan = await planner.CreatePlanAsync("Write a poem about John Doe, then translate it into Italian."); - - Console.WriteLine("Original plan:"); - Console.WriteLine(plan.ToPlanString()); - - var result = await kernel.RunAsync(plan); - - Console.WriteLine("Result:"); - Console.WriteLine(result.GetValue()); - } - finally - { - // Explicitly call Flush() followed by sleep is required in console apps. - // This is to ensure that even if application terminates, telemetry is sent to the back-end. 
- telemetryClient.Flush(); - await Task.Delay(5000); - } - } - - private static ServiceProvider GetServiceProvider() - { - var services = new ServiceCollection(); - - ConfigureApplicationInsightsTelemetry(services); - - return services.BuildServiceProvider(); - } - - private static void ConfigureApplicationInsightsTelemetry(ServiceCollection services) - { - string instrumentationKey = Env.Var("ApplicationInsights__InstrumentationKey"); - - services.AddLogging(loggingBuilder => - { - loggingBuilder.AddFilter(logLevel => logLevel == MinLogLevel); - loggingBuilder.SetMinimumLevel(MinLogLevel); - }); - - services.AddApplicationInsightsTelemetryWorkerService(options => - { - options.ConnectionString = $"InstrumentationKey={instrumentationKey}"; - }); - } - - private static IKernel GetKernel(ILoggerFactory loggerFactory) - { - var folder = RepoFiles.SamplePluginsPath(); - var bingConnector = new BingConnector(Env.Var("Bing__ApiKey")); - var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); - - var kernel = new KernelBuilder() - .WithLoggerFactory(loggerFactory) - .WithAzureChatCompletionService( - Env.Var("AzureOpenAI__ChatDeploymentName"), - Env.Var("AzureOpenAI__Endpoint"), - Env.Var("AzureOpenAI__ApiKey")) - .Build(); - - kernel.ImportSemanticFunctionsFromDirectory(folder, "SummarizePlugin", "WriterPlugin"); - - kernel.ImportFunctions(webSearchEnginePlugin, "WebSearch"); - kernel.ImportFunctions(new LanguageCalculatorPlugin(kernel), "advancedCalculator"); - kernel.ImportFunctions(new TimePlugin(), "time"); - - return kernel; - } - - private static ISequentialPlanner GetSequentialPlanner( - IKernel kernel, - ILoggerFactory loggerFactory, - int maxTokens = 1024) - { - var plannerConfig = new SequentialPlannerConfig { MaxTokens = maxTokens }; - - return new SequentialPlanner(kernel, plannerConfig).WithInstrumentation(loggerFactory); - } - - private static IActionPlanner GetActionPlanner( - IKernel kernel, - ILoggerFactory loggerFactory) - { - return new ActionPlanner(kernel).WithInstrumentation(loggerFactory); - } - - private static IStepwisePlanner GetStepwisePlanner( - IKernel kernel, - ILoggerFactory loggerFactory, - int minIterationTimeMs = 1500, - int maxTokens = 2000) - { - var plannerConfig = new StepwisePlannerConfig - { - MinIterationTimeMs = minIterationTimeMs, - MaxTokens = maxTokens - }; - - return new StepwisePlanner(kernel, plannerConfig).WithInstrumentation(loggerFactory); - } - - /// - /// Example of metering configuration in Application Insights - /// using to attach for recordings. - /// - /// Instance of for metering configuration. - /// Instance of Application Insights . - private static void ConfigureMetering(MeterListener meterListener, TelemetryClient telemetryClient) - { - meterListener.InstrumentPublished = (instrument, listener) => - { - // Subscribe to all metrics in Semantic Kernel - if (instrument.Meter.Name.StartsWith("Microsoft.SemanticKernel", StringComparison.Ordinal)) - { - listener.EnableMeasurementEvents(instrument); - } - }; - - meterListener.SetMeasurementEventCallback(GetMeasurementCallback(telemetryClient)); - meterListener.SetMeasurementEventCallback(GetMeasurementCallback(telemetryClient)); - - meterListener.Start(); - } - - /// - /// The callback which can be used to get measurement recording. - /// - /// Instance of Application Insights . 
- private static MeasurementCallback GetMeasurementCallback(TelemetryClient telemetryClient) where T : struct - { - return (instrument, measurement, tags, state) => - { - telemetryClient.GetMetric(instrument.Name).TrackValue(measurement); - }; - } - - /// - /// Example of advanced distributed tracing configuration in Application Insights - /// using to attach for events. - /// - /// Instance of for tracing configuration. - /// Instance of Application Insights . - private static void ConfigureTracing(ActivityListener activityListener, TelemetryClient telemetryClient) - { - var operations = new ConcurrentDictionary>(); - - // For more detailed tracing we need to attach Activity entity to Application Insights operation manually. - void activityStarted(Activity activity) - { - var operation = telemetryClient.StartOperation(activity); - operation.Telemetry.Type = activity.Kind.ToString(); - - operations.TryAdd(activity.TraceId.ToString(), operation); - } - - // We also need to manually stop Application Insights operation when Activity entity is stopped. - void activityStopped(Activity activity) - { - if (operations.TryRemove(activity.TraceId.ToString(), out var operation)) - { - telemetryClient.StopOperation(operation); - } - } - - // Subscribe to all traces in Semantic Kernel - activityListener.ShouldListenTo = - activitySource => activitySource.Name.StartsWith("Microsoft.SemanticKernel", StringComparison.Ordinal); - - activityListener.Sample = (ref ActivityCreationOptions _) => ActivitySamplingResult.AllData; - activityListener.SampleUsingParentId = (ref ActivityCreationOptions _) => ActivitySamplingResult.AllData; - activityListener.ActivityStarted = activityStarted; - activityListener.ActivityStopped = activityStopped; - - ActivitySource.AddActivityListener(activityListener); - } -} diff --git a/dotnet/samples/CreateChatGptPlugin/.editorconfig b/dotnet/samples/CreateChatGptPlugin/.editorconfig new file mode 100644 index 000000000000..39b98ac3a778 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/.editorconfig @@ -0,0 +1,2 @@ +[*.cs] +dotnet_diagnostic.CA1016.severity = none \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/.gitignore b/dotnet/samples/CreateChatGptPlugin/MathPlugin/.gitignore new file mode 100644 index 000000000000..6491a696b6e8 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/.gitignore @@ -0,0 +1,4 @@ +**/bin +**/obj +local.settings.json +azure-function/appsettings.json \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/README.md b/dotnet/samples/CreateChatGptPlugin/MathPlugin/README.md new file mode 100644 index 000000000000..aa75b0475df1 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/README.md @@ -0,0 +1,54 @@ +# Semantic Kernel OpenAI plugin starter + +This project provides starter code to create a OpenAI plugin. It includes the following components: + +- An endpoint that serves up an ai-plugin.json file for ChatGPT to discover the plugin +- A generator that automatically converts prompts into prompt endpoints +- The ability to add additional native functions as endpoints to the plugin + +## Prerequisites + +- [.NET 6](https://dotnet.microsoft.com/download/dotnet/8.0) is required to run this starter. +- [Azure Functions Core Tools](https://www.npmjs.com/package/azure-functions-core-tools) is required to run this starter. 
+- Install the recommended extensions +- [C#](https://marketplace.visualstudio.com/items?itemName=ms-dotnettools.csharp) +- [Semantic Kernel Tools](https://marketplace.visualstudio.com/items?itemName=ms-semantic-kernel.semantic-kernel) (optional) + +## Configuring the starter + +To configure the starter, you need to provide the following information: + +- Define the properties of the plugin in the appsettings.json file. +- Enter the API key for your AI endpoint in the [local.settings.json](./azure-function/local.settings.json.example) file. + +### Using appsettings.json + +Configure an OpenAI endpoint + +1. Copy [settings.json.openai-example](./azure-function/config-samples/appsettings.json.openai-example) to `./appsettings.json` +1. Edit the `kernel` object to add your OpenAI endpoint configuration +1. Edit the `aiPlugin` object to define the properties that get exposed in the ai-plugin.json file + +Configure an Azure OpenAI endpoint + +1. Copy [settings.json.azure-example](./azure-function/config-samples/appsettings.json.azure-example) to `./appsettings.json` +1. Edit the `kernel` object to add your Azure OpenAI endpoint configuration +1. Edit the `aiPlugin` object to define the properties that get exposed in the ai-plugin.json file + +### Using local.settings.json + +1. Copy [local.settings.json.example](./azure-function/local.settings.json.example) to `./azure-function/local.settings.json` +1. Edit the `Values` object to add your OpenAI endpoint configuration in the `apiKey` property + +## Running the starter + +To run the Azure Functions application just hit `F5`. + +To build and run the Azure Functions application from a terminal use the following commands: + +```powershell +cd azure-function +dotnet build +cd bin/Debug/net8.0 +func host start +``` diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/AIPluginJson.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/AIPluginJson.cs new file mode 100644 index 000000000000..3c6fae77e99f --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/AIPluginJson.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. 
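+// The function below backs the "endpoint that serves up an ai-plugin.json file" described in
+// the README above: it answers GET /.well-known/ai-plugin.json, serializes the `aiPlugin`
+// section of appsettings.json, and replaces the "{url}" placeholder with the scheme, host and
+// port of the incoming request so the manifest always points back at the running function app.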
+ +using System.Net; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; +using Models; + +public class AIPluginJson +{ + [Function("GetAIPluginJson")] + public HttpResponseData Run([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = ".well-known/ai-plugin.json")] HttpRequestData req) + { + var currentDomain = $"{req.Url.Scheme}://{req.Url.Host}:{req.Url.Port}"; + + HttpResponseData response = req.CreateResponse(HttpStatusCode.OK); + response.Headers.Add("Content-Type", "application/json"); + + var appSettings = AppSettings.LoadSettings(); + + // serialize app settings to json using System.Text.Json + var json = System.Text.Json.JsonSerializer.Serialize(appSettings.AIPlugin); + + // replace {url} with the current domain + json = json.Replace("{url}", currentDomain, StringComparison.OrdinalIgnoreCase); + + response.WriteString(json); + + return response; + } +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Directory.Build.props b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Directory.Build.props new file mode 100644 index 000000000000..607fdf28db46 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Directory.Build.props @@ -0,0 +1,33 @@ + + + + true + true + AllEnabledByDefault + latest + true + 11 + enable + disable + CS1591,CA1852,CA1050 + + + + + disable + + + + true + full + + + + portable + + + + $([System.IO.Path]::GetDirectoryName($([MSBuild]::GetPathOfFileAbove('.gitignore', '$(MSBuildThisFileDirectory)')))) + + + \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Directory.Build.targets b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Directory.Build.targets new file mode 100644 index 000000000000..6f85f5df3943 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Directory.Build.targets @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + <_Parameter1 Condition="'%(InternalsVisibleTo.PublicKey)' != ''">%(InternalsVisibleTo.Identity), PublicKey="%(InternalsVisibleTo.PublicKey) + <_Parameter1 Condition="'%(InternalsVisibleTo.PublicKey)' == ''">%(InternalsVisibleTo.Identity) + <_Parameter1_TypeName>System.String + + + + \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Extensions/AIPluginRunner.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Extensions/AIPluginRunner.cs new file mode 100644 index 000000000000..eb7be4213020 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Extensions/AIPluginRunner.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net; +using System.Reflection; +using System.Text.Json; +using Microsoft.Azure.Functions.Worker.Http; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; + +namespace Plugins.AzureFunctions.Extensions; + +public class AIPluginRunner +{ + private readonly ILogger _logger; + private readonly Kernel _kernel; + + public AIPluginRunner(Kernel kernel, ILoggerFactory loggerFactory) + { + this._kernel = kernel; + this._logger = loggerFactory.CreateLogger(); + } + + /// + /// Runs a prompt using the operationID and returns back an HTTP response. 
+ /// + /// + /// + /// + public async Task RunAIPluginOperationAsync(HttpRequestData req, string pluginName, string functionName) + { + KernelArguments arguments = ConvertToKernelArguments((await JsonSerializer.DeserializeAsync(req.Body).ConfigureAwait(true))!); + + var response = req.CreateResponse(HttpStatusCode.OK); + response.Headers.Add("Content-Type", "text/plain;charset=utf-8"); + await response.WriteStringAsync( + (await this._kernel.InvokeAsync(pluginName, functionName, arguments).ConfigureAwait(false)).ToString() + ).ConfigureAwait(false); + return response; + } + + // Method to convert model to dictionary + private static KernelArguments ConvertToKernelArguments(T model) + { + { + var arguments = new KernelArguments(); + foreach (PropertyInfo property in typeof(T).GetProperties()) + { + if (property.GetValue(model) != null) + { + arguments.Add(property.Name, property.GetValue(model)); + } + } + return arguments; + } + } +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Extensions/KernelBuilderExtensions.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Extensions/KernelBuilderExtensions.cs new file mode 100644 index 000000000000..64f1985e1064 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Extensions/KernelBuilderExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Models; + +internal static class KernelBuilderExtensions +{ + /// + /// Adds a chat completion service to the list. It can be either an OpenAI or Azure OpenAI backend service. + /// + /// + /// + /// + internal static IServiceCollection WithChatCompletionService(this IServiceCollection kernelBuilder, KernelSettings kernelSettings) + { + switch (kernelSettings.ServiceType.ToUpperInvariant()) + { + case ServiceTypes.AzureOpenAI: + kernelBuilder.AddAzureOpenAIChatCompletion(deploymentName: kernelSettings.DeploymentOrModelId, modelId: kernelSettings.DeploymentOrModelId, endpoint: kernelSettings.Endpoint, apiKey: kernelSettings.ApiKey, serviceId: kernelSettings.ServiceId); + break; + + case ServiceTypes.OpenAI: + kernelBuilder.AddOpenAIChatCompletion(modelId: kernelSettings.DeploymentOrModelId, apiKey: kernelSettings.ApiKey, orgId: kernelSettings.OrgId, serviceId: kernelSettings.ServiceId); + break; + + default: + throw new ArgumentException($"Invalid service type value: {kernelSettings.ServiceType}"); + } + + return kernelBuilder; + } +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Logo.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Logo.cs new file mode 100644 index 000000000000..ce6e70a69cfb --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Logo.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. 
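+// Serves the image referenced by the manifest's logo_url ("{url}/logo.png"): the handler below
+// reads logo.png from the project root and returns it with an image/png content type.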
+ +using System.Net; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; + +public class Logo +{ + [Function("GetLogo")] + public HttpResponseData Run([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "logo.png")] HttpRequestData req) + { + // Return logo.png that's in the root of the project + var response = req.CreateResponse(HttpStatusCode.OK); + response.Headers.Add("Content-Type", "image/png"); + + var logo = System.IO.File.ReadAllBytes("logo.png"); + response.Body.Write(logo); + + return response; + } +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/AIPluginSettings.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/AIPluginSettings.cs new file mode 100644 index 000000000000..929b21b26d18 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/AIPluginSettings.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft. All rights reserved. +using System.Text.Json.Serialization; + +namespace Models; + +#pragma warning disable CA1056 +#pragma warning disable CA1034 + +public class Pluginsettings +{ + [JsonPropertyName("schema_version")] + public string SchemaVersion { get; set; } = "v1"; + + [JsonPropertyName("name_for_model")] + public string NameForModel { get; set; } = string.Empty; + + [JsonPropertyName("name_for_human")] + public string NameForHuman { get; set; } = string.Empty; + + [JsonPropertyName("description_for_model")] + public string DescriptionForModel { get; set; } = string.Empty; + + [JsonPropertyName("description_for_human")] + public string DescriptionForHuman { get; set; } = string.Empty; + + [JsonPropertyName("auth")] + public AuthModel Auth { get; set; } = new AuthModel(); + + [JsonPropertyName("api")] + public ApiModel Api { get; set; } = new ApiModel(); + + [JsonPropertyName("logo_url")] + public string LogoUrl { get; set; } = string.Empty; + + [JsonPropertyName("contact_email")] + public string ContactEmail { get; set; } = string.Empty; + + [JsonPropertyName("legal_info_url")] + public string LegalInfoUrl { get; set; } = string.Empty; + + public class AuthModel + { + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + [JsonPropertyName("authorization_url")] + public string AuthorizationType { get; set; } = string.Empty; + } + + public class ApiModel + { + [JsonPropertyName("type")] + public string Type { get; set; } = "openapi"; + + [JsonPropertyName("url")] + public string Url { get; set; } = string.Empty; + + [JsonPropertyName("has_user_authentication")] + public bool HasUserAuthentication { get; set; } = false; + } +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/AppSettings.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/AppSettings.cs new file mode 100644 index 000000000000..b173bbb3dc0a --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/AppSettings.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.Configuration; + +namespace Models; + +#pragma warning disable CA1724 +#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable. 
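+// AppSettings is the strongly-typed view of appsettings.json: the "kernel" section binds to
+// KernelSettings and the "aiPlugin" section to Pluginsettings. The API key itself is not read
+// from that file; GetApiKey() pulls it from the "apiKey" value configured via
+// local.settings.json / environment variables, as described in the README.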
+public class AppSettings +{ + public const string DefaultConfigFile = "appsettings.json"; + + public KernelSettings Kernel { get; set; } + public Pluginsettings AIPlugin { get; set; } + + /// + /// Load the kernel settings from settings.json if the file exists and if not attempt to use user secrets. + /// + public static AppSettings LoadSettings() + { + try + { + var appSettings = FromFile(DefaultConfigFile); + appSettings.Kernel.ApiKey = GetApiKey(); + + return appSettings; + } + catch (InvalidDataException ide) + { + Console.Error.WriteLine( + "Unable to load app settings, please provide configuration settings using instructions in the README.\n" + + "Please refer to: https://github.com/microsoft/semantic-kernel-starters/blob/main/sk-csharp-chatgpt-plugin/README.md#configuring-the-starter" + ); + throw new InvalidOperationException(ide.Message); + } + } + + /// + /// Load the kernel settings from the specified configuration file if it exists. + /// + private static AppSettings FromFile(string configFile = DefaultConfigFile) + { + if (!File.Exists(configFile)) + { + throw new FileNotFoundException($"Configuration not found: {configFile}"); + } + + var configuration = new ConfigurationBuilder() + .SetBasePath(System.IO.Directory.GetCurrentDirectory()) + .AddJsonFile(configFile, optional: true, reloadOnChange: true) + .Build(); + + return configuration.Get() + ?? throw new InvalidDataException($"Invalid app settings in '{configFile}', please provide configuration settings using instructions in the README."); + } + + /// + /// Load the API key for the AI endpoint from user secrets. + /// + internal static string GetApiKey() + { + return System.Environment.GetEnvironmentVariable("apiKey", EnvironmentVariableTarget.Process) + ?? throw new InvalidDataException("Invalid semantic kernel settings in user secrets, please provide configuration settings using instructions in the README."); + } +} +#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable. diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/KernelSettings.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/KernelSettings.cs new file mode 100644 index 000000000000..c785b7a9358d --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/KernelSettings.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.Logging; + +namespace Models; + +#pragma warning disable CA1812 +public class KernelSettings +{ + public string ServiceType { get; set; } = string.Empty; + public string ServiceId { get; set; } = string.Empty; + public string DeploymentOrModelId { get; set; } = string.Empty; + public string Endpoint { get; set; } = string.Empty; + public string OrgId { get; set; } = string.Empty; + public LogLevel? LogLevel { get; set; } + public string ApiKey { get; set; } = string.Empty; +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/ServiceTypes.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/ServiceTypes.cs new file mode 100644 index 000000000000..2c671828cb9d --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Models/ServiceTypes.cs @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft. All rights reserved. 
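+// Service type identifiers compared (case-insensitively) against KernelSettings.ServiceType in
+// KernelBuilderExtensions.WithChatCompletionService to select the OpenAI or Azure OpenAI backend.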
+ +namespace Models; + +internal static class ServiceTypes +{ + internal const string OpenAI = "OPENAI"; + internal const string AzureOpenAI = "AZUREOPENAI"; +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Plugins/MathPlugin.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Plugins/MathPlugin.cs new file mode 100644 index 000000000000..a0b6bfa7c30a --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Plugins/MathPlugin.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ComponentModel; +using Microsoft.SemanticKernel; + +namespace Plugins; + +public sealed class MathPlugin +{ + [KernelFunction, Description("Take the square root of a number")] + public static double Sqrt( + [Description("The number to take a square root of")] double number1 + ) + { + return Math.Sqrt(number1); + } + + [KernelFunction, Description("Add two numbers")] + public static double Add( + [Description("The first number to add")] double number1, + [Description("The second number to add")] double number2 + ) + { + return number1 + number2; + } + + [KernelFunction, Description("Subtract two numbers")] + public static double Subtract( + [Description("The first number to subtract from")] double number1, + [Description("The second number to subtract away")] double number2 + ) + { + return number1 - number2; + } + + [KernelFunction, Description("Multiply two numbers. When increasing by a percentage, don't forget to add 1 to the percentage.")] + public static double Multiply( + [Description("The first number to multiply")] double number1, + [Description("The second number to multiply")] double number2 + ) + { + return number1 * number2; + } + + [KernelFunction, Description("Divide two numbers")] + public static double Divide( + [Description("The first number to divide from")] double number1, + [Description("The second number to divide by")] double number2 + ) + { + return number1 / number2; + } + + [KernelFunction, Description("Raise a number to a power")] + public static double Power( + [Description("The number to raise")] double number1, + [Description("The power to raise the number to")] double number2 + ) + { + return Math.Pow(number1, number2); + } + + [KernelFunction, Description("Take the log of a number")] + public static double Log( + [Description("The number to take the log of")] double number1, + [Description("The base of the log")] double number2 + ) + { + return Math.Log(number1, number2); + } + + [KernelFunction, Description("Round a number to the target number of decimal places")] + public static double Round( + [Description("The number to round")] double number1, + [Description("The number of decimal places to round to")] double number2 + ) + { + return Math.Round(number1, (int)number2); + } + + [KernelFunction, Description("Take the absolute value of a number")] + public static double Abs( + [Description("The number to take the absolute value of")] double number1 + ) + { + return Math.Abs(number1); + } + + [KernelFunction, Description("Take the floor of a number")] + public static double Floor( + [Description("The number to take the floor of")] double number1 + ) + { + return Math.Floor(number1); + } + + [KernelFunction, Description("Take the ceiling of a number")] + public static double Ceiling( + [Description("The number to take the ceiling of")] double number1 + ) + { + return Math.Ceiling(number1); + } + + [KernelFunction, Description("Take the sine of a number")] + public static double Sin( + 
[Description("The number to take the sine of")] double number1 + ) + { + return Math.Sin(number1); + } + + [KernelFunction, Description("Take the cosine of a number")] + public static double Cos( + [Description("The number to take the cosine of")] double number1 + ) + { + return Math.Cos(number1); + } + + [KernelFunction, Description("Take the tangent of a number")] + public static double Tan( + [Description("The number to take the tangent of")] double number1 + ) + { + return Math.Tan(number1); + } + + [KernelFunction, Description("Take the arcsine of a number")] + public static double Asin( + [Description("The number to take the arcsine of")] double number1 + ) + { + return Math.Asin(number1); + } + + [KernelFunction, Description("Take the arccosine of a number")] + public static double Acos( + [Description("The number to take the arccosine of")] double number1 + ) + { + return Math.Acos(number1); + } + + [KernelFunction, Description("Take the arctangent of a number")] + public static double Atan( + [Description("The number to take the arctangent of")] double number1 + ) + { + return Math.Atan(number1); + } +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Program.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Program.cs new file mode 100644 index 000000000000..7edc8e78c0a8 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Program.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Abstractions; +using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Configurations; +using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Enums; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.OpenApi.Models; +using Microsoft.SemanticKernel; +using Models; +using Plugins; +using Plugins.AzureFunctions.Extensions; + +const string DefaultSemanticFunctionsFolder = "Prompts"; +string semanticFunctionsFolder = Environment.GetEnvironmentVariable("SEMANTIC_SKILLS_FOLDER") ?? DefaultSemanticFunctionsFolder; + +var host = new HostBuilder() + .ConfigureFunctionsWorkerDefaults() + .ConfigureAppConfiguration(configuration => + { + var config = configuration.SetBasePath(Directory.GetCurrentDirectory()) + .AddJsonFile("local.settings.json", optional: true, reloadOnChange: true); + var builtConfig = config.Build(); + }) + .ConfigureServices((context, services) => + { + services.Configure(options => + { + // `ConfigureFunctionsWorkerDefaults` sets the default to ignore casing already. + options.PropertyNamingPolicy = JsonNamingPolicy.CamelCase; + }); + + services.AddSingleton(_ => + { + var options = new OpenApiConfigurationOptions() + { + Info = new OpenApiInfo() + { + Version = "1.0.0", + Title = "My Plugin", + Description = "This plugin does..." 
+ }, + Servers = DefaultOpenApiConfigurationOptions.GetHostNames(), + OpenApiVersion = OpenApiVersionType.V3, + //IncludeRequestingHostName = true, + ForceHttps = false, + ForceHttp = false, + }; + + return options; + }); + services + .AddTransient((providers) => + { + var appSettings = AppSettings.LoadSettings(); + var builder = Kernel.CreateBuilder(); + builder.Services.WithChatCompletionService(appSettings.Kernel); + builder.Services.AddLogging(loggingBuilder => + { + loggingBuilder.AddFilter(level => true); + loggingBuilder.AddConsole(); + }); + builder.Plugins.AddFromType(); + return builder.Build(); + }) + .AddScoped(); + }) + .Build(); + +host.Run(); diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Prompts/GetLogicalValue/config.json b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Prompts/GetLogicalValue/config.json new file mode 100644 index 000000000000..602efd48b2b2 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Prompts/GetLogicalValue/config.json @@ -0,0 +1,26 @@ +{ + "schema": 1, + "description": "If the request does not provide an explicit number for a parameter, use this function to make up a logical value; do not make up a value yourself.", + "type": "completion", + "completion": { + "max_tokens": 1000, + "temperature": 0.9, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0 + }, + "input": { + "parameters": [ + { + "name": "input", + "description": "A detailed description (2-3 sentences) of the missing value; provide all the context the user provided to get the best results.", + "defaultValue": "" + }, + { + "name": "units", + "description": "The units used to measure the value (e.g., 'meters', 'seconds', 'dollars', etc.). (required)", + "defaultValue": "" + } + ] + } +} \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Prompts/GetLogicalValue/skprompt.txt b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Prompts/GetLogicalValue/skprompt.txt new file mode 100644 index 000000000000..baa85e422ac2 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/Prompts/GetLogicalValue/skprompt.txt @@ -0,0 +1,18 @@ +INSTURCTIONS: +Provide a realistic value for the missing parameter. If you don't know the answer, provide a best guess using the limited information provided. +Do not give a range of values. Do not give a value that is not realistic. Do not give a value that is not possible. 
+ +OUTPUT FORMAT: +{ + "value": "", + "reason": "", + "units": "", +} + +MISSING PARAMETER DESCRIPTION: +{{$input}} + +PARAMETER UNITS: +{{$units}} + +ANSWER: diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/azure-function.sln b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/azure-function.sln new file mode 100644 index 000000000000..ebbca6abe6cb --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/azure-function.sln @@ -0,0 +1,31 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.5.002.0 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "sk-chatgpt-azure-function", "sk-chatgpt-azure-function.csproj", "{20F608E8-E748-4432-B673-7D98782A6E23}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PluginShared", "shared\PluginShared.csproj", "{82A779FB-9CFD-4159-8AFC-54D00C76947C}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {20F608E8-E748-4432-B673-7D98782A6E23}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {20F608E8-E748-4432-B673-7D98782A6E23}.Debug|Any CPU.Build.0 = Debug|Any CPU + {20F608E8-E748-4432-B673-7D98782A6E23}.Release|Any CPU.ActiveCfg = Release|Any CPU + {20F608E8-E748-4432-B673-7D98782A6E23}.Release|Any CPU.Build.0 = Release|Any CPU + {82A779FB-9CFD-4159-8AFC-54D00C76947C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {82A779FB-9CFD-4159-8AFC-54D00C76947C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {82A779FB-9CFD-4159-8AFC-54D00C76947C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {82A779FB-9CFD-4159-8AFC-54D00C76947C}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {76F2FDDE-51A7-4934-BEB6-2D6F082412BD} + EndGlobalSection +EndGlobal diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/config-samples/appsettings.json.azure-example b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/config-samples/appsettings.json.azure-example new file mode 100644 index 000000000000..ff4f1529a25d --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/config-samples/appsettings.json.azure-example @@ -0,0 +1,25 @@ +{ + "kernel": { + "serviceType": "AzureOpenAI", + "serviceId": "gpt-35-turbo", + "deploymentOrModelId": "gpt-35-turbo", + "endpoint": "https:// ... your endpoint ... 
.openai.azure.com/" + }, + "aiPlugin": { + "schemaVersion": "v1", + "nameForModel": "MyPlugin", + "nameForHuman": "My Plugin", + "descriptionForModel": "", + "descriptionForHuman": "", + "auth": { + "type": "none" + }, + "api": { + "type": "openapi", + "url": "{url}/swagger.json" + }, + "logoUrl": "{url}/logo.png", + "contactEmail": "support@example.com", + "legalInfoUrl": "http://www.example.com/legal" + } +} \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/config-samples/appsettings.json.openai-example b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/config-samples/appsettings.json.openai-example new file mode 100644 index 000000000000..b81fc7da29ec --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/config-samples/appsettings.json.openai-example @@ -0,0 +1,25 @@ +{ + "kernel": { + "serviceType": "OpenAI", + "serviceId": "gpt-3.5-turbo", + "deploymentOrModelId": "gpt-3.5-turbo", + "orgId": "" + }, + "aiPlugin": { + "schemaVersion": "v1", + "nameForModel": "MyPlugin", + "nameForHuman": "My Plugin", + "descriptionForModel": "", + "descriptionForHuman": "", + "auth": { + "type": "none" + }, + "api": { + "type": "openapi", + "url": "{url}/swagger.json" + }, + "logoUrl": "{url}/logo.png", + "contactEmail": "support@example.com", + "legalInfoUrl": "http://www.example.com/legal" + } +} \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/host.json b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/host.json new file mode 100644 index 000000000000..5ae5a72c7802 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/host.json @@ -0,0 +1,16 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensions": { + "http": { + "routePrefix": "" + } + } +} \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/local.settings.json.example b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/local.settings.json.example new file mode 100644 index 000000000000..63158c5c9413 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/local.settings.json.example @@ -0,0 +1,10 @@ +{ + "IsEncrypted": false, + "Host": { + "CORS": "*" + }, + "Values": { + "apiKey": "", + "FUNCTIONS_WORKER_RUNTIME": "dotnet-isolated" + } +} \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/logo.png b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/logo.png new file mode 100644 index 000000000000..6f4b5cc5b98a Binary files /dev/null and b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/logo.png differ diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginApi.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginApi.cs new file mode 100644 index 000000000000..421d50b029f9 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginApi.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace skchatgptazurefunction.PluginShared; + +/// +/// This class represents the plugin API specification. 
+/// +public class PluginApi +{ + /// + /// The API specification + /// + public string Type { get; set; } = "openapi"; + + /// + /// URL used to fetch the specification + /// +#pragma warning disable CA1056 // URI-like properties should not be strings + public string Url { get; set; } = string.Empty; +#pragma warning restore CA1056 // URI-like properties should not be strings +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginAuth.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginAuth.cs new file mode 100644 index 000000000000..4b789b4ed60b --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginAuth.cs @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace skchatgptazurefunction.PluginShared; + +/// +/// This class represents the OpenAI plugin authentication schema. +/// +public class PluginAuth +{ + /// + /// Tokens for API key authentication + /// + public class VerificationTokens + { + /// + /// The API key + /// + public string OpenAI { get; set; } = string.Empty; + } + + /// + /// The authentication schema + /// Supported values: none, service_http, user_http + /// + public string Type { get; set; } = "none"; + + /// + /// Manifest schema version + /// + [JsonPropertyName("authorization_type")] + public string AuthorizationType { get; } = "bearer"; + + /// + /// Tokens for API key authentication + /// + [JsonPropertyName("verification_tokens")] + public VerificationTokens Tokens { get; set; } = new VerificationTokens(); +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginManifest.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginManifest.cs new file mode 100644 index 000000000000..6fee4d5f8ec2 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginManifest.cs @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft. All rights reserved. 
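+// Shared counterpart of Models/Pluginsettings in the function app: the same ai-plugin.json
+// fields (schema_version, name_for_model, auth, api, ...), exposed from the PluginShared project.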
+ +using System.Text.Json.Serialization; + +namespace skchatgptazurefunction.PluginShared; + +/// +/// This class represents the OpenAI plugin manifest: +/// https://platform.openai.com/docs/plugins/getting-started/plugin-manifest +/// +public class PluginManifest +{ + /// + /// Manifest schema version + /// + [JsonPropertyName("schema_version")] + public string SchemaVersion { get; set; } = "v1"; + + /// + /// The name of the plugin that the model will use + /// + [JsonPropertyName("name_for_model")] + public string NameForModel { get; set; } = string.Empty; + + /// + /// Human-readable name of the plugin + /// + [JsonPropertyName("name_for_human")] + public string NameForHuman { get; set; } = string.Empty; + + /// + /// Description of the plugin that the model will use + /// + [JsonPropertyName("description_for_model")] + public string DescriptionForModel { get; set; } = string.Empty; + + /// + /// Human-readable description of the plugin + /// + [JsonPropertyName("description_for_human")] + public string DescriptionForHuman { get; set; } = string.Empty; + + /// + /// The authentication schema + /// + public PluginAuth Auth { get; set; } = new PluginAuth(); + + /// + /// The API specification + /// + public PluginApi Api { get; set; } = new PluginApi(); + +#pragma warning disable CA1056 // URI-like properties should not be strings + /// + /// Redirect URL for users to get more information about the plugin + /// + [JsonPropertyName("legal_info_url")] + public string LegalInfoUrl { get; set; } = string.Empty; + + /// + /// URL used to fetch the logo + /// + [JsonPropertyName("logo_url")] + public string LogoUrl { get; set; } = string.Empty; +#pragma warning restore CA1056 // URI-like properties should not be strings + + /// + /// Email contact for safety/moderation, support and deactivation + /// + [JsonPropertyName("contact_email")] + public string ContactEmail { get; set; } = string.Empty; + + /// + /// "Bearer" or "Basic" + /// + public string HttpAuthorizationType { get; set; } = string.Empty; +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginShared.csproj b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginShared.csproj new file mode 100644 index 000000000000..fa0c17bf202c --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/shared/PluginShared.csproj @@ -0,0 +1,10 @@ + + + + net6.0 + 10 + enable + skchatgptazurefunction.PluginShared + + + \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj new file mode 100644 index 000000000000..7ea6c27ad163 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj @@ -0,0 +1,68 @@ + + + + $([System.IO.Path]::GetDirectoryName($([MSBuild]::GetPathOfFileAbove('.gitignore', '$(MSBuildThisFileDirectory)')))) + d733072a-9c0f-473c-a99b-6e12f7b5ef28 + + + + net6.0 + skchatgptazurefunction + v4 + <_FunctionsSkipCleanOutput>true + Exe + 10 + enable + enable + false + + + + false + false + + + + + + + + + + + + + + + + .generated\%(RecursiveDir)%(Filename)%(Extension) + + + + + + + PreserveNewest + + + PreserveNewest + Never + + + PreserveNewest + + + PreserveNewest + + + + + + PreserveNewest + + + + + + + diff --git 
a/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/Extensions/GeneratorExecutionContextExtensions.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/Extensions/GeneratorExecutionContextExtensions.cs new file mode 100644 index 000000000000..29854dbb3895 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/Extensions/GeneratorExecutionContextExtensions.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.CodeAnalysis; + +namespace Plugins.AzureFunctions.Generator.Extensions; + +internal static class GeneratorExecutionContextExtensions +{ + public static string? GetMSBuildProperty( + this GeneratorExecutionContext context, + string name, + string defaultValue = "") + { + context.AnalyzerConfigOptions.GlobalOptions.TryGetValue($"build_property.{name}", out var value); + return value ?? defaultValue; + } + + public static string? GetRootNamespace(this GeneratorExecutionContext context) + { + return context.GetMSBuildProperty("RootNamespace"); + } +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/KernelFunctionGenerator.cs b/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/KernelFunctionGenerator.cs new file mode 100644 index 000000000000..9ae83cf56377 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/KernelFunctionGenerator.cs @@ -0,0 +1,212 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Text; +using Microsoft.CodeAnalysis; +using Microsoft.CodeAnalysis.CSharp.Syntax; + +namespace Plugins.AzureFunctions.Generator; + +/// +/// Generates kernel functions +/// +[Generator] +public class KernelFunctionGenerator : ISourceGenerator +{ + /// + /// Generates kernel functions + /// + public void Execute(GeneratorExecutionContext context) + { + var functionDetailsByPlugin = new Dictionary>(); + + foreach (var syntaxTree in context.Compilation.SyntaxTrees) + { + var semanticModel = context.Compilation.GetSemanticModel(syntaxTree); + var root = syntaxTree.GetRoot(); + + var configureServicesCalls = root.DescendantNodes() + .OfType() + .Where(ies => ies.Expression is MemberAccessExpressionSyntax maes && maes.Name.ToString() == "AddTransient"); + + foreach (var configureServicesCall in configureServicesCalls) + { + // Analyze within ConfigureServices + foreach (var invocation in configureServicesCall.DescendantNodes().OfType()) + { + var symbol = semanticModel.GetSymbolInfo(invocation).Symbol as IMethodSymbol; + if (symbol?.ContainingType.ToString() == "Microsoft.SemanticKernel.KernelExtensions") + { + INamedTypeSymbol? 
pluginTypeArgument = null; + if (symbol.Name == "AddFromType") + { + pluginTypeArgument = symbol.TypeArguments.FirstOrDefault() as INamedTypeSymbol; + } + else if (symbol.Name == "AddFromObject") + { + var objectCreationExpression = invocation.ArgumentList.Arguments.FirstOrDefault()?.Expression as ObjectCreationExpressionSyntax; + if (objectCreationExpression != null) + { + var typeInfo = semanticModel.GetTypeInfo(objectCreationExpression); + pluginTypeArgument = typeInfo.Type as INamedTypeSymbol; + } + } + + if (pluginTypeArgument != null && configureServicesCall.Expression is MemberAccessExpressionSyntax maes) + { + var pluginName = pluginTypeArgument.Name; + var functionDetails = this.ExtractFunctionDetails(context, pluginTypeArgument); + functionDetailsByPlugin[pluginName] = functionDetails; + } + } + } + } + } + + // Generate source for each plugin + foreach (var pluginEntry in functionDetailsByPlugin) + { + var sourceCode = GenerateClassSource("AzureFunctionPlugins", pluginEntry.Key, pluginEntry.Value); + context.AddSource($"{pluginEntry.Key}.g.cs", sourceCode); + } + } + + private List ExtractFunctionDetails(GeneratorExecutionContext context, INamedTypeSymbol pluginClass) + { + var functionDetailsList = new List(); + + foreach (var member in pluginClass.GetMembers()) + { + if (member is IMethodSymbol methodSymbol && methodSymbol.GetAttributes().Any(attr => attr?.AttributeClass?.Name == "KernelFunctionAttribute")) + { + var functionDetails = new FunctionDetails + { + Name = methodSymbol.Name, + Description = methodSymbol.GetAttributes().FirstOrDefault(a => a?.AttributeClass?.Name == "DescriptionAttribute")?.ConstructorArguments.FirstOrDefault().Value.ToString(), + Parameters = new List() + }; + + foreach (var parameter in methodSymbol.Parameters) + { + var parameterDetails = new ParameterDetails + { + Name = parameter.Name, + Type = parameter.Type.ToString(), + Description = parameter.GetAttributes().FirstOrDefault(a => a?.AttributeClass?.Name == "DescriptionAttribute")?.ConstructorArguments.FirstOrDefault().Value.ToString() + }; + + functionDetails.Parameters.Add(parameterDetails); + } + + functionDetailsList.Add(functionDetails); + } + } + + return functionDetailsList; + } + + // Generate the source code for a folder of prompts + private static string GenerateClassSource(string rootNamespace, string pluginName, List functions) + { + StringBuilder functionsCode = new(); + + foreach (var function in functions) + { + functionsCode.AppendLine(GenerateFunctionSource(pluginName, function) ?? string.Empty); + } + + return $@"/* ### GENERATED CODE - Do not modify. Edits will be lost on build. ### */ + using System; + using System.Net; + using System.Reflection; + using System.Threading.Tasks; + using Microsoft.Azure.Functions.Worker; + using Microsoft.Azure.Functions.Worker.Http; + using Microsoft.Azure.WebJobs.Extensions.OpenApi.Core.Attributes; + using Microsoft.Extensions.Logging; + using Microsoft.OpenApi.Models; + using Microsoft.SemanticKernel; + using Plugins.AzureFunctions.Extensions; + + namespace {rootNamespace}; + + public class {pluginName} + {{ + private readonly ILogger _logger; + private readonly AIPluginRunner _pluginRunner; + + public {pluginName}(AIPluginRunner pluginRunner, ILoggerFactory loggerFactory) + {{ + _pluginRunner = pluginRunner; + _logger = loggerFactory.CreateLogger<{pluginName}>(); + }} + + {functionsCode} + }}"; + } + + private static string? 
GenerateFunctionSource(string pluginName, FunctionDetails functionDetails) + { + string modelClassName = $"{functionDetails.Name}Model"; // Name of the model class + string parameterAttributes = GenerateModelClassSource(modelClassName, functionDetails.Parameters); + + return $@" + {parameterAttributes} + + [OpenApiOperation(operationId: ""{functionDetails.Name}"", tags: new[] {{ ""{functionDetails.Name}"" }})] + [OpenApiRequestBody(contentType: ""application/json"", bodyType: typeof({modelClassName}), Required = true, Description = ""JSON request body"")] + [OpenApiResponseWithBody(statusCode: HttpStatusCode.OK, contentType: ""application/json"", bodyType: typeof(string), Description = ""The OK response"")] + [Function(""{functionDetails.Name}"")] + public async Task {functionDetails.Name}([HttpTrigger(AuthorizationLevel.Anonymous, ""post"")] HttpRequestData req) + {{ + _logger.LogInformation(""HTTP trigger processed a request for function {pluginName}-{functionDetails.Name}.""); + return await _pluginRunner.RunAIPluginOperationAsync<{modelClassName}>(req, ""{pluginName}"", ""{functionDetails.Name}""); + }}"; + } + + private static string GenerateModelClassSource(string modelClassName, List parameters) + { + StringBuilder modelClassBuilder = new(); + + modelClassBuilder.AppendLine($"public class {modelClassName}"); + modelClassBuilder.AppendLine("{"); + + foreach (var parameter in parameters) + { + modelClassBuilder.AppendLine($" public {parameter.Type} {parameter.Name} {{ get; set; }}"); + } + + modelClassBuilder.AppendLine("}"); + + return modelClassBuilder.ToString(); + } + + /// + /// Does nothing + /// + public void Initialize(GeneratorInitializationContext context) + { + // No initialization required + } +} + +/// +/// Function details +/// +public class FunctionDetails +{ + public string Name { get; set; } = string.Empty; + public string Description { get; set; } = string.Empty; + public List Parameters { get; set; } = new List(); +} + +/// +/// Parameter details +/// +public class ParameterDetails +{ + public string Name { get; set; } = string.Empty; + public string Type { get; set; } = string.Empty; + public string Description { get; set; } = string.Empty; +} diff --git a/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/kernel-functions-generator.csproj b/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/kernel-functions-generator.csproj new file mode 100644 index 000000000000..274d4eb52e3f --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/MathPlugin/kernel-functions-generator/kernel-functions-generator.csproj @@ -0,0 +1,22 @@ + + + + netstandard2.0 + 10 + enable + true + RS1035,CS0612,CS1591,CS8601,CS8602,CS860218 + Plugins.AzureFunctions.Generator + + + + + + + + $(GetTargetPathDependsOn);GetDependencyTargetPaths + + + + + \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/README.md b/dotnet/samples/CreateChatGptPlugin/README.md new file mode 100644 index 000000000000..3394ad2b1693 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/README.md @@ -0,0 +1,76 @@ +# Creating and using a OpenAI plugin + +## Prerequisites + +- [Azure Functions Core Tools](https://github.com/Azure/azure-functions-core-tools) version 4.x. +- [.NET 6](https://dotnet.microsoft.com/download/dotnet/6.0) is required to run this sample. 
+- Install the recommended extensions +- [C#](https://marketplace.visualstudio.com/items?itemName=ms-dotnettools.csharp) +- [Semantic Kernel Tools](https://marketplace.visualstudio.com/items?itemName=ms-semantic-kernel.semantic-kernel) (optional) + +You must also have the Azure Function located [here](./MathPlugin/) running locally, otherwise the sample will fail. + +## Configuring the sample + +The sample can be configured by using the command line with .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) to avoid the risk of leaking secrets into the repository, branches and pull requests. + +This sample has been tested with the following models: + +| Service | Model type | Model | Model version | Supported | +| ------------ | --------------- | ---------------- | ------------: | --------- | +| OpenAI | Text Completion | text-davinci-003 | 1 | ❌ | +| OpenAI | Chat Completion | gpt-3.5-turbo | 1 | ❌ | +| OpenAI | Chat Completion | gpt-3.5-turbo | 0301 | ❌ | +| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 0613 | ✅ | +| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 1106 | ✅ | +| OpenAI | Chat Completion | gpt-4 | 1 | ❌ | +| OpenAI | Chat Completion | gpt-4 | 0314 | ❌ | +| Azure OpenAI | Chat Completion | gpt-4 | 0613 | ✅ | +| Azure OpenAI | Chat Completion | gpt-4 | 1106 | ✅ | + +This sample uses function calling, so it only works on models newer than 0613. + +### Using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) + +Configure an OpenAI endpoint + +```powershell +cd 14-Create-ChatGPT-Plugin/Solution + +dotnet user-secrets set "Global:LlmService" "OpenAI" + +dotnet user-secrets set "OpenAI:ModelType" "chat-completion" +dotnet user-secrets set "OpenAI:ChatCompletionModelId" "gpt-4" +dotnet user-secrets set "OpenAI:ApiKey" "... your OpenAI key ..." +dotnet user-secrets set "OpenAI:OrgId" "... your ord ID ..." +``` + +Configure an Azure OpenAI endpoint + +```powershell +cd 14-Create-ChatGPT-Plugin/Solution + +dotnet user-secrets set "Global:LlmService" "AzureOpenAI" + +dotnet user-secrets set "AzureOpenAI:DeploymentType" "chat-completion" +dotnet user-secrets set "AzureOpenAI:ChatCompletionDeploymentName" "gpt-35-turbo" +dotnet user-secrets set "AzureOpenAI:ChatCompletionModelId" "gpt-3.5-turbo-0613" +dotnet user-secrets set "AzureOpenAI:Endpoint" "... your Azure OpenAI endpoint ..." +dotnet user-secrets set "AzureOpenAI:ApiKey" "... your Azure OpenAI key ..." +``` + +## Running the sample + +First, refer to the [README](./MathPlugin/README.md) in the MathPlugin\ folder +to start the Azure Function. + +After starting the Azure Function and configuring the sample, +to build and run the console application, navigate to the [Solution](./Solution/) folder and hit `F5`. 
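+
+For a sense of what the console application does under the hood, the snippet below is a minimal
+sketch (not the sample's actual [Program.cs](./Solution/Program.cs)). It assumes the MathPlugin
+function app is listening on the default `http://localhost:7071`, that the Semantic Kernel
+OpenAPI plugin package is referenced, and it uses placeholder deployment, endpoint and key values.
+
+```csharp
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+
+#pragma warning disable SKEXP0042 // OpenAPI plugin import is marked experimental in SK 1.x
+
+var kernel = Kernel.CreateBuilder()
+    .AddAzureOpenAIChatCompletion(
+        deploymentName: "gpt-35-turbo",                       // placeholder deployment name
+        endpoint: "https://<your-endpoint>.openai.azure.com/", // placeholder endpoint
+        apiKey: "<your-api-key>")                              // placeholder key
+    .Build();
+
+// Import the math plugin from the OpenAPI document served by the running Azure Function.
+await kernel.ImportPluginFromOpenApiAsync(
+    "MathPlugin",
+    new Uri("http://localhost:7071/swagger.json"));
+
+// Let the model choose and invoke the math functions automatically (requires a 0613 or newer model).
+var settings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
+var answer = await kernel.InvokePromptAsync("What is the square root of 524?", new(settings));
+Console.WriteLine(answer);
+```
+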
+ +To build and run the console application from the terminal use the following commands: + +```powershell +cd Solution +dotnet build +dotnet run +``` diff --git a/dotnet/samples/CreateChatGptPlugin/Solution/.gitignore b/dotnet/samples/CreateChatGptPlugin/Solution/.gitignore new file mode 100644 index 000000000000..7e5351290bf0 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/Solution/.gitignore @@ -0,0 +1,7 @@ +appsettings.json +bin +obj +*.ini +*.cache +*.log +*.tmp diff --git a/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/extensions.json b/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/extensions.json new file mode 100644 index 000000000000..00de051b99a0 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/extensions.json @@ -0,0 +1,6 @@ +{ + "recommendations": [ + "ms-dotnettools.csharp", + "ms-semantic-kernel.semantic-kernel" + ] +} \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/launch.json b/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/launch.json new file mode 100644 index 000000000000..85ab19de82f9 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/launch.json @@ -0,0 +1,26 @@ +{ + "version": "0.2.0", + "configurations": [ + { + // Use IntelliSense to find out which attributes exist for C# debugging + // Use hover for the description of the existing attributes + // For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md + "name": ".NET Core Launch (console)", + "type": "coreclr", + "request": "launch", + "preLaunchTask": "build", + // If you have changed target frameworks, make sure to update the program path. + "program": "${workspaceFolder}/bin/Debug/net8.0/14-Create-ChatGPT-Plugin.dll", + "args": [], + "cwd": "${workspaceFolder}", + // For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console + "console": "integratedTerminal", + "stopAtEntry": false + }, + { + "name": ".NET Core Attach", + "type": "coreclr", + "request": "attach" + } + ] +} \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/settings.json b/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/settings.json new file mode 100644 index 000000000000..440f8aa89330 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/settings.json @@ -0,0 +1,84 @@ +{ + "prettier.enable": true, + "css.lint.validProperties": [ + "composes" + ], + "editor.defaultFormatter": "ms-dotnettools.csharp", + "editor.formatOnType": true, + "editor.formatOnSave": true, + "editor.formatOnPaste": true, + "editor.codeActionsOnSave": { + "source.fixAll": "explicit" + }, + "[csharp]": { + "editor.defaultFormatter": "ms-dotnettools.csharp" + }, + "editor.bracketPairColorization.enabled": true, + "editor.guides.bracketPairs": "active", + "python.formatting.provider": "autopep8", + "python.formatting.autopep8Args": [ + "--max-line-length=120" + ], + "notebook.output.textLineLimit": 500, + "python.analysis.extraPaths": [ + "./python/src" + ], + "javascript.updateImportsOnFileMove.enabled": "always", + "search.exclude": { + "**/node_modules": true, + "**/bower_components": true, + "**/build": true + }, + "[typescript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.codeActionsOnSave": { + "source.organizeImports": true, + "source.fixAll": true + } + }, + "[typescriptreact]": { + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.codeActionsOnSave": { + 
"source.organizeImports": true, + "source.fixAll": true + } + }, + "typescript.updateImportsOnFileMove.enabled": "always", + "eslint.enable": true, + "eslint.validate": [ + "javascript", + "javascriptreact", + "typescript", + "typescriptreact" + ], + "eslint.lintTask.enable": true, + "eslint.workingDirectories": [ + { + "mode": "auto" + } + ], + "eslint.options": { + "overrideConfigFile": ".eslintrc.js" + }, + "eslint.packageManager": "yarn", + "files.associations": { + "*.json": "jsonc" + }, + "files.exclude": { + "**/.git": true, + "**/.svn": true, + "**/.hg": true, + "**/CVS": true, + "**/.DS_Store": true, + "**/Thumbs.db": true + }, + "cSpell.words": [ + "Partitioner" + ], + "[typescript][typescriptreact]": { + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit", + "source.fixAll": "explicit" + } + } + } \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/tasks.json b/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/tasks.json new file mode 100644 index 000000000000..d8ca29a9bc9a --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/Solution/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "build", + "command": "dotnet", + "type": "process", + "args": [ + "build", + "${workspaceFolder}/14-Create-ChatGPT-Plugin.csproj", + "/property:GenerateFullPaths=true", + "/consoleloggerparameters:NoSummary" + ], + "problemMatcher": "$msCompile" + }, + { + "label": "publish", + "command": "dotnet", + "type": "process", + "args": [ + "publish", + "${workspaceFolder}/14-Create-ChatGPT-Plugin.csproj", + "/property:GenerateFullPaths=true", + "/consoleloggerparameters:NoSummary" + ], + "problemMatcher": "$msCompile" + }, + { + "label": "watch", + "command": "dotnet", + "type": "process", + "args": [ + "watch", + "run", + "--project", + "${workspaceFolder}/14-Create-ChatGPT-Plugin.csproj" + ], + "problemMatcher": "$msCompile" + } + ] +} \ No newline at end of file diff --git a/dotnet/samples/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj b/dotnet/samples/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj new file mode 100644 index 000000000000..338d4dd18d6c --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj @@ -0,0 +1,29 @@ + + + + Exe + net6.0 + + enable + enable + 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + false + SKEXP0042 + + + + + + + + + + + + + + PreserveNewest + + + + diff --git a/dotnet/samples/CreateChatGptPlugin/Solution/Program.cs b/dotnet/samples/CreateChatGptPlugin/Solution/Program.cs new file mode 100644 index 000000000000..ace962fe2a3e --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/Solution/Program.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Plugins.OpenApi; + +// Create kernel +var builder = Kernel.CreateBuilder(); +// Add a text or chat completion service using either: +// builder.Services.AddAzureOpenAIChatCompletion() +// builder.Services.AddAzureOpenAITextGeneration() +// builder.Services.AddOpenAIChatCompletion() +// builder.Services.AddOpenAITextGeneration() +builder.WithCompletionService(); +var kernel = builder.Build(); + +// Add the math plugin using the plugin manifest URL +await kernel.ImportPluginFromOpenApiAsync("MathPlugin", new Uri("http://localhost:7071/swagger.json")).ConfigureAwait(false); + +// Create chat history +ChatHistory history = new(); + +// Get chat completion service +var chatCompletionService = kernel.GetRequiredService(); + +// Start the conversation +while (true) +{ + // Get user input + Console.Write("User > "); + history.AddUserMessage(Console.ReadLine()!); + + // Enable auto function calling + OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() + { + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + }; + + // Get the response from the AI + var result = chatCompletionService.GetStreamingChatMessageContentsAsync( + history, + executionSettings: openAIPromptExecutionSettings, + kernel: kernel); + + // Stream the results + string fullMessage = ""; + var first = true; + await foreach (var content in result) + { + if (content.Role.HasValue && first) + { + Console.Write("Assistant > "); + first = false; + } + Console.Write(content.Content); + fullMessage += content.Content; + } + Console.WriteLine(); + + // Add the message from the agent to the chat history + history.AddAssistantMessage(fullMessage); +} diff --git a/dotnet/samples/ApplicationInsightsExample/RepoUtils/Env.cs b/dotnet/samples/CreateChatGptPlugin/Solution/config/Env.cs similarity index 81% rename from dotnet/samples/ApplicationInsightsExample/RepoUtils/Env.cs rename to dotnet/samples/CreateChatGptPlugin/Solution/config/Env.cs index e24da8fd7b20..1f2836f7bbe6 100644 --- a/dotnet/samples/ApplicationInsightsExample/RepoUtils/Env.cs +++ b/dotnet/samples/CreateChatGptPlugin/Solution/config/Env.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using Microsoft.Extensions.Configuration; #pragma warning disable CA1812 // instantiated by AddUserSecrets @@ -13,7 +12,7 @@ internal sealed class Env /// /// Secret name / Env var name /// Value found in Secret Manager or Environment Variable - internal static string Var(string name) + internal static string? Var(string name) { var configuration = new ConfigurationBuilder() .AddUserSecrets() @@ -26,10 +25,6 @@ internal static string Var(string name) } value = Environment.GetEnvironmentVariable(name); - if (string.IsNullOrEmpty(value)) - { - throw new ArgumentException($"Secret / Env var not set: {name}"); - } return value; } diff --git a/dotnet/samples/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs b/dotnet/samples/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs new file mode 100644 index 000000000000..21fc499fef80 --- /dev/null +++ b/dotnet/samples/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs @@ -0,0 +1,62 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; + +internal static class KernelBuilderExtensions +{ + /// + /// Adds a text completion service to the list. 
It can be either an OpenAI or Azure OpenAI backend service. + /// + /// + /// + internal static IKernelBuilder WithCompletionService(this IKernelBuilder kernelBuilder) + { + switch (Env.Var("Global:LlmService")!) + { + case "AzureOpenAI": + if (Env.Var("AzureOpenAI:DeploymentType") == "text-completion") + { + kernelBuilder.Services.AddAzureOpenAITextGeneration( + deploymentName: Env.Var("AzureOpenAI:TextCompletionDeploymentName")!, + modelId: Env.Var("AzureOpenAI:TextCompletionModelId")!, + endpoint: Env.Var("AzureOpenAI:Endpoint")!, + apiKey: Env.Var("AzureOpenAI:ApiKey")! + ); + } + else if (Env.Var("AzureOpenAI:DeploymentType") == "chat-completion") + { + kernelBuilder.Services.AddAzureOpenAIChatCompletion( + deploymentName: Env.Var("AzureOpenAI:ChatCompletionDeploymentName")!, + modelId: Env.Var("AzureOpenAI:ChatCompletionModelId")!, + endpoint: Env.Var("AzureOpenAI:Endpoint")!, + apiKey: Env.Var("AzureOpenAI:ApiKey")! + ); + } + break; + + case "OpenAI": + if (Env.Var("OpenAI:ModelType") == "text-completion") + { + kernelBuilder.Services.AddOpenAITextGeneration( + modelId: Env.Var("OpenAI:TextCompletionModelId")!, + apiKey: Env.Var("OpenAI:ApiKey")!, + orgId: Env.Var("OpenAI:OrgId") + ); + } + else if (Env.Var("OpenAI:ModelType") == "chat-completion") + { + kernelBuilder.Services.AddOpenAIChatCompletion( + modelId: Env.Var("OpenAI:ChatCompletionModelId")!, + apiKey: Env.Var("OpenAI:ApiKey")!, + orgId: Env.Var("OpenAI:OrgId") + ); + } + break; + + default: + throw new ArgumentException($"Invalid service type value: {Env.Var("OpenAI:ModelType")}"); + } + + return kernelBuilder; + } +} diff --git a/dotnet/samples/DocumentationExamples/AIServices.cs b/dotnet/samples/DocumentationExamples/AIServices.cs new file mode 100644 index 000000000000..1975c278e3d8 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/AIServices.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to add AI services to a kernel as described at +/// https://learn.microsoft.com/semantic-kernel/agents/kernel/adding-services +/// +public class AIServices : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== AI Services ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? textModelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || textModelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + string? openAImodelId = TestConfiguration.OpenAI.ChatModelId; + string? openAItextModelId = TestConfiguration.OpenAI.ChatModelId; + string? openAIapiKey = TestConfiguration.OpenAI.ApiKey; + + if (openAImodelId is null || openAItextModelId is null || openAIapiKey is null) + { + WriteLine("OpenAI credentials not found. 
Skipping example."); + + return; + } + + // Create a kernel with an Azure OpenAI chat completion service + // + Kernel kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey) + .Build(); + // + + // You could instead create a kernel with a legacy Azure OpenAI text completion service + // + kernel = Kernel.CreateBuilder() + .AddAzureOpenAITextGeneration(textModelId, endpoint, apiKey) + .Build(); + // + + // You can also create a kernel with a (non-Azure) OpenAI chat completion service + // + kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(openAImodelId, openAIapiKey) + .Build(); + // + + // Or a kernel with a legacy OpenAI text completion service + // + kernel = Kernel.CreateBuilder() + .AddOpenAITextGeneration(openAItextModelId, openAIapiKey) + .Build(); + // + } + + public AIServices(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/DocumentationExamples/BaseTest.cs b/dotnet/samples/DocumentationExamples/BaseTest.cs new file mode 100644 index 000000000000..4017d80066b5 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/BaseTest.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Microsoft.Extensions.Configuration; +using Xunit.Abstractions; + +namespace Examples; + +public abstract class BaseTest +{ + protected ITestOutputHelper Output { get; } + + protected List SimulatedInputText = new(); + protected int SimulatedInputTextIndex = 0; + + protected BaseTest(ITestOutputHelper output) + { + this.Output = output; + LoadUserSecrets(); + } + + private static void LoadUserSecrets() + { + IConfigurationRoot configRoot = new ConfigurationBuilder() + .AddJsonFile("appsettings.Development.json", true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + TestConfiguration.Initialize(configRoot); + } + + /// + /// This method can be substituted by Console.WriteLine when used in Console apps. + /// + /// Target object to write + protected void WriteLine(object? target = null) + { + this.Output.WriteLine(target?.ToString() ?? string.Empty); + } + + /// + /// Current interface ITestOutputHelper does not have a Write method. This extension method adds it to make it analogous to Console.Write when used in Console apps. + /// + /// Target object to write + protected void Write(object? target = null) + { + this.Output.WriteLine(target?.ToString() ?? string.Empty); + } + + /// + /// Simulates reading input strings from a user for the purpose of running tests. + /// + /// A simulate user input string, if available. Null otherwise. + protected string? ReadLine() + { + if (SimulatedInputTextIndex < SimulatedInputText.Count) + { + return SimulatedInputText[SimulatedInputTextIndex++]; + } + + return null; + } +} diff --git a/dotnet/samples/DocumentationExamples/ConfiguringPrompts.cs b/dotnet/samples/DocumentationExamples/ConfiguringPrompts.cs new file mode 100644 index 000000000000..8802210f9d6e --- /dev/null +++ b/dotnet/samples/DocumentationExamples/ConfiguringPrompts.cs @@ -0,0 +1,134 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Plugins.Core; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to configure prompts as described at +/// https://learn.microsoft.com/semantic-kernel/prompts/configure-prompts +/// +public class ConfiguringPrompts : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Configuring Prompts ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + var builder = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey); + builder.Plugins.AddFromType(); + Kernel kernel = builder.Build(); + + // + // Create a template for chat with settings + var chat = kernel.CreateFunctionFromPrompt( + new PromptTemplateConfig() + { + Name = "Chat", + Description = "Chat with the assistant.", + Template = @"{{ConversationSummaryPlugin.SummarizeConversation $history}} + User: {{$request}} + Assistant: ", + TemplateFormat = "semantic-kernel", + InputVariables = new List() + { + new() { Name = "history", Description = "The history of the conversation.", IsRequired = false, Default = "" }, + new() { Name = "request", Description = "The user's request.", IsRequired = true } + }, + ExecutionSettings = + { + { + "default", + new OpenAIPromptExecutionSettings() + { + MaxTokens = 1000, + Temperature = 0 + } + }, + { + "gpt-3.5-turbo", new OpenAIPromptExecutionSettings() + { + ModelId = "gpt-3.5-turbo-0613", + MaxTokens = 4000, + Temperature = 0.2 + } + }, + { + "gpt-4", + new OpenAIPromptExecutionSettings() + { + ModelId = "gpt-4-1106-preview", + MaxTokens = 8000, + Temperature = 0.3 + } + } + } + } + ); + // + + // Create chat history and choices + ChatHistory history = new(); + + // Start the chat loop + Write("User > "); + string? userInput; + while ((userInput = ReadLine()) != null) + { + // Get chat response + var chatResult = kernel.InvokeStreamingAsync( + chat, + new() + { + { "request", userInput }, + { "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) } + } + ); + + // Stream the response + string message = ""; + await foreach (var chunk in chatResult) + { + if (chunk.Role.HasValue) + { + Write(chunk.Role + " > "); + } + message += chunk; + Write(chunk); + } + WriteLine(); + + // Append to history + history.AddUserMessage(userInput); + history.AddAssistantMessage(message); + + // Get user input again + Write("User > "); + } + } + + public ConfiguringPrompts(ITestOutputHelper output) : base(output) + { + SimulatedInputText = ["Who were the Vikings?"]; + } +} diff --git a/dotnet/samples/DocumentationExamples/CreatingFunctions.cs b/dotnet/samples/DocumentationExamples/CreatingFunctions.cs new file mode 100644 index 000000000000..80f002404178 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/CreatingFunctions.cs @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Plugins; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to create native functions for AI to call as described at +/// https://learn.microsoft.com/semantic-kernel/agents/plugins/using-the-KernelFunction-decorator +/// +public class CreatingFunctions : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Creating native functions ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + // + var builder = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey); + builder.Plugins.AddFromType(); + Kernel kernel = builder.Build(); + + // Test the math plugin + double answer = await kernel.InvokeAsync( + "MathPlugin", "Sqrt", new() + { + { "number1", 12 } + }); + WriteLine($"The square root of 12 is {answer}."); + // + + // Create chat history + ChatHistory history = new(); + + // + + // Get chat completion service + var chatCompletionService = kernel.GetRequiredService(); + + // Start the conversation + Write("User > "); + string? userInput; + while ((userInput = ReadLine()) != null) + { + history.AddUserMessage(userInput); + + // Enable auto function calling + OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() + { + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + }; + + // Get the response from the AI + var result = chatCompletionService.GetStreamingChatMessageContentsAsync( + history, + executionSettings: openAIPromptExecutionSettings, + kernel: kernel); + + // Stream the results + string fullMessage = ""; + var first = true; + await foreach (var content in result) + { + if (content.Role.HasValue && first) + { + Write("Assistant > "); + first = false; + } + Write(content.Content); + fullMessage += content.Content; + } + WriteLine(); + + // Add the message from the agent to the chat history + history.AddAssistantMessage(fullMessage); + + // Get user input again + Write("User > "); + } + + // + } + + public CreatingFunctions(ITestOutputHelper output) : base(output) + { + SimulatedInputText = ["What is 49 diivided by 37?"]; + } +} diff --git a/dotnet/samples/DocumentationExamples/DocumentationExamples.csproj b/dotnet/samples/DocumentationExamples/DocumentationExamples.csproj new file mode 100644 index 000000000000..d4591d826599 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/DocumentationExamples.csproj @@ -0,0 +1,72 @@ + + + 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + + + DocumentationExamples + + net6.0 + LatestMajor + true + false + + CS8618,IDE0009,CA1051,CA1050,CA1707,CA2007,VSTHRD111,CS1591,RCS1110,CA5394,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0010,SKEXP0011,,SKEXP0012,SKEXP0020,SKEXP0021,SKEXP0022,SKEXP0023,SKEXP0024,SKEXP0025,SKEXP0026,SKEXP0027,SKEXP0028,SKEXP0029,SKEXP0030,SKEXP0031,SKEXP0032,SKEXP0040,SKEXP0041,SKEXP0042,SKEXP0050,SKEXP0051,SKEXP0052,SKEXP0053,SKEXP0054,SKEXP0055,SKEXP0060,SKEXP0061,SKEXP0101,SKEXP0102 + Library + 12.0 + + + + + + + + + PreserveNewest + + + PreserveNewest + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Always + + + \ No newline at end of file diff --git a/dotnet/samples/DocumentationExamples/FunctionsWithinPrompts.cs b/dotnet/samples/DocumentationExamples/FunctionsWithinPrompts.cs new file mode 100644 index 000000000000..e2fb161176d1 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/FunctionsWithinPrompts.cs @@ -0,0 +1,164 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Plugins.Core; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to call functions within prompts as described at +/// https://learn.microsoft.com/semantic-kernel/prompts/calling-nested-functions +/// +public class FunctionsWithinPrompts : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Functions within Prompts ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + // + var builder = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey); + builder.Plugins.AddFromType(); + Kernel kernel = builder.Build(); + // + + List choices = new() { "ContinueConversation", "EndConversation" }; + + // Create few-shot examples + List fewShotExamples = new() + { + new ChatHistory() + { + new ChatMessageContent(AuthorRole.User, "Can you send a very quick approval to the marketing team?"), + new ChatMessageContent(AuthorRole.System, "Intent:"), + new ChatMessageContent(AuthorRole.Assistant, "ContinueConversation") + }, + new ChatHistory() + { + new ChatMessageContent(AuthorRole.User, "Can you send the full update to the marketing team?"), + new ChatMessageContent(AuthorRole.System, "Intent:"), + new ChatMessageContent(AuthorRole.Assistant, "EndConversation") + } + }; + + // Create handlebars template for intent + // + var getIntent = kernel.CreateFunctionFromPrompt( + new() + { + Template = @" +Instructions: What is the intent of this request? +Do not explain the reasoning, just reply back with the intent. If you are unsure, reply with {{choices[0]}}. +Choices: {{choices}}. 
+ +{{#each fewShotExamples}} + {{#each this}} + {{content}} + {{/each}} +{{/each}} + +{{ConversationSummaryPlugin-SummarizeConversation history}} + +{{request}} +Intent:", + TemplateFormat = "handlebars" + }, + new HandlebarsPromptTemplateFactory() + ); + // + + // Create a Semantic Kernel template for chat + // + var chat = kernel.CreateFunctionFromPrompt( +@"{{ConversationSummaryPlugin.SummarizeConversation $history}} +User: {{$request}} +Assistant: " + ); + // + + // + // Create chat history + ChatHistory history = new(); + + // Start the chat loop + while (true) + { + // Get user input + Write("User > "); + var request = ReadLine(); + + // Invoke handlebars prompt + var intent = await kernel.InvokeAsync( + getIntent, + new() + { + { "request", request }, + { "choices", choices }, + { "history", history }, + { "fewShotExamples", fewShotExamples } + } + ); + + // End the chat if the intent is "Stop" + if (intent.ToString() == "EndConversation") + { + break; + } + + // Get chat response + var chatResult = kernel.InvokeStreamingAsync( + chat, + new() + { + { "request", request }, + { "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) } + } + ); + + // Stream the response + string message = ""; + await foreach (var chunk in chatResult) + { + if (chunk.Role.HasValue) + { + Write(chunk.Role + " > "); + } + message += chunk; + Write(chunk); + } + WriteLine(); + + // Append to history + history.AddUserMessage(request!); + history.AddAssistantMessage(message); + } + + // + } + + public FunctionsWithinPrompts(ITestOutputHelper output) : base(output) + { + SimulatedInputText = [ + "Can you send an approval to the marketing team?", + "That is all, thanks."]; + } +} diff --git a/dotnet/samples/DocumentationExamples/Planner.cs b/dotnet/samples/DocumentationExamples/Planner.cs new file mode 100644 index 000000000000..c7e2bc727b6b --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Planner.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Plugins; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to create native functions for AI to call as described at +/// https://learn.microsoft.com/semantic-kernel/agents/plugins/using-the-KernelFunction-decorator +/// +public class Planner : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Planner ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + // + var builder = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey); + builder.Services.AddLogging(c => c.AddDebug().SetMinimumLevel(LogLevel.Trace)); + builder.Plugins.AddFromType(); + Kernel kernel = builder.Build(); + + // Get chat completion service + var chatCompletionService = kernel.GetRequiredService(); + + // Create chat history + ChatHistory history = new(); + + // Start the conversation + Write("User > "); + string? 
userInput; + while ((userInput = ReadLine()) != null) + { + // Get user input + Write("User > "); + history.AddUserMessage(ReadLine()!); + + // Enable auto function calling + OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() + { + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + }; + + // Get the response from the AI + var result = chatCompletionService.GetStreamingChatMessageContentsAsync( + history, + executionSettings: openAIPromptExecutionSettings, + kernel: kernel); + + // Stream the results + string fullMessage = ""; + var first = true; + await foreach (var content in result) + { + if (content.Role.HasValue && first) + { + Write("Assistant > "); + first = false; + } + Write(content.Content); + fullMessage += content.Content; + } + WriteLine(); + + // Add the message from the agent to the chat history + history.AddAssistantMessage(fullMessage); + + // Get user input again + Write("User > "); + } + } + + public Planner(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/DocumentationExamples/Plugin.cs b/dotnet/samples/DocumentationExamples/Plugin.cs new file mode 100644 index 000000000000..9888313a24d1 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugin.cs @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example shows how to create a plugin class and interact with as described at +/// https://learn.microsoft.com/semantic-kernel/overview/ +/// This sample uses function calling, so it only works on models newer than 0613. +/// +public class Plugin : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Plugin ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + // Create kernel + // + var builder = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey); + builder.Plugins.AddFromType(); + Kernel kernel = builder.Build(); + // + + // + + // Create chat history + var history = new ChatHistory(); + + // Get chat completion service + var chatCompletionService = kernel.GetRequiredService(); + + // Start the conversation + Write("User > "); + string? userInput; + while ((userInput = ReadLine()) != null) + { + // Add user input + history.AddUserMessage(userInput); + + // Enable auto function calling + OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() + { + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + }; + + // Get the response from the AI + var result = await chatCompletionService.GetChatMessageContentAsync( + history, + executionSettings: openAIPromptExecutionSettings, + kernel: kernel); + + // Print the results + WriteLine("Assistant > " + result); + + // Add the message from the agent to the chat history + history.AddMessage(result.Role, result.Content ?? 
string.Empty); + + // Get user input again + Write("User > "); + } + // + } + + public Plugin(ITestOutputHelper output) : base(output) + { + SimulatedInputText = [ + "Hello", + "Can you turn on the lights"]; + } +} + +// +public class LightPlugin +{ + public bool IsOn { get; set; } = false; + +#pragma warning disable CA1024 // Use properties where appropriate + [KernelFunction] + [Description("Gets the state of the light.")] + public string GetState() => IsOn ? "on" : "off"; +#pragma warning restore CA1024 // Use properties where appropriate + + [KernelFunction] + [Description("Changes the state of the light.'")] + public string ChangeState(bool newState) + { + this.IsOn = newState; + var state = GetState(); + + // Print the state to the console + Console.WriteLine($"[Light is now {state}]"); + + return state; + } +} +// diff --git a/dotnet/samples/DocumentationExamples/Plugins/MathPlugin.cs b/dotnet/samples/DocumentationExamples/Plugins/MathPlugin.cs new file mode 100644 index 000000000000..101f03505d2a --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugins/MathPlugin.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using Microsoft.SemanticKernel; + +namespace Plugins; + +public sealed class MathPlugin +{ + [KernelFunction, Description("Take the square root of a number")] + public static double Sqrt( + [Description("The number to take a square root of")] double number1 + ) + { + return Math.Sqrt(number1); + } + + [KernelFunction, Description("Add two numbers")] + public static double Add( + [Description("The first number to add")] double number1, + [Description("The second number to add")] double number2 + ) + { + return number1 + number2; + } + + [KernelFunction, Description("Subtract two numbers")] + public static double Subtract( + [Description("The first number to subtract from")] double number1, + [Description("The second number to subtract away")] double number2 + ) + { + return number1 - number2; + } + + [KernelFunction, Description("Multiply two numbers. 
When increasing by a percentage, don't forget to add 1 to the percentage.")] + public static double Multiply( + [Description("The first number to multiply")] double number1, + [Description("The second number to multiply")] double number2 + ) + { + return number1 * number2; + } + + [KernelFunction, Description("Divide two numbers")] + public static double Divide( + [Description("The first number to divide from")] double number1, + [Description("The second number to divide by")] double number2 + ) + { + return number1 / number2; + } + + [KernelFunction, Description("Raise a number to a power")] + public static double Power( + [Description("The number to raise")] double number1, + [Description("The power to raise the number to")] double number2 + ) + { + return Math.Pow(number1, number2); + } + + [KernelFunction, Description("Take the log of a number")] + public static double Log( + [Description("The number to take the log of")] double number1, + [Description("The base of the log")] double number2 + ) + { + return Math.Log(number1, number2); + } + + [KernelFunction, Description("Round a number to the target number of decimal places")] + public static double Round( + [Description("The number to round")] double number1, + [Description("The number of decimal places to round to")] double number2 + ) + { + return Math.Round(number1, (int)number2); + } + + [KernelFunction, Description("Take the absolute value of a number")] + public static double Abs( + [Description("The number to take the absolute value of")] double number1 + ) + { + return Math.Abs(number1); + } + + [KernelFunction, Description("Take the floor of a number")] + public static double Floor( + [Description("The number to take the floor of")] double number1 + ) + { + return Math.Floor(number1); + } + + [KernelFunction, Description("Take the ceiling of a number")] + public static double Ceiling( + [Description("The number to take the ceiling of")] double number1 + ) + { + return Math.Ceiling(number1); + } + + [KernelFunction, Description("Take the sine of a number")] + public static double Sin( + [Description("The number to take the sine of")] double number1 + ) + { + return Math.Sin(number1); + } + + [KernelFunction, Description("Take the cosine of a number")] + public static double Cos( + [Description("The number to take the cosine of")] double number1 + ) + { + return Math.Cos(number1); + } + + [KernelFunction, Description("Take the tangent of a number")] + public static double Tan( + [Description("The number to take the tangent of")] double number1 + ) + { + return Math.Tan(number1); + } + + [KernelFunction, Description("Take the arcsine of a number")] + public static double Asin( + [Description("The number to take the arcsine of")] double number1 + ) + { + return Math.Asin(number1); + } + + [KernelFunction, Description("Take the arccosine of a number")] + public static double Acos( + [Description("The number to take the arccosine of")] double number1 + ) + { + return Math.Acos(number1); + } + + [KernelFunction, Description("Take the arctangent of a number")] + public static double Atan( + [Description("The number to take the arctangent of")] double number1 + ) + { + return Math.Atan(number1); + } +} diff --git a/dotnet/samples/DocumentationExamples/Plugins/MathSolver.cs b/dotnet/samples/DocumentationExamples/Plugins/MathSolver.cs new file mode 100644 index 000000000000..23d0d3b9a0ea --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugins/MathSolver.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.ComponentModel; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Planning.Handlebars; + +namespace Plugins; + +public class MathSolver +{ + private readonly ILogger _logger; + + public MathSolver(ILoggerFactory loggerFactory) + { + this._logger = loggerFactory.CreateLogger(); + } + + [KernelFunction] + [Description("Solves a math problem.")] + [return: Description("The solution to the math problem.")] + public async Task SolveAsync( + Kernel kernel, + [Description("The math problem to solve; describe it in 2-3 sentences to ensure full context is provided")] string problem + ) + { + var kernelWithMath = kernel.Clone(); + + // Remove the math solver plugin so that we don't get into an infinite loop + kernelWithMath.Plugins.Remove(kernelWithMath.Plugins["MathSolver"]); + + // Add the math plugin so the LLM can solve the problem + kernelWithMath.Plugins.AddFromType(); + + var planner = new HandlebarsPlanner(new HandlebarsPlannerOptions() { AllowLoops = true }); + + // Create a plan + var plan = await planner.CreatePlanAsync(kernelWithMath, problem); + this._logger.LogInformation("Plan: {Plan}", plan); + + // Execute the plan + var result = (await plan.InvokeAsync(kernelWithMath)).Trim(); + this._logger.LogInformation("Results: {Result}", result); + + return result; + } +} diff --git a/dotnet/samples/DocumentationExamples/Plugins/OrchestratorPlugin/GetIntent/config.json b/dotnet/samples/DocumentationExamples/Plugins/OrchestratorPlugin/GetIntent/config.json new file mode 100644 index 000000000000..501fadf5b95b --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugins/OrchestratorPlugin/GetIntent/config.json @@ -0,0 +1,31 @@ +{ + "schema": 1, + "type": "completion", + "description": "Gets the intent of the user.", + "completion": { + "max_tokens": 500, + "temperature": 0.0, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0 + }, + "input": { + "parameters": [ + { + "name": "input", + "description": "The user's request.", + "defaultValue": "" + }, + { + "name": "history", + "description": "The history of the conversation.", + "defaultValue": "" + }, + { + "name": "options", + "description": "The options to choose from.", + "defaultValue": "" + } + ] + } +} \ No newline at end of file diff --git a/dotnet/samples/DocumentationExamples/Plugins/OrchestratorPlugin/GetIntent/skprompt.txt b/dotnet/samples/DocumentationExamples/Plugins/OrchestratorPlugin/GetIntent/skprompt.txt new file mode 100644 index 000000000000..1fb1cab814be --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugins/OrchestratorPlugin/GetIntent/skprompt.txt @@ -0,0 +1,10 @@ +[History] +{{$history}} + +User: {{$input}} + +--------------------------------------------- + +Provide the intent of the user. 
The intent should be one of the following: {{$options}} + +INTENT: \ No newline at end of file diff --git a/dotnet/samples/DocumentationExamples/Plugins/Prompts/chat/config.json b/dotnet/samples/DocumentationExamples/Plugins/Prompts/chat/config.json new file mode 100644 index 000000000000..28c9e2374c5b --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugins/Prompts/chat/config.json @@ -0,0 +1,33 @@ +{ + "schema": 1, + "type": "completion", + "description": "Creates a chat response to the user", + "execution_settings": { + "default": { + "max_tokens": 1000, + "temperature": 0 + }, + "gpt-3.5-turbo": { + "model_id": "gpt-3.5-turbo-0613", + "max_tokens": 4000, + "temperature": 0.1 + }, + "gpt-4": { + "model_id": "gpt-4-1106-preview", + "max_tokens": 8000, + "temperature": 0.3 + } + }, + "input_variables": [ + { + "name": "request", + "description": "The user's request.", + "required": true + }, + { + "name": "history", + "description": "The history of the conversation.", + "required": true + } + ] +} \ No newline at end of file diff --git a/dotnet/samples/DocumentationExamples/Plugins/Prompts/chat/skprompt.txt b/dotnet/samples/DocumentationExamples/Plugins/Prompts/chat/skprompt.txt new file mode 100644 index 000000000000..c4f39134698e --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugins/Prompts/chat/skprompt.txt @@ -0,0 +1,3 @@ +{{ConversationSummaryPlugin.SummarizeConversation $history}} +User: {{$request}} +Assistant: \ No newline at end of file diff --git a/dotnet/samples/DocumentationExamples/Plugins/WriterPlugin/ShortPoem/config.json b/dotnet/samples/DocumentationExamples/Plugins/WriterPlugin/ShortPoem/config.json new file mode 100644 index 000000000000..c03553d76657 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugins/WriterPlugin/ShortPoem/config.json @@ -0,0 +1,21 @@ +{ + "schema": 1, + "type": "completion", + "description": "Turn a scenario into a short and entertaining poem.", + "completion": { + "max_tokens": 200, + "temperature": 0.5, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0 + }, + "input": { + "parameters": [ + { + "name": "input", + "description": "The scenario to turn into a poem.", + "defaultValue": "" + } + ] + } +} diff --git a/dotnet/samples/DocumentationExamples/Plugins/WriterPlugin/ShortPoem/skprompt.txt b/dotnet/samples/DocumentationExamples/Plugins/WriterPlugin/ShortPoem/skprompt.txt new file mode 100644 index 000000000000..d85146e03127 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Plugins/WriterPlugin/ShortPoem/skprompt.txt @@ -0,0 +1,2 @@ +Generate a short funny poem or limerick to explain the given event. Be creative and be funny. Let your imagination run wild. +Event: {{$input}} diff --git a/dotnet/samples/DocumentationExamples/Prompts.cs b/dotnet/samples/DocumentationExamples/Prompts.cs new file mode 100644 index 000000000000..f84e29bb010d --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Prompts.cs @@ -0,0 +1,232 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to use prompts as described at +/// https://learn.microsoft.com/semantic-kernel/prompts/your-first-prompt +/// +public class Prompts : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Prompts ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? 
modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + // + Kernel kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey) + .Build(); + // + + // 0.0 Initial prompt + ////////////////////////////////////////////////////////////////////////////////// + string request = "I want to send an email to the marketing team celebrating their recent milestone."; + string prompt = $"What is the intent of this request? {request}"; + + /* Uncomment this code to make this example interactive + // + Write("Your request: "); + string request = ReadLine()!; + string prompt = $"What is the intent of this request? {request}"; + // + */ + + WriteLine("0.0 Initial prompt"); + // + WriteLine(await kernel.InvokePromptAsync(prompt)); + // + + // 1.0 Make the prompt more specific + ////////////////////////////////////////////////////////////////////////////////// + // + prompt = @$"What is the intent of this request? {request} + You can choose between SendEmail, SendMessage, CompleteTask, CreateDocument."; + // + + WriteLine("1.0 Make the prompt more specific"); + WriteLine(await kernel.InvokePromptAsync(prompt)); + + // 2.0 Add structure to the output with formatting + ////////////////////////////////////////////////////////////////////////////////// + // + prompt = @$"Instructions: What is the intent of this request? + Choices: SendEmail, SendMessage, CompleteTask, CreateDocument. + User Input: {request} + Intent: "; + // + + WriteLine("2.0 Add structure to the output with formatting"); + WriteLine(await kernel.InvokePromptAsync(prompt)); + + // 2.1 Add structure to the output with formatting (using Markdown and JSON) + ////////////////////////////////////////////////////////////////////////////////// + // + prompt = @$"## Instructions +Provide the intent of the request using the following format: + +```json +{{ + ""intent"": {{intent}} +}} +``` + +## Choices +You can choose between the following intents: + +```json +[""SendEmail"", ""SendMessage"", ""CompleteTask"", ""CreateDocument""] +``` + +## User Input +The user input is: + +```json +{{ + ""request"": ""{request}"" +}} +``` + +## Intent"; + // + + WriteLine("2.1 Add structure to the output with formatting (using Markdown and JSON)"); + WriteLine(await kernel.InvokePromptAsync(prompt)); + + // 3.0 Provide examples with few-shot prompting + ////////////////////////////////////////////////////////////////////////////////// + // + prompt = @$"Instructions: What is the intent of this request? +Choices: SendEmail, SendMessage, CompleteTask, CreateDocument. + +User Input: Can you send a very quick approval to the marketing team? +Intent: SendMessage + +User Input: Can you send the full update to the marketing team? +Intent: SendEmail + +User Input: {request} +Intent: "; + // + + WriteLine("3.0 Provide examples with few-shot prompting"); + WriteLine(await kernel.InvokePromptAsync(prompt)); + + // 4.0 Tell the AI what to do to avoid doing something wrong + ////////////////////////////////////////////////////////////////////////////////// + // + prompt = @$"Instructions: What is the intent of this request? +If you don't know the intent, don't guess; instead respond with ""Unknown"". +Choices: SendEmail, SendMessage, CompleteTask, CreateDocument, Unknown. 
+ +User Input: Can you send a very quick approval to the marketing team? +Intent: SendMessage + +User Input: Can you send the full update to the marketing team? +Intent: SendEmail + +User Input: {request} +Intent: "; + // + + WriteLine("4.0 Tell the AI what to do to avoid doing something wrong"); + WriteLine(await kernel.InvokePromptAsync(prompt)); + + // 5.0 Provide context to the AI + ////////////////////////////////////////////////////////////////////////////////// + // + string history = @"User input: I hate sending emails, no one ever reads them. +AI response: I'm sorry to hear that. Messages may be a better way to communicate."; + + prompt = @$"Instructions: What is the intent of this request? +If you don't know the intent, don't guess; instead respond with ""Unknown"". +Choices: SendEmail, SendMessage, CompleteTask, CreateDocument, Unknown. + +User Input: Can you send a very quick approval to the marketing team? +Intent: SendMessage + +User Input: Can you send the full update to the marketing team? +Intent: SendEmail + +{history} +User Input: {request} +Intent: "; + // + + WriteLine("5.0 Provide context to the AI"); + WriteLine(await kernel.InvokePromptAsync(prompt)); + + // 6.0 Using message roles in chat completion prompts + ////////////////////////////////////////////////////////////////////////////////// + // + history = @"I hate sending emails, no one ever reads them. +I'm sorry to hear that. Messages may be a better way to communicate."; + + prompt = @$"Instructions: What is the intent of this request? +If you don't know the intent, don't guess; instead respond with ""Unknown"". +Choices: SendEmail, SendMessage, CompleteTask, CreateDocument, Unknown. + +Can you send a very quick approval to the marketing team? +Intent: +SendMessage + +Can you send the full update to the marketing team? +Intent: +SendEmail + +{history} +{request} +Intent:"; + // + + WriteLine("6.0 Using message roles in chat completion prompts"); + WriteLine(await kernel.InvokePromptAsync(prompt)); + + // 7.0 Give your AI words of encouragement + ////////////////////////////////////////////////////////////////////////////////// + // + history = @"I hate sending emails, no one ever reads them. +I'm sorry to hear that. Messages may be a better way to communicate."; + + prompt = @$"Instructions: What is the intent of this request? +If you don't know the intent, don't guess; instead respond with ""Unknown"". +Choices: SendEmail, SendMessage, CompleteTask, CreateDocument, Unknown. +Bonus: You'll get $20 if you get this right. + +Can you send a very quick approval to the marketing team? +Intent: +SendMessage + +Can you send the full update to the marketing team? +Intent: +SendEmail + +{history} +{request} +Intent:"; + // + + WriteLine("7.0 Give your AI words of encouragement"); + WriteLine(await kernel.InvokePromptAsync(prompt)); + } + + public Prompts(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/DocumentationExamples/README.md b/dotnet/samples/DocumentationExamples/README.md new file mode 100644 index 000000000000..7ad6666e3e59 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/README.md @@ -0,0 +1,56 @@ +#Semantic Kernel documentation examples + +This project contains a collection of examples used in documentation on [learn.microsoft.com](https://learn.microsoft.com/). + +## Running Examples with Filters + +You can run specific examples by using test filters (dotnet test --filter). +Type "dotnet test --help" at the command line for more details. 
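+
+For example, to run only the Prompts example from this project, filter on the fully qualified test name (the `Examples` namespace and class names below are the ones added by this change; adjust the filter for other examples):
+
+```
+cd dotnet/samples/DocumentationExamples
+
+dotnet test --filter "FullyQualifiedName~Examples.Prompts"
+```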
+ +## Configuring Secrets + +Most of the examples will require secrets and credentials to access OpenAI, Azure OpenAI, +and other resources. We suggest using .NET +[Secret Manager](https://learn.microsoft.com/aspnet/core/security/app-secrets) +to avoid the risk of leaking secrets into the repository, branches and pull requests. +You can also use environment variables if you prefer. + +This project and KernelSyntaxExamples use the same pool of secrets. + +To set your secrets with Secret Manager: + +``` +cd dotnet/samples/DocumentationExamples + +dotnet user-secrets init + +dotnet user-secrets set "OpenAI:ModelId" "..." +dotnet user-secrets set "OpenAI:ChatModelId" "..." +dotnet user-secrets set "OpenAI:EmbeddingModelId" "..." +dotnet user-secrets set "OpenAI:ApiKey" "..." + +dotnet user-secrets set "AzureOpenAI:ServiceId" "..." +dotnet user-secrets set "AzureOpenAI:DeploymentName" "..." +dotnet user-secrets set "AzureOpenAI:ModelId" "..." +dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..." +dotnet user-secrets set "AzureOpenAI:ChatModelId" "..." +dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/" +dotnet user-secrets set "AzureOpenAI:ApiKey" "..." +``` + +To set your secrets with environment variables, use these names: + +``` +# OpenAI +OpenAI__ModelId +OpenAI__ChatModelId +OpenAI__EmbeddingModelId +OpenAI__ApiKey + +# Azure OpenAI +AzureOpenAI__ServiceId +AzureOpenAI__DeploymentName +AzureOpenAI__ChatDeploymentName +AzureOpenAI__Endpoint +AzureOpenAI__ApiKey +``` diff --git a/dotnet/samples/DocumentationExamples/Resources/getIntent.prompt.yaml b/dotnet/samples/DocumentationExamples/Resources/getIntent.prompt.yaml new file mode 100644 index 000000000000..e01cb765c2d2 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Resources/getIntent.prompt.yaml @@ -0,0 +1,40 @@ +name: getIntent +description: Gets the intent of the user. +template: | + Instructions: What is the intent of this request? + Do not explain the reasoning, just reply back with the intent. If you are unsure, reply with {{choices[0]}}. + Choices: {{choices}}. + + {{#each fewShotExamples}} + {{#each this}} + {{content}} + {{/each}} + {{/each}} + + {{ConversationSummaryPlugin.SummarizeConversation history}} + + {{request}} + Intent: +template_format: handlebars +input_variables: + - name: choices + description: The choices for the AI to choose from + default: ContinueConversation, EndConversation + - name: fewShotExamples + description: Few shot examples for the AI to learn from + is_required: true + - name: request + description: The user's request + is_required: true +execution_settings: + default: + max_tokens: 10 + temperature: 0 + gpt-3.5-turbo: + model_id: gpt-3.5-turbo-0613 + max_tokens: 10 + temperature: 0.2 + gpt-4: + model_id: gpt-4-1106-preview + max_tokens: 10 + temperature: 0.2 \ No newline at end of file diff --git a/dotnet/samples/DocumentationExamples/SerializingPrompts.cs b/dotnet/samples/DocumentationExamples/SerializingPrompts.cs new file mode 100644 index 000000000000..8d309e0ebabe --- /dev/null +++ b/dotnet/samples/DocumentationExamples/SerializingPrompts.cs @@ -0,0 +1,138 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Plugins.Core; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to serialize prompts as described at +/// https://learn.microsoft.com/semantic-kernel/prompts/saving-prompts-as-files +/// +public class SerializingPrompts : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Serializing Prompts ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + var builder = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey); + builder.Plugins.AddFromType(); + Kernel kernel = builder.Build(); + + // Load prompts + var prompts = kernel.CreatePluginFromPromptDirectory("./../../../Plugins/Prompts"); + + // Load prompt from YAML + using StreamReader reader = new(Assembly.GetExecutingAssembly().GetManifestResourceStream("Resources." + "getIntent.prompt.yaml")!); + KernelFunction getIntent = kernel.CreateFunctionFromPromptYaml( + await reader.ReadToEndAsync(), + promptTemplateFactory: new HandlebarsPromptTemplateFactory() + ); + + // Create choices + List choices = new() { "ContinueConversation", "EndConversation" }; + + // Create few-shot examples + List fewShotExamples = new() + { + new ChatHistory() + { + new ChatMessageContent(AuthorRole.User, "Can you send a very quick approval to the marketing team?"), + new ChatMessageContent(AuthorRole.System, "Intent:"), + new ChatMessageContent(AuthorRole.Assistant, "ContinueConversation") + }, + new ChatHistory() + { + new ChatMessageContent(AuthorRole.User, "Can you send the full update to the marketing team?"), + new ChatMessageContent(AuthorRole.System, "Intent:"), + new ChatMessageContent(AuthorRole.Assistant, "EndConversation") + } + }; + + // Create chat history + ChatHistory history = new(); + + // Start the chat loop + Write("User > "); + string? 
userInput; + while ((userInput = ReadLine()) != null) + { + // Invoke handlebars prompt + var intent = await kernel.InvokeAsync( + getIntent, + new() + { + { "request", userInput }, + { "choices", choices }, + { "history", history }, + { "fewShotExamples", fewShotExamples } + } + ); + + // End the chat if the intent is "Stop" + if (intent.ToString() == "EndConversation") + { + break; + } + + // Get chat response + var chatResult = kernel.InvokeStreamingAsync( + prompts["chat"], + new() + { + { "request", userInput }, + { "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) } + } + ); + + // Stream the response + string message = ""; + await foreach (var chunk in chatResult) + { + if (chunk.Role.HasValue) + { + Write(chunk.Role + " > "); + } + message += chunk; + Write(chunk); + } + WriteLine(); + + // Append to history + history.AddUserMessage(userInput); + history.AddAssistantMessage(message); + + // Get user input again + Write("User > "); + } + } + + public SerializingPrompts(ITestOutputHelper output) : base(output) + { + SimulatedInputText = [ + "Can you send an approval to the marketing team?", + "That is all, thanks."]; + } +} diff --git a/dotnet/samples/DocumentationExamples/Templates.cs b/dotnet/samples/DocumentationExamples/Templates.cs new file mode 100644 index 000000000000..e75f6de98213 --- /dev/null +++ b/dotnet/samples/DocumentationExamples/Templates.cs @@ -0,0 +1,155 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to templatize prompts as described at +/// https://learn.microsoft.com/semantic-kernel/prompts/templatizing-prompts +/// +public class Templates : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Templates ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + Kernel kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey) + .Build(); + + // Create a Semantic Kernel template for chat + var chat = kernel.CreateFunctionFromPrompt( + @"{{$history}} + User: {{$request}} + Assistant: "); + + // Create choices + List choices = new() { "ContinueConversation", "EndConversation" }; + + // Create few-shot examples + List fewShotExamples = + [ + new ChatHistory() + { + new ChatMessageContent(AuthorRole.User, "Can you send a very quick approval to the marketing team?"), + new ChatMessageContent(AuthorRole.System, "Intent:"), + new ChatMessageContent(AuthorRole.Assistant, "ContinueConversation") + }, + new ChatHistory() + { + new ChatMessageContent(AuthorRole.User, "Thanks, I'm done for now"), + new ChatMessageContent(AuthorRole.System, "Intent:"), + new ChatMessageContent(AuthorRole.Assistant, "EndConversation") + } + ]; + + // Create handlebars template for intent + var getIntent = kernel.CreateFunctionFromPrompt( + new() + { + Template = @" +Instructions: What is the intent of this request? +Do not explain the reasoning, just reply back with the intent. 
If you are unsure, reply with {{choices[0]}}. +Choices: {{choices}}. + +{{#each fewShotExamples}} + {{#each this}} + {{content}} + {{/each}} +{{/each}} + +{{#each chatHistory}} + {{content}} +{{/each}} + +{{request}} +Intent:", + TemplateFormat = "handlebars" + }, + new HandlebarsPromptTemplateFactory() + ); + + ChatHistory history = new(); + + // Start the chat loop + while (true) + { + // Get user input + Write("User > "); + var request = ReadLine(); + + // Invoke prompt + var intent = await kernel.InvokeAsync( + getIntent, + new() + { + { "request", request }, + { "choices", choices }, + { "history", history }, + { "fewShotExamples", fewShotExamples } + } + ); + + // End the chat if the intent is "Stop" + if (intent.ToString() == "EndConversation") + { + break; + } + + // Get chat response + var chatResult = kernel.InvokeStreamingAsync( + chat, + new() + { + { "request", request }, + { "history", string.Join("\n", history.Select(x => x.Role + ": " + x.Content)) } + } + ); + + // Stream the response + string message = ""; + await foreach (var chunk in chatResult) + { + if (chunk.Role.HasValue) + { + Write(chunk.Role + " > "); + } + + message += chunk; + Write(chunk); + } + WriteLine(); + + // Append to history + history.AddUserMessage(request!); + history.AddAssistantMessage(message); + } + } + + public Templates(ITestOutputHelper output) : base(output) + { + SimulatedInputText = [ + "Can you send an approval to the marketing team?", + "That is all, thanks."]; + } +} diff --git a/dotnet/samples/DocumentationExamples/TestConfiguration.cs b/dotnet/samples/DocumentationExamples/TestConfiguration.cs new file mode 100644 index 000000000000..01108b8827dc --- /dev/null +++ b/dotnet/samples/DocumentationExamples/TestConfiguration.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Runtime.CompilerServices; +using Microsoft.Extensions.Configuration; + +public sealed class TestConfiguration +{ + private readonly IConfigurationRoot _configRoot; + private static TestConfiguration? s_instance; + + private TestConfiguration(IConfigurationRoot configRoot) + { + this._configRoot = configRoot; + } + + public static void Initialize(IConfigurationRoot configRoot) + { + s_instance = new TestConfiguration(configRoot); + } + + public static OpenAIConfig OpenAI => LoadSection(); + public static AzureOpenAIConfig AzureOpenAI => LoadSection(); + public static AzureOpenAIEmbeddingsConfig AzureOpenAIEmbeddings => LoadSection(); + + private static T LoadSection([CallerMemberName] string? caller = null) + { + if (s_instance == null) + { + throw new InvalidOperationException( + "TestConfiguration must be initialized with a call to Initialize(IConfigurationRoot) before accessing configuration values."); + } + + if (string.IsNullOrEmpty(caller)) + { + throw new ArgumentNullException(nameof(caller)); + } + return s_instance._configRoot.GetSection(caller).Get() ?? + throw new ArgumentException($"Missing {caller} configuration section"); + } + + public class OpenAIConfig + { + public string? ModelId { get; set; } + public string? ChatModelId { get; set; } + public string? EmbeddingModelId { get; set; } + public string? ApiKey { get; set; } + } + + public class AzureOpenAIConfig + { + public string? ServiceId { get; set; } + public string? DeploymentName { get; set; } + public string? ModelId { get; set; } + public string? ChatDeploymentName { get; set; } + public string? ChatModelId { get; set; } + public string? ImageDeploymentName { get; set; } + public string? 
ImageModelId { get; set; } + public string? ImageEndpoint { get; set; } + public string? Endpoint { get; set; } + public string? ApiKey { get; set; } + public string? ImageApiKey { get; set; } + } + + public class AzureOpenAIEmbeddingsConfig + { + public string? DeploymentName { get; set; } + public string? Endpoint { get; set; } + public string? ApiKey { get; set; } + } +} diff --git a/dotnet/samples/DocumentationExamples/UsingTheKernel.cs b/dotnet/samples/DocumentationExamples/UsingTheKernel.cs new file mode 100644 index 000000000000..8600efdddd5f --- /dev/null +++ b/dotnet/samples/DocumentationExamples/UsingTheKernel.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +// +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.Core; +// +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to interact with the kernel as described at +/// https://learn.microsoft.com/semantic-kernel/agents/kernel +/// +public class UsingTheKernel : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Kernel ========"); + + string? endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string? modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string? apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || modelId is null || apiKey is null) + { + WriteLine("Azure OpenAI credentials not found. Skipping example."); + + return; + } + + // Create a kernel with a logger and Azure OpenAI chat completion service + // + var builder = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(modelId, endpoint, apiKey); + builder.Services.AddLogging(c => c.AddDebug().SetMinimumLevel(LogLevel.Trace)); + builder.Plugins.AddFromType(); + builder.Plugins.AddFromPromptDirectory("./../../../Plugins/WriterPlugin"); + Kernel kernel = builder.Build(); + // + + // Get the current time + // + var currentTime = await kernel.InvokeAsync("TimePlugin", "UtcNow"); + WriteLine(currentTime); + // + + // Write a poem with the WriterPlugin.ShortPoem function using the current time as input + // + var poemResult = await kernel.InvokeAsync("WriterPlugin", "ShortPoem", new() + { + { "input", currentTime } + }); + WriteLine(poemResult); + // + } + + public UsingTheKernel(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/BaseTest.cs b/dotnet/samples/KernelSyntaxExamples/BaseTest.cs new file mode 100644 index 000000000000..b2559c03ae6f --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/BaseTest.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; +using RepoUtils; +using Xunit.Abstractions; + +namespace Examples; + +public abstract class BaseTest +{ + protected ITestOutputHelper Output { get; } + + protected ILoggerFactory LoggerFactory { get; } + + protected BaseTest(ITestOutputHelper output) + { + this.Output = output; + this.LoggerFactory = new XunitLogger(output); + + LoadUserSecrets(); + } + + private static void LoadUserSecrets() + { + IConfigurationRoot configRoot = new ConfigurationBuilder() + .AddJsonFile("appsettings.Development.json", true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + TestConfiguration.Initialize(configRoot); + } + + /// + /// This method can be substituted by Console.WriteLine when used in Console apps. + /// + /// Target object to write + protected void WriteLine(object? target = null) + { + this.Output.WriteLine(target ?? string.Empty); + } + + /// + /// Current interface ITestOutputHelper does not have a Write method. This extension method adds it to make it analogous to Console.Write when used in Console apps. + /// + /// Target object to write + protected void Write(object? target = null) + { + this.Output.WriteLine(target ?? string.Empty); + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example01_MethodFunctions.cs b/dotnet/samples/KernelSyntaxExamples/Example01_MethodFunctions.cs new file mode 100644 index 000000000000..d3f113b5f89e --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example01_MethodFunctions.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Plugins.Core; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public class Example01_MethodFunctions : BaseTest +{ + [Fact] + public Task RunAsync() + { + this.WriteLine("======== Functions ========"); + + // Load native plugin + var text = new TextPlugin(); + + // Use function without kernel + var result = text.Uppercase("ciao!"); + + this.WriteLine(result); + + return Task.CompletedTask; + } + + public Example01_MethodFunctions(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example01_NativeFunctions.cs b/dotnet/samples/KernelSyntaxExamples/Example01_NativeFunctions.cs deleted file mode 100644 index 50c2faed5548..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example01_NativeFunctions.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Plugins.Core; - -// ReSharper disable once InconsistentNaming -public static class Example01_NativeFunctions -{ - public static Task RunAsync() - { - Console.WriteLine("======== Functions ========"); - - // Load native plugin - var text = new TextPlugin(); - - // Use function without kernel - var result = text.Uppercase("ciao!"); - - Console.WriteLine(result); - - return Task.CompletedTask; - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example02_Pipeline.cs b/dotnet/samples/KernelSyntaxExamples/Example02_Pipeline.cs deleted file mode 100644 index 3e944b021bf1..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example02_Pipeline.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Plugins.Core; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example02_Pipeline -{ - private static readonly ILoggerFactory s_loggerFactory = ConsoleLogger.LoggerFactory; - - public static async Task RunAsync() - { - Console.WriteLine("======== Pipeline ========"); - - IKernel kernel = new KernelBuilder().WithLoggerFactory(s_loggerFactory).Build(); - - // Load native plugin - var textFunctions = kernel.ImportFunctions(new TextPlugin()); - - KernelResult result = await kernel.RunAsync(" i n f i n i t e s p a c e ", - textFunctions["TrimStart"], - textFunctions["TrimEnd"], - textFunctions["Uppercase"]); - - Console.WriteLine(result.GetValue()); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example03_Arguments.cs b/dotnet/samples/KernelSyntaxExamples/Example03_Arguments.cs new file mode 100644 index 000000000000..d157946bcae1 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example03_Arguments.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Globalization; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Plugins; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; +// This example shows how to use kernel arguments when invoking functions. +public class Example03_Arguments : BaseTest +{ + [Fact] + public async Task RunAsync() + { + this.WriteLine("======== Arguments ========"); + + Kernel kernel = new(); + var textPlugin = kernel.ImportPluginFromType(); + + var arguments = new KernelArguments() + { + ["input"] = "Today is: ", + ["day"] = DateTimeOffset.Now.ToString("dddd", CultureInfo.CurrentCulture) + }; + + // ** Different ways of executing functions with arguments ** + + // Specify and get the value type as generic parameter + string? resultValue = await kernel.InvokeAsync(textPlugin["AppendDay"], arguments); + this.WriteLine($"string -> {resultValue}"); + + // If you need to access the result metadata, you can use the non-generic version to get the FunctionResult + FunctionResult functionResult = await kernel.InvokeAsync(textPlugin["AppendDay"], arguments); + var metadata = functionResult.Metadata; + + // Specify the type from the FunctionResult + this.WriteLine($"FunctionResult.GetValue() -> {functionResult.GetValue()}"); + + // FunctionResult.ToString() automatically converts the result to string + this.WriteLine($"FunctionResult.ToString() -> {functionResult}"); + } + + public Example03_Arguments(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example03_Variables.cs b/dotnet/samples/KernelSyntaxExamples/Example03_Variables.cs deleted file mode 100644 index f44366ae70ba..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example03_Variables.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Globalization; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; -using Plugins; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example03_Variables -{ - private static readonly ILoggerFactory s_loggerFactory = ConsoleLogger.LoggerFactory; - - public static async Task RunAsync() - { - Console.WriteLine("======== Variables ========"); - - IKernel kernel = new KernelBuilder().WithLoggerFactory(s_loggerFactory).Build(); - var textFunctions = kernel.ImportFunctions(new StaticTextPlugin(), "text"); - - var variables = new ContextVariables("Today is: "); - variables.Set("day", DateTimeOffset.Now.ToString("dddd", CultureInfo.CurrentCulture)); - - KernelResult result = await kernel.RunAsync(variables, - textFunctions["AppendDay"], - textFunctions["Uppercase"]); - - Console.WriteLine(result.GetValue()); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example04_CombineLLMPromptsAndNativeCode.cs b/dotnet/samples/KernelSyntaxExamples/Example04_CombineLLMPromptsAndNativeCode.cs deleted file mode 100644 index 15acd518b4c5..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example04_CombineLLMPromptsAndNativeCode.cs +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Plugins.Web; -using Microsoft.SemanticKernel.Plugins.Web.Bing; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example04_CombineLLMPromptsAndNativeCode -{ - public static async Task RunAsync() - { - Console.WriteLine("======== LLMPrompts ========"); - - string openAIApiKey = TestConfiguration.OpenAI.ApiKey; - - if (openAIApiKey == null) - { - Console.WriteLine("OpenAI credentials not found. Skipping example."); - return; - } - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, openAIApiKey) - .Build(); - - string bingApiKey = TestConfiguration.Bing.ApiKey; - if (bingApiKey == null) - { - Console.WriteLine("Bing credentials not found. 
Skipping example."); - return; - } - - var bingConnector = new BingConnector(bingApiKey); - var bing = new WebSearchEnginePlugin(bingConnector); - var searchFunctions = kernel.ImportFunctions(bing, "bing"); - - // Load semantic plugins defined with prompt templates - string folder = RepoFiles.SamplePluginsPath(); - - var summarizeFunctions = kernel.ImportSemanticFunctionsFromDirectory(folder, "SummarizePlugin"); - - // Run - var ask = "What's the tallest building in South America"; - - var result1 = await kernel.RunAsync( - ask, - searchFunctions["Search"] - ); - - var result2 = await kernel.RunAsync( - ask, - searchFunctions["Search"], - summarizeFunctions["Summarize"] - ); - - var result3 = await kernel.RunAsync( - ask, - searchFunctions["Search"], - summarizeFunctions["Notegen"] - ); - - Console.WriteLine(ask + "\n"); - Console.WriteLine("Bing Answer: " + result1.GetValue() + "\n"); - Console.WriteLine("Summary: " + result2.GetValue() + "\n"); - Console.WriteLine("Notes: " + result3.GetValue() + "\n"); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example05_InlineFunctionDefinition.cs b/dotnet/samples/KernelSyntaxExamples/Example05_InlineFunctionDefinition.cs index 0237389c01d2..92ad2f7e895d 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example05_InlineFunctionDefinition.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example05_InlineFunctionDefinition.cs @@ -3,22 +3,25 @@ using System; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using RepoUtils; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming -public static class Example05_InlineFunctionDefinition +namespace Examples; + +public class Example05_InlineFunctionDefinition : BaseTest { - public static async Task RunAsync() + [Fact] + public async Task RunAsync() { - Console.WriteLine("======== Inline Function Definition ========"); + this.WriteLine("======== Inline Function Definition ========"); string openAIModelId = TestConfiguration.OpenAI.ChatModelId; string openAIApiKey = TestConfiguration.OpenAI.ApiKey; - if (openAIModelId == null || openAIApiKey == null) + if (openAIModelId is null || openAIApiKey is null) { - Console.WriteLine("OpenAI credentials not found. Skipping example."); + this.WriteLine("OpenAI credentials not found. Skipping example."); return; } @@ -28,9 +31,8 @@ public static async Task RunAsync() * function inline if you like. */ - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( modelId: openAIModelId, apiKey: openAIApiKey) .Build(); @@ -49,17 +51,21 @@ Be creative and be funny. Let your imagination run wild. 
Event: {{$input}} "; - var excuseFunction = kernel.CreateSemanticFunction(promptTemplate, new OpenAIRequestSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 }); + var excuseFunction = kernel.CreateFunctionFromPrompt(promptTemplate, new OpenAIPromptExecutionSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 }); + + var result = await kernel.InvokeAsync(excuseFunction, new() { ["input"] = "I missed the F1 final race" }); + this.WriteLine(result.GetValue()); - var result = await kernel.RunAsync("I missed the F1 final race", excuseFunction); - Console.WriteLine(result.GetValue()); + result = await kernel.InvokeAsync(excuseFunction, new() { ["input"] = "sorry I forgot your birthday" }); + this.WriteLine(result.GetValue()); - result = await kernel.RunAsync("sorry I forgot your birthday", excuseFunction); - Console.WriteLine(result.GetValue()); + var fixedFunction = kernel.CreateFunctionFromPrompt($"Translate this date {DateTimeOffset.Now:f} to French format", new OpenAIPromptExecutionSettings() { MaxTokens = 100 }); - var fixedFunction = kernel.CreateSemanticFunction($"Translate this date {DateTimeOffset.Now:f} to French format", new OpenAIRequestSettings() { MaxTokens = 100 }); + result = await kernel.InvokeAsync(fixedFunction); + this.WriteLine(result.GetValue()); + } - result = await kernel.RunAsync(fixedFunction); - Console.WriteLine(result.GetValue()); + public Example05_InlineFunctionDefinition(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example06_TemplateLanguage.cs b/dotnet/samples/KernelSyntaxExamples/Example06_TemplateLanguage.cs index 91812152437e..72b5a8f5bb69 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example06_TemplateLanguage.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example06_TemplateLanguage.cs @@ -1,45 +1,45 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Plugins.Core; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using RepoUtils; +using Xunit; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming -public static class Example06_TemplateLanguage +namespace Examples; + +public class Example06_TemplateLanguage : BaseTest { /// - /// Show how to invoke a Native Function written in C# - /// from a Semantic Function written in natural language + /// Show how to invoke a Method Function written in C# + /// from a Prompt Function written in natural language /// - public static async Task RunAsync() + [Fact] + public async Task RunAsync() { - Console.WriteLine("======== TemplateLanguage ========"); + this.WriteLine("======== TemplateLanguage ========"); string openAIModelId = TestConfiguration.OpenAI.ChatModelId; string openAIApiKey = TestConfiguration.OpenAI.ApiKey; if (openAIModelId == null || openAIApiKey == null) { - Console.WriteLine("OpenAI credentials not found. Skipping example."); + this.WriteLine("OpenAI credentials not found. 
Skipping example."); return; } - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( modelId: openAIModelId, apiKey: openAIApiKey) .Build(); // Load native plugin into the kernel function collection, sharing its functions with prompt templates // Functions loaded here are available as "time.*" - kernel.ImportFunctions(new TimePlugin(), "time"); + kernel.ImportPluginFromType("time"); - // Semantic Function invoking time.Date and time.Time native functions + // Prompt Function invoking time.Date and time.Time method functions const string FunctionDefinition = @" Today is: {{time.Date}} Current time is: {{time.Time}} @@ -50,18 +50,19 @@ Is it weekend time (weekend/not weekend)? "; // This allows to see the prompt before it's sent to OpenAI - Console.WriteLine("--- Rendered Prompt"); - var promptRenderer = new BasicPromptTemplateEngine(); - var renderedPrompt = await promptRenderer.RenderAsync(FunctionDefinition, kernel.CreateNewContext()); - Console.WriteLine(renderedPrompt); + this.WriteLine("--- Rendered Prompt"); + var promptTemplateFactory = new KernelPromptTemplateFactory(); + var promptTemplate = promptTemplateFactory.Create(new PromptTemplateConfig(FunctionDefinition)); + var renderedPrompt = await promptTemplate.RenderAsync(kernel); + this.WriteLine(renderedPrompt); - // Run the prompt / semantic function - var kindOfDay = kernel.CreateSemanticFunction(FunctionDefinition, new OpenAIRequestSettings() { MaxTokens = 100 }); + // Run the prompt / prompt function + var kindOfDay = kernel.CreateFunctionFromPrompt(FunctionDefinition, new OpenAIPromptExecutionSettings() { MaxTokens = 100 }); // Show the result - Console.WriteLine("--- Semantic Function result"); - var result = await kernel.RunAsync(kindOfDay); - Console.WriteLine(result.GetValue()); + this.WriteLine("--- Prompt Function result"); + var result = await kernel.InvokeAsync(kindOfDay); + this.WriteLine(result.GetValue()); /* OUTPUT: @@ -74,7 +75,7 @@ Is it weekend time (weekend/not weekend)? Is it morning, afternoon, evening, or night (morning/afternoon/evening/night)? Is it weekend time (weekend/not weekend)? - --- Semantic Function result + --- Prompt Function result { "date": "Friday, April 28, 2023", @@ -84,4 +85,8 @@ Is it weekend time (weekend/not weekend)? } */ } + + public Example06_TemplateLanguage(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example07_BingAndGooglePlugins.cs b/dotnet/samples/KernelSyntaxExamples/Example07_BingAndGooglePlugins.cs index 84c180a204c2..d2745f898b47 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example07_BingAndGooglePlugins.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example07_BingAndGooglePlugins.cs @@ -3,36 +3,36 @@ using System; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Plugins.Web; using Microsoft.SemanticKernel.Plugins.Web.Bing; using Microsoft.SemanticKernel.Plugins.Web.Google; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using RepoUtils; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; /// /// The example shows how to use Bing and Google to search for current data /// you might want to import into your system, e.g. 
providing AI prompts with /// recent information, or for AI to generate recent information to display to users. /// -// ReSharper disable CommentTypo -// ReSharper disable once InconsistentNaming -public static class Example07_BingAndGooglePlugins +public class Example07_BingAndGooglePlugins : BaseTest { - public static async Task RunAsync() + [Fact(Skip = "Setup Credentials")] + public async Task RunAsync() { string openAIModelId = TestConfiguration.OpenAI.ChatModelId; string openAIApiKey = TestConfiguration.OpenAI.ApiKey; if (openAIModelId == null || openAIApiKey == null) { - Console.WriteLine("OpenAI credentials not found. Skipping example."); + this.WriteLine("OpenAI credentials not found. Skipping example."); return; } - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( modelId: openAIModelId, apiKey: openAIApiKey) .Build(); @@ -41,13 +41,13 @@ public static async Task RunAsync() string bingApiKey = TestConfiguration.Bing.ApiKey; if (bingApiKey == null) { - Console.WriteLine("Bing credentials not found. Skipping example."); + this.WriteLine("Bing credentials not found. Skipping example."); } else { var bingConnector = new BingConnector(bingApiKey); var bing = new WebSearchEnginePlugin(bingConnector); - kernel.ImportFunctions(bing, "bing"); + kernel.ImportPluginFromObject(bing, "bing"); await Example1Async(kernel, "bing"); await Example2Async(kernel); } @@ -58,7 +58,7 @@ public static async Task RunAsync() if (googleApiKey == null || googleSearchEngineId == null) { - Console.WriteLine("Google credentials not found. Skipping example."); + this.WriteLine("Google credentials not found. Skipping example."); } else { @@ -66,23 +66,24 @@ public static async Task RunAsync() apiKey: googleApiKey, searchEngineId: googleSearchEngineId); var google = new WebSearchEnginePlugin(googleConnector); - kernel.ImportFunctions(new WebSearchEnginePlugin(googleConnector), "google"); + kernel.ImportPluginFromObject(new WebSearchEnginePlugin(googleConnector), "google"); + // ReSharper disable once ArrangeThisQualifier await Example1Async(kernel, "google"); } } - private static async Task Example1Async(IKernel kernel, string searchPluginName) + private async Task Example1Async(Kernel kernel, string searchPluginName) { - Console.WriteLine("======== Bing and Google Search Plugins ========"); + this.WriteLine("======== Bing and Google Search Plugins ========"); // Run var question = "What's the largest building in the world?"; - var function = kernel.Functions.GetFunction(searchPluginName, "search"); - var result = await kernel.RunAsync(question, function); + var function = kernel.Plugins[searchPluginName]["search"]; + var result = await kernel.InvokeAsync(function, new() { ["query"] = question }); - Console.WriteLine(question); - Console.WriteLine($"----{searchPluginName}----"); - Console.WriteLine(result.GetValue()); + this.WriteLine(question); + this.WriteLine($"----{searchPluginName}----"); + this.WriteLine(result.GetValue()); /* OUTPUT: @@ -97,9 +98,9 @@ private static async Task Example1Async(IKernel kernel, string searchPluginName) */ } - private static async Task Example2Async(IKernel kernel) + private async Task Example2Async(Kernel kernel) { - Console.WriteLine("======== Use Search Plugin to answer user questions ========"); + this.WriteLine("======== Use Search Plugin to answer user questions ========"); const string SemanticFunction = @"Answer questions only 
when you know the facts or the information is provided. When you don't have sufficient information you reply with a list of commands to find the information needed. @@ -131,16 +132,17 @@ [EXAMPLE 3] [END OF EXAMPLES] [TASK] -Question: {{ $input }}. +Question: {{ $question }}. Answer: "; - var questions = "Who is the most followed person on TikTok right now? What's the exchange rate EUR:USD?"; - Console.WriteLine(questions); + var question = "Who is the most followed person on TikTok right now? What's the exchange rate EUR:USD?"; + this.WriteLine(question); - var oracle = kernel.CreateSemanticFunction(SemanticFunction, new OpenAIRequestSettings() { MaxTokens = 150, Temperature = 0, TopP = 1 }); + var oracle = kernel.CreateFunctionFromPrompt(SemanticFunction, new OpenAIPromptExecutionSettings() { MaxTokens = 150, Temperature = 0, TopP = 1 }); - var answer = await kernel.RunAsync(oracle, new(questions) + var answer = await kernel.InvokeAsync(oracle, new KernelArguments() { + ["question"] = question, ["externalInformation"] = string.Empty }); @@ -149,28 +151,30 @@ [END OF EXAMPLES] // If the answer contains commands, execute them using the prompt renderer. if (result.Contains("bing.search", StringComparison.OrdinalIgnoreCase)) { - var promptRenderer = new BasicPromptTemplateEngine(); + var promptTemplateFactory = new KernelPromptTemplateFactory(); + var promptTemplate = promptTemplateFactory.Create(new PromptTemplateConfig(result)); - Console.WriteLine("---- Fetching information from Bing..."); - var information = await promptRenderer.RenderAsync(result, kernel.CreateNewContext()); + this.WriteLine("---- Fetching information from Bing..."); + var information = await promptTemplate.RenderAsync(kernel); - Console.WriteLine("Information found:"); - Console.WriteLine(information); + this.WriteLine("Information found:"); + this.WriteLine(information); - // Run the semantic function again, now including information from Bing - answer = await kernel.RunAsync(oracle, new(questions) + // Run the prompt function again, now including information from Bing + answer = await kernel.InvokeAsync(oracle, new KernelArguments() { + ["question"] = question, // The rendered prompt contains the information retrieved from search engines ["externalInformation"] = information }); } else { - Console.WriteLine("AI had all the information, no need to query Bing."); + this.WriteLine("AI had all the information, no need to query Bing."); } - Console.WriteLine("---- ANSWER:"); - Console.WriteLine(answer.GetValue()); + this.WriteLine("---- ANSWER:"); + this.WriteLine(answer.GetValue()); /* OUTPUT: @@ -190,4 +194,8 @@ rate when sending money. Check send rates Convert Euro to US Dollar Convert US D * The exchange rate for EUR to USD is 1.1037097 US Dollars for 1 Euro. 
*/ } + + public Example07_BingAndGooglePlugins(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example08_RetryHandler.cs b/dotnet/samples/KernelSyntaxExamples/Example08_RetryHandler.cs index 576b3dd13d52..df66d963fa15 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example08_RetryHandler.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example08_RetryHandler.cs @@ -2,177 +2,60 @@ using System; using System.Net; -using System.Net.Http; -using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Plugins.Core; -using Microsoft.SemanticKernel.Reliability.Basic; -using Polly; -using RepoUtils; +using Xunit; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming -public static class Example08_RetryHandler -{ - public static async Task RunAsync() - { - await DefaultNoRetryAsync(); - - await ReliabilityBasicExtensionAsync(); - - await ReliabilityPollyExtensionAsync(); - - await CustomHandlerAsync(); - } - - private static async Task DefaultNoRetryAsync() - { - InfoLogger.Logger.LogInformation("============================== Kernel default behavior: No Retry =============================="); - var kernel = InitializeKernelBuilder() - .Build(); - - await ImportAndExecutePluginAsync(kernel); - } +namespace Examples; - private static async Task ReliabilityBasicExtensionAsync() +// This example shows how to use a retry handler within a Semantic Kernel +public class Example08_RetryHandler : BaseTest +{ + [Fact] + public async Task RunAsync() { - InfoLogger.Logger.LogInformation("============================== Using Reliability.Basic extension =============================="); - var retryConfig = new BasicRetryConfig + // Create a Kernel with the HttpClient + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Information)); + builder.Services.ConfigureHttpClientDefaults(c => { - MaxRetryCount = 3, - UseExponentialBackoff = true, - }; - retryConfig.RetryableStatusCodes.Add(HttpStatusCode.Unauthorized); - - var kernel = InitializeKernelBuilder() - .WithRetryBasic(retryConfig) - .Build(); - - await ImportAndExecutePluginAsync(kernel); - } - - private static async Task ReliabilityPollyExtensionAsync() - { - InfoLogger.Logger.LogInformation("============================== Using Reliability.Polly extension =============================="); - var kernel = InitializeKernelBuilder() - .WithRetryPolly(GetPollyPolicy(InfoLogger.LoggerFactory)) - .Build(); - - await ImportAndExecutePluginAsync(kernel); - } - - private static async Task CustomHandlerAsync() - { - InfoLogger.Logger.LogInformation("============================== Using a Custom Http Handler =============================="); - var kernel = InitializeKernelBuilder() - .WithHttpHandlerFactory(new MyCustomHandlerFactory()) - .Build(); - - await ImportAndExecutePluginAsync(kernel); - } - - private static KernelBuilder InitializeKernelBuilder() - { - return new KernelBuilder() - .WithLoggerFactory(InfoLogger.LoggerFactory) - // OpenAI settings - you can set the OpenAI.ApiKey to an invalid value to see the retry policy in play - .WithOpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, "BAD_KEY"); - } - - private static AsyncPolicy GetPollyPolicy(ILoggerFactory? 
logger) - { - // Handle 429 and 401 errors - // Typically 401 would not be something we retry but for demonstration - // purposes we are doing so as it's easy to trigger when using an invalid key. - const int TooManyRequests = 429; - const int Unauthorized = 401; - - return Policy - .HandleResult(response => - (int)response.StatusCode is TooManyRequests or Unauthorized) - .WaitAndRetryAsync(new[] - { - TimeSpan.FromSeconds(2), - TimeSpan.FromSeconds(4), - TimeSpan.FromSeconds(8) - }, - (outcome, timespan, retryCount, _) - => InfoLogger.Logger.LogWarning("Error executing action [attempt {RetryCount} of 3], pausing {PausingMilliseconds}ms. Outcome: {StatusCode}", - retryCount, - timespan.TotalMilliseconds, - outcome.Result.StatusCode)); - } - - private static async Task ImportAndExecutePluginAsync(IKernel kernel) - { - // Load semantic plugin defined with prompt templates - string folder = RepoFiles.SamplePluginsPath(); - - kernel.ImportFunctions(new TimePlugin(), "time"); + // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); + }); + }); + builder.Services.AddOpenAIChatCompletion("gpt-4", "BAD_KEY"); // OpenAI settings - you can set the OpenAI.ApiKey to an invalid value to see the retry policy in play + Kernel kernel = builder.Build(); - var qaPlugin = kernel.ImportSemanticFunctionsFromDirectory( - folder, - "QAPlugin"); + var logger = kernel.LoggerFactory.CreateLogger(typeof(Example08_RetryHandler)); - var question = "How popular is Polly library?"; + const string Question = "How do I add a standard resilience handler in IHttpClientBuilder??"; + logger.LogInformation("Question: {Question}", Question); - InfoLogger.Logger.LogInformation("Question: {0}", question); - // To see the retry policy in play, you can set the OpenAI.ApiKey to an invalid value -#pragma warning disable CA1031 // Do not catch general exception types + // The call to OpenAI will fail and be retried a few times before eventually failing. + // Retrying can overcome transient problems and thus improves resiliency. try { - var answer = await kernel.RunAsync(question, qaPlugin["Question"]); - InfoLogger.Logger.LogInformation("Answer: {0}", answer.GetValue()); + // The InvokePromptAsync call will issue a request to OpenAI with an invalid API key. + // That will cause the request to fail with an HTTP status code 401. As the resilience + // handler is configured to retry on 401s, it'll reissue the request, and will do so + // multiple times until it hits the default retry limit, at which point this operation + // will throw an exception in response to the failure. All of the retries will be visible + // in the logging out to the console. 
+ logger.LogInformation("Answer: {Result}", await kernel.InvokePromptAsync(Question)); } catch (Exception ex) { - InfoLogger.Logger.LogInformation("Error: {0}", ex.Message); - } -#pragma warning restore CA1031 // Do not catch general exception types - } - - // Basic custom retry handler factory - public sealed class MyCustomHandlerFactory : HttpHandlerFactory - { - } - - // Basic custom empty retry handler - public sealed class MyCustomHandler : DelegatingHandler - { - public MyCustomHandler(ILoggerFactory loggerFactory) - { - } - - protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - // Your custom http handling implementation - return Task.FromResult(new HttpResponseMessage(HttpStatusCode.BadRequest) - { - Content = new StringContent("My custom bad request override") - }); + logger.LogInformation("Error: {Message}", ex.Message); } } - private static class InfoLogger + public Example08_RetryHandler(ITestOutputHelper output) : base(output) { - internal static ILogger Logger => LoggerFactory.CreateLogger("Example08_RetryHandler"); - internal static ILoggerFactory LoggerFactory => s_loggerFactory.Value; - private static readonly Lazy s_loggerFactory = new(LogBuilder); - - private static ILoggerFactory LogBuilder() - { - return Microsoft.Extensions.Logging.LoggerFactory.Create(builder => - { - builder.SetMinimumLevel(LogLevel.Information); - builder.AddFilter("Microsoft", LogLevel.Information); - builder.AddFilter("Microsoft.SemanticKernel", LogLevel.Critical); - builder.AddFilter("Microsoft.SemanticKernel.Reliability", LogLevel.Information); - builder.AddFilter("System", LogLevel.Information); - - builder.AddConsole(); - }); - } } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example09_FunctionTypes.cs b/dotnet/samples/KernelSyntaxExamples/Example09_FunctionTypes.cs index ff5c36564108..6574479ca4b3 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example09_FunctionTypes.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example09_FunctionTypes.cs @@ -1,226 +1,282 @@ // Copyright (c) Microsoft. All rights reserved. 
-// ReSharper disable once InconsistentNaming - using System; +using System.ComponentModel; +using System.Globalization; +using System.IO; +using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; +using Microsoft.SemanticKernel.ChatCompletion; using RepoUtils; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; -// ReSharper disable once InconsistentNaming -public static class Example09_FunctionTypes +public class Example09_FunctionTypes : BaseTest { - public static async Task RunAsync() + [Fact] + public async Task RunAsync() { - Console.WriteLine("======== Native function types ========"); - - var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) - .Build(); + this.WriteLine("======== Method Function types ========"); - var variables = new ContextVariables(); + var builder = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + builder.Services.AddLogging(services => services.AddConsole().SetMinimumLevel(LogLevel.Warning)); + builder.Services.AddSingleton(this.Output); + var kernel = builder.Build(); + kernel.Culture = new CultureInfo("pt-BR"); // Load native plugin into the kernel function collection, sharing its functions with prompt templates - var testFunctions = kernel.ImportFunctions(new LocalExamplePlugin(), "test"); + var plugin = kernel.ImportPluginFromType("Examples"); string folder = RepoFiles.SamplePluginsPath(); - kernel.ImportSemanticFunctionsFromDirectory(folder, "SummarizePlugin"); - - // The kernel takes care of wiring the input appropriately - await kernel.RunAsync( - testFunctions["type01"], - testFunctions["type02"], - testFunctions["type03"], - testFunctions["type04"], - testFunctions["type05"], - testFunctions["type06"], - testFunctions["type07"], - testFunctions["type08"], - testFunctions["type09"], - testFunctions["type10"], - testFunctions["type11"], - testFunctions["type12"], - testFunctions["type13"], - testFunctions["type14"], - testFunctions["type15"], - testFunctions["type16"], - testFunctions["type17"], - testFunctions["type18"] - ); - - // Using Kernel.RunAsync - await kernel.RunAsync(testFunctions["type01"]); - await kernel.RunAsync(kernel.Functions.GetFunction("test", "type01")); - - await kernel.RunAsync(testFunctions["type02"]); - await kernel.RunAsync(kernel.Functions.GetFunction("test", "type02")); - - await kernel.RunAsync(testFunctions["type03"]); - await kernel.RunAsync(kernel.Functions.GetFunction("test", "type03")); - - await kernel.RunAsync(testFunctions["type04"], variables); - await kernel.RunAsync(variables, kernel.Functions.GetFunction("test", "type04")); - - await kernel.RunAsync(testFunctions["type05"], variables); - await kernel.RunAsync(variables, kernel.Functions.GetFunction("test", "type05")); - - await kernel.RunAsync(testFunctions["type06"], variables); - await kernel.RunAsync(variables, kernel.Functions.GetFunction("test", "type06")); - - await kernel.RunAsync(testFunctions["type07"], variables); - await kernel.RunAsync(variables, kernel.Functions.GetFunction("test", "type07")); - - await kernel.RunAsync("", testFunctions["type08"]); - await kernel.RunAsync("", kernel.Functions.GetFunction("test", "type08")); - - await kernel.RunAsync("", 
testFunctions["type09"]); - await kernel.RunAsync("", kernel.Functions.GetFunction("test", "type09")); - - await kernel.RunAsync("", testFunctions["type10"]); - await kernel.RunAsync("", kernel.Functions.GetFunction("test", "type10")); - - await kernel.RunAsync("", testFunctions["type11"]); - await kernel.RunAsync("", kernel.Functions.GetFunction("test", "type11")); - - await kernel.RunAsync(variables, testFunctions["type12"]); - await kernel.RunAsync(variables, kernel.Functions.GetFunction("test", "type12")); - - await kernel.RunAsync(testFunctions["type18"]); - await kernel.RunAsync(kernel.Functions.GetFunction("test", "type18")); + kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "SummarizePlugin")); + + // Different ways to invoke a function (not limited to these examples) + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.NoInputWithVoidResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.NoInputTaskWithVoidResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.InputDateTimeWithStringResult)], new() { ["currentDate"] = DateTime.Now }); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.NoInputTaskWithStringResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.MultipleInputsWithVoidResult)], new() { ["x"] = "x string", ["y"] = 100, ["z"] = 1.5 }); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.ComplexInputWithStringResult)], new() { ["complexObject"] = new LocalExamplePlugin(this.Output) }); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.InputStringTaskWithStringResult)], new() { ["echoInput"] = "return this" }); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.InputStringTaskWithVoidResult)], new() { ["x"] = "x input" }); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.NoInputWithFunctionResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.NoInputTaskWithFunctionResult)]); + + // Injecting Parameters Examples + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.TaskInjectingKernelFunctionWithStringResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.TaskInjectingLoggerWithNoResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.TaskInjectingLoggerFactoryWithNoResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.TaskInjectingCultureInfoOrIFormatProviderWithStringResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.TaskInjectingCancellationTokenWithStringResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.TaskInjectingServiceSelectorWithStringResult)]); + await kernel.InvokeAsync(plugin[nameof(LocalExamplePlugin.TaskInjectingKernelWithInputTextAndStringResult)], + new() + { + ["textToSummarize"] = @"C# is a modern, versatile language by Microsoft, blending the efficiency of C++ + with Visual Basic's simplicity. It's ideal for a wide range of applications, + emphasizing type safety, modularity, and modern programming paradigms." + }); + + // You can also use the kernel.Plugins collection to invoke a function + await kernel.InvokeAsync(kernel.Plugins["Examples"][nameof(LocalExamplePlugin.NoInputWithVoidResult)]); + } + + public Example09_FunctionTypes(ITestOutputHelper output) : base(output) + { } } +// Task functions when are imported as plugins loose the "Async" suffix if present. 
+#pragma warning disable IDE1006 // Naming Styles public class LocalExamplePlugin { - [SKFunction] - public void Type01() + private readonly ITestOutputHelper _output; + + public LocalExamplePlugin(ITestOutputHelper output) { - Console.WriteLine("Running function type 1"); + this._output = output; } - [SKFunction] - public string Type02() + /// + /// Example of using a void function with no input + /// + [KernelFunction] + public void NoInputWithVoidResult() { - Console.WriteLine("Running function type 2"); - return ""; + this._output.WriteLine($"Running {nameof(this.NoInputWithVoidResult)} -> No input"); } - [SKFunction] - public async Task Type03Async() + /// + /// Example of using a void task function with no input + /// + [KernelFunction] + public Task NoInputTaskWithVoidResult() { - await Task.Delay(0); - Console.WriteLine("Running function type 3"); - return ""; + this._output.WriteLine($"Running {nameof(this.NoInputTaskWithVoidResult)} -> No input"); + return Task.CompletedTask; } - [SKFunction] - public void Type04(SKContext context) + /// + /// Example of using a function with a DateTime input and a string result + /// + [KernelFunction] + public string InputDateTimeWithStringResult(DateTime currentDate) { - Console.WriteLine("Running function type 4"); + var result = currentDate.ToString(CultureInfo.InvariantCulture); + this._output.WriteLine($"Running {nameof(this.InputDateTimeWithStringResult)} -> [currentDate = {currentDate}] -> result: {result}"); + return result; } - [SKFunction] - public string Type05(SKContext context) + /// + /// Example of using a Task function with no input and a string result + /// + [KernelFunction] + public Task NoInputTaskWithStringResult() { - Console.WriteLine("Running function type 5"); - return ""; + var result = "string result"; + this._output.WriteLine($"Running {nameof(this.NoInputTaskWithStringResult)} -> No input -> result: {result}"); + return Task.FromResult(result); } - [SKFunction] - public async Task Type06Async(SKContext context) + /// + /// Example passing multiple parameters with multiple types + /// + [KernelFunction] + public void MultipleInputsWithVoidResult(string x, int y, double z) { - var summarizer = context.Functions.GetFunction("SummarizePlugin", "Summarize"); - var summary = await context.Runner.RunAsync(summarizer, new ContextVariables("blah blah blah")); + this._output.WriteLine($"Running {nameof(this.MultipleInputsWithVoidResult)} -> input: [x = {x}, y = {y}, z = {z}]"); + } - Console.WriteLine($"Running function type 6 [{summary.GetValue()}]"); - return ""; + /// + /// Example passing a complex object and returning a string result + /// + [KernelFunction] + public string ComplexInputWithStringResult(object complexObject) + { + var result = complexObject.GetType().Name; + this._output.WriteLine($"Running {nameof(this.ComplexInputWithStringResult)} -> input: [complexObject = {complexObject}] -> result: {result}"); + return result; } - [SKFunction] - public async Task Type07Async(SKContext context) + /// + /// Example using an async task function echoing the input + /// + [KernelFunction] + public Task InputStringTaskWithStringResult(string echoInput) { - await Task.Delay(0); - Console.WriteLine("Running function type 7"); - return context; + this._output.WriteLine($"Running {nameof(this.InputStringTaskWithStringResult)} -> input: [echoInput = {echoInput}] -> result: {echoInput}"); + return Task.FromResult(echoInput); } - [SKFunction] - public void Type08(string x) + /// + /// Example using an async void task with string 
input + /// + [KernelFunction] + public Task InputStringTaskWithVoidResult(string x) { - Console.WriteLine("Running function type 8"); + this._output.WriteLine($"Running {nameof(this.InputStringTaskWithVoidResult)} -> input: [x = {x}]"); + return Task.CompletedTask; } - [SKFunction] - public string Type09(string x) + /// + /// Example using a function to return the result of another inner function + /// + [KernelFunction] + public FunctionResult NoInputWithFunctionResult() { - Console.WriteLine("Running function type 9"); - return ""; + var myInternalFunction = KernelFunctionFactory.CreateFromMethod(() => { }); + var result = new FunctionResult(myInternalFunction); + this._output.WriteLine($"Running {nameof(this.NoInputWithFunctionResult)} -> No input -> result: {result.GetType().Name}"); + return result; } - [SKFunction] - public async Task Type10Async(string x) + /// + /// Example using a task function to return the result of another kernel function + /// + [KernelFunction] + public async Task NoInputTaskWithFunctionResult(Kernel kernel) { - await Task.Delay(0); - Console.WriteLine("Running function type 10"); - return ""; + var result = await kernel.InvokeAsync(kernel.Plugins["Examples"][nameof(this.NoInputWithVoidResult)]); + this._output.WriteLine($"Running {nameof(this.NoInputTaskWithFunctionResult)} -> Injected kernel -> result: {result.GetType().Name}"); + return result; } - [SKFunction] - public void Type11(string x, SKContext context) + /// + /// Example how to inject Kernel in your function + /// This example uses the injected kernel to invoke a plugin from within another function + /// + [KernelFunction] + public async Task TaskInjectingKernelWithInputTextAndStringResult(Kernel kernel, string textToSummarize) { - Console.WriteLine("Running function type 11"); + var summary = await kernel.InvokeAsync(kernel.Plugins["SummarizePlugin"]["Summarize"], new() { ["input"] = textToSummarize }); + this._output.WriteLine($"Running {nameof(this.TaskInjectingKernelWithInputTextAndStringResult)} -> Injected kernel + input: [textToSummarize: {textToSummarize[..15]}...{textToSummarize[^15..]}] -> result: {summary}"); + return summary!; } - [SKFunction] - public string Type12(string x, SKContext context) + /// + /// Example how to inject the executing KernelFunction as a parameter + /// + [KernelFunction, Description("Example function injecting itself as a parameter")] + public async Task TaskInjectingKernelFunctionWithStringResult(KernelFunction executingFunction) { - Console.WriteLine("Running function type 12"); - return ""; + var result = $"Name: {executingFunction.Name}, Description: {executingFunction.Description}"; + this._output.WriteLine($"Running {nameof(this.TaskInjectingKernelWithInputTextAndStringResult)} -> Injected Function -> result: {result}"); + return result; } - [SKFunction] - public async Task Type13Async(string x, SKContext context) + /// + /// Example how to inject ILogger in your function + /// + [KernelFunction] + public Task TaskInjectingLoggerWithNoResult(ILogger logger) { - await Task.Delay(0); - Console.WriteLine("Running function type 13"); - return ""; + logger.LogWarning("Running {FunctionName} -> Injected Logger", nameof(this.TaskInjectingLoggerWithNoResult)); + this._output.WriteLine($"Running {nameof(this.TaskInjectingKernelWithInputTextAndStringResult)} -> Injected Logger"); + return Task.CompletedTask; } - [SKFunction] - public async Task Type14Async(string x, SKContext context) + /// + /// Example how to inject ILoggerFactory in your function + /// + 
[KernelFunction]
+ public Task TaskInjectingLoggerFactoryWithNoResult(ILoggerFactory loggerFactory)
{
- await Task.Delay(0);
- Console.WriteLine("Running function type 14");
- return context;
+ loggerFactory
+ .CreateLogger<LocalExamplePlugin>()
+ .LogWarning("Running {FunctionName} -> Injected Logger", nameof(this.TaskInjectingLoggerFactoryWithNoResult));
+
+ this._output.WriteLine($"Running {nameof(this.TaskInjectingLoggerFactoryWithNoResult)} -> Injected Logger");
+ return Task.CompletedTask;
}
- [SKFunction]
- public async Task Type15Async(string x)
+ /// <summary>
+ /// Example how to inject a service selector in your function and use a specific service
+ /// </summary>
+ [KernelFunction]
+ public async Task<string> TaskInjectingServiceSelectorWithStringResult(Kernel kernel, KernelFunction function, KernelArguments arguments, IAIServiceSelector serviceSelector)
{
- await Task.Delay(0);
- Console.WriteLine("Running function type 15");
+ ChatMessageContent? chatMessageContent = null;
+ if (serviceSelector.TrySelectAIService<IChatCompletionService>(kernel, function, arguments, out var chatCompletion, out var executionSettings))
+ {
+ chatMessageContent = await chatCompletion.GetChatMessageContentAsync(new ChatHistory("How much is 5 + 5 ?"), executionSettings);
+ }
+
+ var result = chatMessageContent?.Content;
+ this._output.WriteLine($"Running {nameof(this.TaskInjectingServiceSelectorWithStringResult)} -> Injected Kernel, KernelFunction, KernelArguments, Service Selector -> result: {result}");
+ return result ?? string.Empty;
}
- [SKFunction]
- public async Task Type16Async(SKContext context)
+ /// <summary>
+ /// Example how to inject CultureInfo or IFormatProvider in your function
+ /// </summary>
+ [KernelFunction]
+ public async Task<string> TaskInjectingCultureInfoOrIFormatProviderWithStringResult(CultureInfo cultureInfo, IFormatProvider formatProvider)
{
- await Task.Delay(0);
- Console.WriteLine("Running function type 16");
+ var result = $"Culture Name: {cultureInfo.Name}, FormatProvider Equals CultureInfo?: {formatProvider.Equals(cultureInfo)}";
+ this._output.WriteLine($"Running {nameof(this.TaskInjectingCultureInfoOrIFormatProviderWithStringResult)} -> Injected CultureInfo, IFormatProvider -> result: {result}");
+ return result;
}
- [SKFunction]
- public async Task Type17Async(string x, SKContext context)
+ /// <summary>
+ /// Example how to inject current CancellationToken in your function
+ /// </summary>
+ [KernelFunction]
+ public async Task<string> TaskInjectingCancellationTokenWithStringResult(CancellationToken cancellationToken)
{
- await Task.Delay(0);
- Console.WriteLine("Running function type 17");
+ var result = $"Cancellation requested: {cancellationToken.IsCancellationRequested}";
+ this._output.WriteLine($"Running {nameof(this.TaskInjectingCancellationTokenWithStringResult)} -> Injected Cancellation Token -> result: {result}");
+ return result;
}
- [SKFunction]
- public async Task Type18Async()
+ public override string ToString()
{
- await Task.Delay(0);
- Console.WriteLine("Running function type 18");
+ return "Complex type result ToString override";
}
}
+#pragma warning restore IDE1006 // Naming Styles
diff --git a/dotnet/samples/KernelSyntaxExamples/Example10_DescribeAllPluginsAndFunctions.cs b/dotnet/samples/KernelSyntaxExamples/Example10_DescribeAllPluginsAndFunctions.cs
index 1cd68f4c0fdf..6ddf492d898b 100644
--- a/dotnet/samples/KernelSyntaxExamples/Example10_DescribeAllPluginsAndFunctions.cs
+++ b/dotnet/samples/KernelSyntaxExamples/Example10_DescribeAllPluginsAndFunctions.cs
@@ -1,62 +1,61 @@
// Copyright (c) Microsoft. All rights reserved.
-using System; +using System.IO; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Plugins.Core; using Plugins; using RepoUtils; +using Xunit; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming -public static class Example10_DescribeAllPluginsAndFunctions +namespace Examples; + +public class Example10_DescribeAllPluginsAndFunctions : BaseTest { /// /// Print a list of all the functions imported into the kernel, including function descriptions, /// list of parameters, parameters descriptions, etc. /// See the end of the file for a sample of what the output looks like. /// - public static Task RunAsync() + [Fact] + public Task RunAsync() { - Console.WriteLine("======== Describe all plugins and functions ========"); - - var kernel = new KernelBuilder() - .WithOpenAIChatCompletionService( + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( modelId: TestConfiguration.OpenAI.ChatModelId, apiKey: TestConfiguration.OpenAI.ApiKey) .Build(); // Import a native plugin - var staticText = new StaticTextPlugin(); - kernel.ImportFunctions(staticText, "StaticTextPlugin"); + kernel.ImportPluginFromType(); // Import another native plugin - var text = new TextPlugin(); - kernel.ImportFunctions(text, "AnotherTextPlugin"); + kernel.ImportPluginFromType("AnotherTextPlugin"); // Import a semantic plugin string folder = RepoFiles.SamplePluginsPath(); - kernel.ImportSemanticFunctionsFromDirectory(folder, "SummarizePlugin"); + kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "SummarizePlugin")); - // Define a semantic function inline, without naming - var sFun1 = kernel.CreateSemanticFunction("tell a joke about {{$input}}", new OpenAIRequestSettings() { MaxTokens = 150 }); + // Define a prompt function inline, without naming + var sFun1 = kernel.CreateFunctionFromPrompt("tell a joke about {{$input}}", new OpenAIPromptExecutionSettings() { MaxTokens = 150 }); - // Define a semantic function inline, with plugin name - var sFun2 = kernel.CreateSemanticFunction( + // Define a prompt function inline, with plugin name + var sFun2 = kernel.CreateFunctionFromPrompt( "write a novel about {{$input}} in {{$language}} language", - new OpenAIRequestSettings() { MaxTokens = 150 }, - pluginName: "Writing", + new OpenAIPromptExecutionSettings() { MaxTokens = 150 }, functionName: "Novel", description: "Write a bedtime story"); - var functions = kernel.Functions.GetFunctionViews(); + var functions = kernel.Plugins.GetFunctionsMetadata(); - Console.WriteLine("*****************************************"); - Console.WriteLine("****** Registered plugins and functions ******"); - Console.WriteLine("*****************************************"); - Console.WriteLine(); + WriteLine("**********************************************"); + WriteLine("****** Registered plugins and functions ******"); + WriteLine("**********************************************"); + WriteLine(); - foreach (FunctionView func in functions) + foreach (KernelFunctionMetadata func in functions) { PrintFunction(func); } @@ -64,30 +63,34 @@ public static Task RunAsync() return Task.CompletedTask; } - private static void PrintFunction(FunctionView func) + private void PrintFunction(KernelFunctionMetadata func) { - Console.WriteLine($" {func.Name}: {func.Description}"); + WriteLine($"Plugin: {func.PluginName}"); + WriteLine($" {func.Name}: {func.Description}"); if (func.Parameters.Count 
> 0) { - Console.WriteLine(" Params:"); + WriteLine(" Params:"); foreach (var p in func.Parameters) { - Console.WriteLine($" - {p.Name}: {p.Description}"); - Console.WriteLine($" default: '{p.DefaultValue}'"); + WriteLine($" - {p.Name}: {p.Description}"); + WriteLine($" default: '{p.DefaultValue}'"); } } - Console.WriteLine(); + WriteLine(); + } + + public Example10_DescribeAllPluginsAndFunctions(ITestOutputHelper output) : base(output) + { } } -#pragma warning disable CS1587 // XML comment is not placed on a valid language element /** Sample output: -***************************************** -****** Native plugins and functions ****** -***************************************** +********************************************** +****** Registered plugins and functions ****** +********************************************** Plugin: StaticTextPlugin Uppercase: Change all string chars to uppercase @@ -95,6 +98,7 @@ private static void PrintFunction(FunctionView func) - input: Text to uppercase default: '' +Plugin: StaticTextPlugin AppendDay: Append the day variable Params: - input: Text to append to @@ -102,88 +106,78 @@ private static void PrintFunction(FunctionView func) - day: Value of the day to append default: '' -Plugin: TextPlugin - Uppercase: Convert a string to uppercase. - Params: - - input: Text to uppercase - default: '' - +Plugin: AnotherTextPlugin Trim: Trim whitespace from the start and end of a string. Params: - - input: Text to edit + - input: default: '' +Plugin: AnotherTextPlugin TrimStart: Trim whitespace from the start of a string. Params: - - input: Text to edit + - input: default: '' +Plugin: AnotherTextPlugin TrimEnd: Trim whitespace from the end of a string. Params: - - input: Text to edit + - input: default: '' - Lowercase: Convert a string to lowercase. +Plugin: AnotherTextPlugin + Uppercase: Convert a string to uppercase. Params: - - input: Text to lowercase + - input: default: '' -***************************************** -***** Semantic plugins and functions ***** -***************************************** - -Plugin: _GLOBAL_FUNCTIONS_ - funcce97d27e3d0b4897acf6122e41430695: Generic function, unknown purpose +Plugin: AnotherTextPlugin + Lowercase: Convert a string to lowercase. Params: - input: default: '' -Plugin: Writing - Novel: Write a bedtime story +Plugin: AnotherTextPlugin + Length: Get the length of a string. Params: - input: default: '' - - language: - default: '' -Plugin: SummarizePlugin - Topics: Analyze given text or document and extract key topics worth remembering +Plugin: AnotherTextPlugin + Concat: Concat two strings into one. Params: - - input: + - input: First input to concatenate with + default: '' + - input2: Second input to concatenate with default: '' - Summarize: Summarize given text or any text document +Plugin: AnotherTextPlugin + Echo: Echo the input string. Useful for capturing plan input for use in multiple functions. Params: - - input: Text to summarize + - text: Input string to echo. default: '' +Plugin: SummarizePlugin MakeAbstractReadable: Given a scientific white paper abstract, rewrite it to make it more readable Params: - input: default: '' - TopicsMore: Generate list of topics for long length content - Params: - - input: Block of text to analyze - default: '' - - previousResults: List of topics found from previous blocks of text - default: '' - +Plugin: SummarizePlugin Notegen: Automatically generate compact notes for any text or text document. 
Params: - input: default: '' - ActionItems: unknown function - - SummarizeMore: Summarize given text or any text document +Plugin: SummarizePlugin + Summarize: Summarize given text or any text document Params: - - input: Block of text to analyze - default: '' - - previousResults: Overview generated from previous blocks of text + - input: Text to summarize default: '' - - conversationType: Text type, e.g. chat, email thread, document + +Plugin: SummarizePlugin + Topics: Analyze given text or document and extract key topics worth remembering + Params: + - input: default: '' */ -#pragma warning restore CS1587 // XML comment is not placed on a valid language element diff --git a/dotnet/samples/KernelSyntaxExamples/Example11_WebSearchQueries.cs b/dotnet/samples/KernelSyntaxExamples/Example11_WebSearchQueries.cs index 888f78a3ed85..b84fcf69c095 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example11_WebSearchQueries.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example11_WebSearchQueries.cs @@ -1,32 +1,42 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Threading.Tasks; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Plugins.Web; -using RepoUtils; +using Xunit; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming -public static class Example11_WebSearchQueries +namespace Examples; + +public class Example11_WebSearchQueries : BaseTest { - public static async Task RunAsync() + [Fact] + public async Task RunAsync() { - Console.WriteLine("======== WebSearchQueries ========"); + WriteLine("======== WebSearchQueries ========"); - IKernel kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); + Kernel kernel = new(); // Load native plugins - var plugin = new SearchUrlPlugin(); - var bing = kernel.ImportFunctions(plugin, "search"); + var bing = kernel.ImportPluginFromType("search"); // Run var ask = "What's the tallest building in Europe?"; - var result = await kernel.RunAsync( - ask, - bing["BingSearchUrl"] - ); + var result = await kernel.InvokeAsync(bing["BingSearchUrl"], new() { ["query"] = ask }); + + WriteLine(ask + "\n"); + WriteLine(result.GetValue()); - Console.WriteLine(ask + "\n"); - Console.WriteLine(result.GetValue()); + /* Expected output: + * ======== WebSearchQueries ======== + * What's the tallest building in Europe? + * + * https://www.bing.com/search?q=What%27s%20the%20tallest%20building%20in%20Europe%3F + * == DONE == + */ + } + + public Example11_WebSearchQueries(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example12_SequentialPlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example12_SequentialPlanner.cs deleted file mode 100644 index d74748c38b5b..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example12_SequentialPlanner.cs +++ /dev/null @@ -1,358 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
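For Example11 above, the same call can also be made by plugin and function name, which is convenient when the KernelPlugin instance returned by ImportPluginFromType is not kept around. A small sketch under the same assumptions as the sample; no HTTP request is made, the plugin only builds the search URL.

```csharp
// Sketch: invoke SearchUrlPlugin.BingSearchUrl by name instead of through the returned KernelPlugin.
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.Web;

public static class WebSearchQuerySketch
{
    public static async Task RunAsync()
    {
        Kernel kernel = new();
        kernel.ImportPluginFromType<SearchUrlPlugin>("search");

        // Builds the Bing search URL for the query string.
        FunctionResult result = await kernel.InvokeAsync(
            "search", "BingSearchUrl",
            new() { ["query"] = "What's the tallest building in Europe?" });

        System.Console.WriteLine(result.GetValue<string>());
    }
}
```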
- -using System; -using System.Diagnostics; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Planners; -using Microsoft.SemanticKernel.Planning; -using Microsoft.SemanticKernel.Plugins.Core; -using Microsoft.SemanticKernel.Plugins.Memory; -using Plugins; -using RepoUtils; - -// ReSharper disable CommentTypo -// ReSharper disable once InconsistentNaming -internal static class Example12_SequentialPlanner -{ - public static async Task RunAsync() - { - await PoetrySamplesAsync(); - await EmailSamplesWithRecallAsync(); - await BookSamplesAsync(); - await MemorySampleAsync(); - await PlanNotPossibleSampleAsync(); - } - - private static async Task PlanNotPossibleSampleAsync() - { - Console.WriteLine("======== Sequential Planner - Plan Not Possible ========"); - var kernel = InitializeKernelAndPlanner(out var planner); - - // Load additional plugins to enable planner but not enough for the given goal. - string folder = RepoFiles.SamplePluginsPath(); - kernel.ImportSemanticFunctionsFromDirectory(folder, "SummarizePlugin"); - - try - { - await planner.CreatePlanAsync("Write a poem about John Doe, then translate it into Italian."); - } - catch (SKException e) - { - Console.WriteLine(e.Message); - // Create plan error: Not possible to create plan for goal with available functions. - // Goal:Write a poem about John Doe, then translate it into Italian. - // Functions: - // SummarizePlugin.MakeAbstractReadable: - // description: Given a scientific white paper abstract, rewrite it to make it more readable - // inputs: - // - input: - - // SummarizePlugin.Notegen: - // description: Automatically generate compact notes for any text or text document. - // inputs: - // - input: - - // SummarizePlugin.Summarize: - // description: Summarize given text or any text document - // inputs: - // - input: Text to summarize - - // SummarizePlugin.Topics: - // description: Analyze given text or document and extract key topics worth remembering - // inputs: - // - input: - } - } - - private static async Task PoetrySamplesAsync() - { - Console.WriteLine("======== Sequential Planner - Create and Execute Poetry Plan ========"); - var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) - .Build(); - - string folder = RepoFiles.SamplePluginsPath(); - kernel.ImportSemanticFunctionsFromDirectory(folder, - "SummarizePlugin", - "WriterPlugin"); - - var planner = new SequentialPlanner(kernel); - - var plan = await planner.CreatePlanAsync("Write a poem about John Doe, then translate it into Italian."); - - // Original plan: - // Goal: Write a poem about John Doe, then translate it into Italian. - - // Steps: - // - WriterPlugin.ShortPoem INPUT='John Doe is a friendly guy who likes to help others and enjoys reading books.' 
=> - // - WriterPlugin.Translate language='Italian' INPUT='' => - - Console.WriteLine("Original plan:"); - Console.WriteLine(plan.ToPlanWithGoalString()); - - var result = await kernel.RunAsync(plan); - - Console.WriteLine("Result:"); - Console.WriteLine(result.GetValue()); - } - - private static async Task EmailSamplesWithRecallAsync() - { - Console.WriteLine("======== Sequential Planner - Create and Execute Email Plan ========"); - var kernel = InitializeKernelAndPlanner(out var planner, 512); - kernel.ImportFunctions(new EmailPlugin(), "email"); - - // Load additional plugins to enable planner to do non-trivial asks. - string folder = RepoFiles.SamplePluginsPath(); - kernel.ImportSemanticFunctionsFromDirectory(folder, - "SummarizePlugin", - "WriterPlugin"); - - var plan = await planner.CreatePlanAsync("Summarize an input, translate to french, and e-mail to John Doe"); - - // Original plan: - // Goal: Summarize an input, translate to french, and e-mail to John Doe - - // Steps: - // - SummarizePlugin.Summarize INPUT='' => - // - WriterPlugin.Translate language='French' INPUT='' => TRANSLATED_SUMMARY - // - email.GetEmailAddress INPUT='John Doe' => EMAIL_ADDRESS - // - email.SendEmail INPUT='$TRANSLATED_SUMMARY' email_address='$EMAIL_ADDRESS' => - - Console.WriteLine("Original plan:"); - Console.WriteLine(plan.ToPlanWithGoalString()); - - // Serialize plan before execution for saving to memory on success. - var originalPlan = plan.ToJson(); - - var input = - "Once upon a time, in a faraway kingdom, there lived a kind and just king named Arjun. " + - "He ruled over his kingdom with fairness and compassion, earning him the love and admiration of his people. " + - "However, the kingdom was plagued by a terrible dragon that lived in the nearby mountains and terrorized the nearby villages, " + - "burning their crops and homes. The king had tried everything to defeat the dragon, but to no avail. " + - "One day, a young woman named Mira approached the king and offered to help defeat the dragon. She was a skilled archer " + - "and claimed that she had a plan to defeat the dragon once and for all. The king was skeptical, but desperate for a solution, " + - "so he agreed to let her try. Mira set out for the dragon's lair and, with the help of her trusty bow and arrow, " + - "she was able to strike the dragon with a single shot through its heart, killing it instantly. The people rejoiced " + - "and the kingdom was at peace once again. The king was so grateful to Mira that he asked her to marry him and she agreed. " + - "They ruled the kingdom together, ruling with fairness and compassion, just as Arjun had done before. They lived " + - "happily ever after, with the people of the kingdom remembering Mira as the brave young woman who saved them from the dragon."; - await ExecutePlanAsync(kernel, plan, input, 5); - - Console.WriteLine("======== Sequential Planner - Find and Execute Saved Plan ========"); - - // Save the plan for future use - var semanticMemory = InitializeMemory(); - await semanticMemory.SaveInformationAsync( - "plans", - id: Guid.NewGuid().ToString(), - text: plan.Description, // This is the goal used to create the plan - description: originalPlan); - - var goal = "Write summary in french and e-mail John Doe"; - - Console.WriteLine($"Goal: {goal}"); - Console.WriteLine("Searching for saved plan..."); - - Plan? 
restoredPlan = null; - var memories = semanticMemory.SearchAsync("plans", goal, limit: 1, minRelevanceScore: 0.5); - await foreach (MemoryQueryResult memory in memories) - { - Console.WriteLine($"Restored plan (relevance={memory.Relevance}):"); - - // Deseriliaze the plan from the description - restoredPlan = Plan.FromJson(memory.Metadata.Description, kernel.Functions); - - Console.WriteLine(restoredPlan.ToPlanWithGoalString()); - Console.WriteLine(); - - break; - } - - if (restoredPlan is not null) - { - var newInput = - "Far in the future, on a planet lightyears away, 15 year old Remy lives a normal life. He goes to school, " + - "hangs out with his friends, and tries to avoid trouble. But when he stumbles across a secret that threatens to destroy " + - "everything he knows, he's forced to go on the run. With the help of a mysterious girl named Eve, he must evade the ruthless " + - "agents of the Galactic Federation, and uncover the truth about his past. But the more he learns, the more he realizes that " + - "he's not just an ordinary boy."; - - var result = await kernel.RunAsync(restoredPlan, new(newInput)); - - Console.WriteLine("Result:"); - Console.WriteLine(result.GetValue()); - } - } - - private static async Task BookSamplesAsync() - { - Console.WriteLine("======== Sequential Planner - Create and Execute Book Creation Plan ========"); - var kernel = InitializeKernelAndPlanner(out var planner); - - // Load additional plugins to enable planner to do non-trivial asks. - string folder = RepoFiles.SamplePluginsPath(); - kernel.ImportSemanticFunctionsFromDirectory(folder, "WriterPlugin"); - kernel.ImportSemanticFunctionsFromDirectory(folder, "MiscPlugin"); - - var originalPlan = await planner.CreatePlanAsync("Create a book with 3 chapters about a group of kids in a club called 'The Thinking Caps.'"); - - // Original plan: - // Goal: Create a book with 3 chapters about a group of kids in a club called 'The Thinking Caps.' - - // Steps: - // - WriterPlugin.NovelOutline chapterCount='3' INPUT='A group of kids in a club called 'The Thinking Caps' that solve mysteries and puzzles using their creativity and logic.' 
endMarker='' => OUTLINE - // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='0' => CHAPTER_1_SYNOPSIS - // - WriterPlugin.NovelChapter chapterIndex='1' previousChapter='' INPUT='$CHAPTER_1_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_1 - // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='1' => CHAPTER_2_SYNOPSIS - // - WriterPlugin.NovelChapter chapterIndex='2' previousChapter='$CHAPTER_1_SYNOPSIS' INPUT='$CHAPTER_2_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_2 - // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='2' => CHAPTER_3_SYNOPSIS - // - WriterPlugin.NovelChapter chapterIndex='3' previousChapter='$CHAPTER_2_SYNOPSIS' INPUT='$CHAPTER_3_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_3 - - Console.WriteLine("Original plan:"); - Console.WriteLine(originalPlan.ToPlanWithGoalString()); - - Stopwatch sw = new(); - sw.Start(); - await ExecutePlanAsync(kernel, originalPlan); - } - - private static async Task MemorySampleAsync() - { - Console.WriteLine("======== Sequential Planner - Create and Execute Plan using Memory ========"); - - var kernel = InitializeKernel(); - var memory = InitializeMemory(); - - string folder = RepoFiles.SamplePluginsPath(); - kernel.ImportSemanticFunctionsFromDirectory(folder, - "SummarizePlugin", - "WriterPlugin", - "CalendarPlugin", - "ChatPlugin", - "ChildrensBookPlugin", - "ClassificationPlugin", - "CodingPlugin", - "FunPlugin", - "IntentDetectionPlugin", - "MiscPlugin", - "QAPlugin"); - - kernel.ImportFunctions(new EmailPlugin(), "email"); - kernel.ImportFunctions(new StaticTextPlugin(), "statictext"); - kernel.ImportFunctions(new TextPlugin(), "coretext"); - - var goal = "Create a book with 3 chapters about a group of kids in a club called 'The Thinking Caps.'"; - - // IMPORTANT: To use memory and embeddings to find relevant plugins in the planner, set the 'Memory' property on the planner config. - var planner = new SequentialPlanner(kernel, new SequentialPlannerConfig { SemanticMemoryConfig = new() { RelevancyThreshold = 0.5, Memory = memory } }); - - var plan = await planner.CreatePlanAsync(goal); - - Console.WriteLine("Original plan:"); - Console.WriteLine(plan.ToPlanWithGoalString()); - } - - private static IKernel InitializeKernelAndPlanner(out SequentialPlanner planner, int maxTokens = 1024) - { - var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) - .Build(); - - planner = new SequentialPlanner(kernel, new SequentialPlannerConfig { MaxTokens = maxTokens }); - - return kernel; - } - - private static IKernel InitializeKernel() - { - // IMPORTANT: Register an embedding generation service and a memory store. The Planner will - // use these to generate and store embeddings for the function descriptions. 
- var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) - .WithAzureTextEmbeddingGenerationService( - TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, - TestConfiguration.AzureOpenAIEmbeddings.Endpoint, - TestConfiguration.AzureOpenAIEmbeddings.ApiKey) - .Build(); - - return kernel; - } - - private static SemanticTextMemory InitializeMemory() - { - var memoryStorage = new VolatileMemoryStore(); - - var textEmbeddingGenerator = new AzureTextEmbeddingGeneration( - modelId: TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, - endpoint: TestConfiguration.AzureOpenAIEmbeddings.Endpoint, - apiKey: TestConfiguration.AzureOpenAIEmbeddings.ApiKey); - - var memory = new SemanticTextMemory(memoryStorage, textEmbeddingGenerator); - - return memory; - } - - private static async Task ExecutePlanAsync( - IKernel kernel, - Plan plan, - string input = "", - int maxSteps = 10) - { - Stopwatch sw = new(); - sw.Start(); - - // loop until complete or at most N steps - try - { - for (int step = 1; plan.HasNextStep && step < maxSteps; step++) - { - if (string.IsNullOrEmpty(input)) - { - await plan.InvokeNextStepAsync(kernel.CreateNewContext()); - // or await kernel.StepAsync(plan); - } - else - { - plan = await kernel.StepAsync(input, plan); - input = string.Empty; - } - - if (!plan.HasNextStep) - { - Console.WriteLine($"Step {step} - COMPLETE!"); - Console.WriteLine(plan.State.ToString()); - break; - } - - Console.WriteLine($"Step {step} - Results so far:"); - Console.WriteLine(plan.State.ToString()); - } - } - catch (SKException e) - { - Console.WriteLine("Step - Execution failed:"); - Console.WriteLine(e.Message); - } - - sw.Stop(); - Console.WriteLine($"Execution complete in {sw.ElapsedMilliseconds} ms!"); - return plan; - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example13_ConversationSummaryPlugin.cs b/dotnet/samples/KernelSyntaxExamples/Example13_ConversationSummaryPlugin.cs index 7dc25a40f998..bb1cc2b807c1 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example13_ConversationSummaryPlugin.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example13_ConversationSummaryPlugin.cs @@ -1,16 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. -using System; -using System.Collections.Generic; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Plugins.Core; -using RepoUtils; +using xRetry; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming +namespace Examples; -internal static class Example13_ConversationSummaryPlugin +public class Example13_ConversationSummaryPlugin : BaseTest { private const string ChatTranscript = @" @@ -120,76 +118,74 @@ internal static class Example13_ConversationSummaryPlugin Jane: Goodbye! 
"; - public static async Task RunAsync() + [RetryFact(typeof(HttpOperationException))] + public async Task RunAsync() { await ConversationSummaryPluginAsync(); await GetConversationActionItemsAsync(); await GetConversationTopicsAsync(); } - private static async Task ConversationSummaryPluginAsync() + private async Task ConversationSummaryPluginAsync() { - Console.WriteLine("======== SamplePlugins - Conversation Summary Plugin - Summarize ========"); - IKernel kernel = InitializeKernel(); + WriteLine("======== SamplePlugins - Conversation Summary Plugin - Summarize ========"); + Kernel kernel = InitializeKernel(); - IDictionary conversationSummaryPlugin = - kernel.ImportFunctions(new ConversationSummaryPlugin(kernel)); + KernelPlugin conversationSummaryPlugin = kernel.ImportPluginFromType(); - KernelResult summary = await kernel.RunAsync( - ChatTranscript, - conversationSummaryPlugin["SummarizeConversation"]); + FunctionResult summary = await kernel.InvokeAsync( + conversationSummaryPlugin["SummarizeConversation"], new() { ["input"] = ChatTranscript }); - Console.WriteLine("Generated Summary:"); - Console.WriteLine(summary.GetValue()); + WriteLine("Generated Summary:"); + WriteLine(summary.GetValue()); } - private static async Task GetConversationActionItemsAsync() + private async Task GetConversationActionItemsAsync() { - Console.WriteLine("======== SamplePlugins - Conversation Summary Plugin - Action Items ========"); - IKernel kernel = InitializeKernel(); + WriteLine("======== SamplePlugins - Conversation Summary Plugin - Action Items ========"); + Kernel kernel = InitializeKernel(); - IDictionary conversationSummary = - kernel.ImportFunctions(new ConversationSummaryPlugin(kernel)); + KernelPlugin conversationSummary = kernel.ImportPluginFromType(); - KernelResult summary = await kernel.RunAsync( - ChatTranscript, - conversationSummary["GetConversationActionItems"]); + FunctionResult summary = await kernel.InvokeAsync( + conversationSummary["GetConversationActionItems"], new() { ["input"] = ChatTranscript }); - Console.WriteLine("Generated Action Items:"); - Console.WriteLine(summary.GetValue()); + WriteLine("Generated Action Items:"); + WriteLine(summary.GetValue()); } - private static async Task GetConversationTopicsAsync() + private async Task GetConversationTopicsAsync() { - Console.WriteLine("======== SamplePlugins - Conversation Summary Plugin - Topics ========"); - IKernel kernel = InitializeKernel(); + WriteLine("======== SamplePlugins - Conversation Summary Plugin - Topics ========"); + Kernel kernel = InitializeKernel(); - IDictionary conversationSummary = - kernel.ImportFunctions(new ConversationSummaryPlugin(kernel)); + KernelPlugin conversationSummary = kernel.ImportPluginFromType(); - KernelResult summary = await kernel.RunAsync( - ChatTranscript, - conversationSummary["GetConversationTopics"]); + FunctionResult summary = await kernel.InvokeAsync( + conversationSummary["GetConversationTopics"], new() { ["input"] = ChatTranscript }); - Console.WriteLine("Generated Topics:"); - Console.WriteLine(summary.GetValue()); + WriteLine("Generated Topics:"); + WriteLine(summary.GetValue()); } - private static IKernel InitializeKernel() + private Kernel InitializeKernel() { - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) - .Build(); + Kernel kernel = Kernel.CreateBuilder() + 
.AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .Build(); return kernel; } + + public Example13_ConversationSummaryPlugin(ITestOutputHelper output) : base(output) + { + } } -// ReSharper disable CommentTypo /* Example Output: ======== SamplePlugins - Conversation Summary Plugin - Summarize ======== @@ -269,4 +265,3 @@ private static IKernel InitializeKernel() } */ -// ReSharper restore CommentTypo diff --git a/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs b/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs index e47ab6e5ce19..dc5b52d1eee7 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs @@ -3,50 +3,49 @@ using System; using System.Collections.Generic; using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; +using Microsoft.SemanticKernel.Connectors.AzureAISearch; +using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Plugins.Memory; -using RepoUtils; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; /* The files contains two examples about SK Semantic Memory. * - * 1. Memory using Azure Cognitive Search. + * 1. Memory using Azure AI Search. * 2. Memory using a custom embedding generator and vector engine. * * Semantic Memory allows to store your data like traditional DBs, * adding the ability to query it using natural language. */ - -// ReSharper disable once InconsistentNaming -public static class Example14_SemanticMemory +public class Example14_SemanticMemory : BaseTest { private const string MemoryCollectionName = "SKGitHub"; - public static async Task RunAsync() + [Fact] + public async Task RunAsync() { - Console.WriteLine("=============================================================="); - Console.WriteLine("======== Semantic Memory using Azure Cognitive Search ========"); - Console.WriteLine("=============================================================="); + WriteLine("=============================================================="); + WriteLine("======== Semantic Memory using Azure AI Search ========"); + WriteLine("=============================================================="); - /* This example leverages Azure Cognitive Search to provide SK with Semantic Memory. + /* This example leverages Azure AI Search to provide SK with Semantic Memory. * - * Azure Cognitive Search automatically indexes your data semantically, so you don't + * Azure AI Search automatically indexes your data semantically, so you don't * need to worry about embedding generation. 
*/ var memoryWithACS = new MemoryBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) - .WithMemoryStore(new AzureCognitiveSearchMemoryStore(TestConfiguration.ACS.Endpoint, TestConfiguration.ACS.ApiKey)) + .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) + .WithMemoryStore(new AzureAISearchMemoryStore(TestConfiguration.AzureAISearch.Endpoint, TestConfiguration.AzureAISearch.ApiKey)) .Build(); await RunExampleAsync(memoryWithACS); - Console.WriteLine("===================================================="); - Console.WriteLine("======== Semantic Memory (volatile, in RAM) ========"); - Console.WriteLine("===================================================="); + WriteLine("===================================================="); + WriteLine("======== Semantic Memory (volatile, in RAM) ========"); + WriteLine("===================================================="); /* You can build your own semantic memory combining an Embedding Generator * with a Memory storage that supports search by similarity (ie semantic search). @@ -58,15 +57,14 @@ public static async Task RunAsync() */ var memoryWithCustomDb = new MemoryBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) + .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey) .WithMemoryStore(new VolatileMemoryStore()) .Build(); await RunExampleAsync(memoryWithCustomDb); } - public static async Task RunExampleAsync(ISemanticTextMemory memory) + private async Task RunExampleAsync(ISemanticTextMemory memory) { await StoreMemoryAsync(memory); @@ -105,36 +103,36 @@ public static async Task RunExampleAsync(ISemanticTextMemory memory) */ } - private static async Task SearchMemoryAsync(ISemanticTextMemory memory, string query) + private async Task SearchMemoryAsync(ISemanticTextMemory memory, string query) { - Console.WriteLine("\nQuery: " + query + "\n"); + WriteLine("\nQuery: " + query + "\n"); var memoryResults = memory.SearchAsync(MemoryCollectionName, query, limit: 2, minRelevanceScore: 0.5); int i = 0; await foreach (MemoryQueryResult memoryResult in memoryResults) { - Console.WriteLine($"Result {++i}:"); - Console.WriteLine(" URL: : " + memoryResult.Metadata.Id); - Console.WriteLine(" Title : " + memoryResult.Metadata.Description); - Console.WriteLine(" Relevance: " + memoryResult.Relevance); - Console.WriteLine(); + WriteLine($"Result {++i}:"); + WriteLine(" URL: : " + memoryResult.Metadata.Id); + WriteLine(" Title : " + memoryResult.Metadata.Description); + WriteLine(" Relevance: " + memoryResult.Relevance); + WriteLine(); } - Console.WriteLine("----------------------"); + WriteLine("----------------------"); } - private static async Task StoreMemoryAsync(ISemanticTextMemory memory) + private async Task StoreMemoryAsync(ISemanticTextMemory memory) { /* Store some data in the semantic memory. * - * When using Azure Cognitive Search the data is automatically indexed on write. + * When using Azure AI Search the data is automatically indexed on write. 
* * When using the combination of VolatileStore and Embedding generation, SK takes * care of creating and storing the index */ - Console.WriteLine("\nAdding some GitHub file URLs and their descriptions to the semantic memory."); + WriteLine("\nAdding some GitHub file URLs and their descriptions to the semantic memory."); var githubFiles = SampleData(); var i = 0; foreach (var entry in githubFiles) @@ -149,7 +147,7 @@ await memory.SaveReferenceAsync( Console.Write($" #{++i} saved."); } - Console.WriteLine("\n----------------------"); + WriteLine("\n----------------------"); } private static Dictionary SampleData() @@ -166,10 +164,10 @@ private static Dictionary SampleData() = "Sample demonstrating how to create a chat plugin interfacing with ChatGPT", ["https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel/Memory/VolatileMemoryStore.cs"] = "C# class that defines a volatile embedding store", - ["https://github.com/microsoft/semantic-kernel/blob/main/samples/dotnet/KernelHttpServer/README.md"] - = "README: How to set up a Semantic Kernel Service API using Azure Function Runtime v4", - ["https://github.com/microsoft/semantic-kernel/blob/main/samples/apps/chat-summary-webapp-react/README.md"] - = "README: README associated with a sample chat summary react-based webapp", }; } + + public Example14_SemanticMemory(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs b/dotnet/samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs index b9afec064937..801032dfe8dd 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs @@ -1,34 +1,37 @@ // Copyright (c) Microsoft. All rights reserved. 
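Example14's save-and-search flow reduces to a handful of ISemanticTextMemory calls. A condensed sketch, assuming the same MemoryBuilder configuration shown above; the collection name, texts, and URLs are placeholders rather than the sample's data.

```csharp
// Sketch of the core ISemanticTextMemory operations used by Example14.
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Memory;

public static class SemanticMemorySketch
{
    public static async Task RunAsync(string openAIApiKey)
    {
        ISemanticTextMemory memory = new MemoryBuilder()
            .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", openAIApiKey)
            .WithMemoryStore(new VolatileMemoryStore())
            .Build();

        // Store entries; embeddings are generated on write.
        await memory.SaveInformationAsync("demo", id: "doc1", text: "SK supports native and prompt functions.");
        await memory.SaveReferenceAsync("demo", externalId: "https://example.com/doc2",
            externalSourceName: "web", text: "Planners can compose functions into multi-step plans.");

        // Search by similarity rather than by key.
        await foreach (MemoryQueryResult hit in memory.SearchAsync("demo", "what can planners do?", limit: 1, minRelevanceScore: 0.5))
        {
            System.Console.WriteLine($"{hit.Relevance:F2}: {hit.Metadata.Text}");
        }
    }
}
```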
-using System; -using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; -using Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma; -using Microsoft.SemanticKernel.Connectors.Memory.DuckDB; -using Microsoft.SemanticKernel.Connectors.Memory.Kusto; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone; -using Microsoft.SemanticKernel.Connectors.Memory.Postgres; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant; -using Microsoft.SemanticKernel.Connectors.Memory.Redis; -using Microsoft.SemanticKernel.Connectors.Memory.Sqlite; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate; +using Microsoft.SemanticKernel.Connectors.AzureAISearch; +using Microsoft.SemanticKernel.Connectors.Chroma; +using Microsoft.SemanticKernel.Connectors.DuckDB; +using Microsoft.SemanticKernel.Connectors.Kusto; +using Microsoft.SemanticKernel.Connectors.MongoDB; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.Pinecone; +using Microsoft.SemanticKernel.Connectors.Postgres; +using Microsoft.SemanticKernel.Connectors.Qdrant; +using Microsoft.SemanticKernel.Connectors.Redis; +using Microsoft.SemanticKernel.Connectors.Sqlite; +using Microsoft.SemanticKernel.Connectors.Weaviate; using Microsoft.SemanticKernel.Memory; using Microsoft.SemanticKernel.Plugins.Memory; using Npgsql; -using Pgvector.Npgsql; using RepoUtils; using StackExchange.Redis; +using Xunit; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming -public static class Example15_TextMemoryPlugin +namespace Examples; + +public class Example15_TextMemoryPlugin : BaseTest { private const string MemoryCollectionName = "aboutMe"; - public static async Task RunAsync(CancellationToken cancellationToken = default) + [Theory] + [InlineData("Volatile")] + [InlineData("AzureAISearch")] + public async Task RunAsync(string provider) { IMemoryStore store; @@ -37,7 +40,11 @@ public static async Task RunAsync(CancellationToken cancellationToken = default) /////////////////////////////////////////////////////////////////////////////////////////// // Volatile Memory Store - an in-memory store that is not persisted - store = new VolatileMemoryStore(); + switch (provider) + { + case "AzureAISearch": store = CreateSampleAzureAISearchMemoryStore(); break; + default: store = new VolatileMemoryStore(); break; + } // Sqlite Memory Store - a file-based store that persists data in a Sqlite database // store = await CreateSampleSqliteMemoryStoreAsync(); @@ -45,8 +52,11 @@ public static async Task RunAsync(CancellationToken cancellationToken = default) // DuckDB Memory Store - a file-based store that persists data in a DuckDB database // store = await CreateSampleDuckDbMemoryStoreAsync(); - // Azure Cognitive Search Memory Store - a store that persists data in a hosted Azure Cognitive Search database - // store = CreateSampleAzureCognitiveSearchMemoryStore(); + // MongoDB Memory Store - a store that persists data in a MongoDB database + // store = CreateSampleMongoDBMemoryStore(); + + // Azure AI Search Memory Store - a store that persists data in a hosted Azure AI Search database + // store = CreateSampleAzureAISearchMemoryStore(); // Qdrant Memory Store - a store that persists data in a local or remote Qdrant database // store = CreateSampleQdrantMemoryStore(); @@ -69,52 +79,58 @@ public 
static async Task RunAsync(CancellationToken cancellationToken = default) // Kusto Memory Store // store = CreateSampleKustoMemoryStore(); - await RunWithStoreAsync(store, cancellationToken); + await RunWithStoreAsync(store); } - private static async Task CreateSampleSqliteMemoryStoreAsync() + private async Task CreateSampleSqliteMemoryStoreAsync() { IMemoryStore store = await SqliteMemoryStore.ConnectAsync("memories.sqlite"); return store; } - private static async Task CreateSampleDuckDbMemoryStoreAsync() + private async Task CreateSampleDuckDbMemoryStoreAsync() { IMemoryStore store = await DuckDBMemoryStore.ConnectAsync("memories.duckdb"); return store; } - private static IMemoryStore CreateSampleAzureCognitiveSearchMemoryStore() + private IMemoryStore CreateSampleMongoDBMemoryStore() + { + IMemoryStore store = new MongoDBMemoryStore(TestConfiguration.MongoDB.ConnectionString, "memoryPluginExample"); + return store; + } + + private IMemoryStore CreateSampleAzureAISearchMemoryStore() { - IMemoryStore store = new AzureCognitiveSearchMemoryStore(TestConfiguration.ACS.Endpoint, TestConfiguration.ACS.ApiKey); + IMemoryStore store = new AzureAISearchMemoryStore(TestConfiguration.AzureAISearch.Endpoint, TestConfiguration.AzureAISearch.ApiKey); return store; } - private static IMemoryStore CreateSampleChromaMemoryStore() + private IMemoryStore CreateSampleChromaMemoryStore() { IMemoryStore store = new ChromaMemoryStore(TestConfiguration.Chroma.Endpoint, ConsoleLogger.LoggerFactory); return store; } - private static IMemoryStore CreateSampleQdrantMemoryStore() + private IMemoryStore CreateSampleQdrantMemoryStore() { IMemoryStore store = new QdrantMemoryStore(TestConfiguration.Qdrant.Endpoint, 1536, ConsoleLogger.LoggerFactory); return store; } - private static IMemoryStore CreateSamplePineconeMemoryStore() + private IMemoryStore CreateSamplePineconeMemoryStore() { IMemoryStore store = new PineconeMemoryStore(TestConfiguration.Pinecone.Environment, TestConfiguration.Pinecone.ApiKey, ConsoleLogger.LoggerFactory); return store; } - private static IMemoryStore CreateSampleWeaviateMemoryStore() + private IMemoryStore CreateSampleWeaviateMemoryStore() { IMemoryStore store = new WeaviateMemoryStore(TestConfiguration.Weaviate.Endpoint, TestConfiguration.Weaviate.ApiKey); return store; } - private static async Task CreateSampleRedisMemoryStoreAsync() + private async Task CreateSampleRedisMemoryStoreAsync() { string configuration = TestConfiguration.Redis.Configuration; ConnectionMultiplexer connectionMultiplexer = await ConnectionMultiplexer.ConnectAsync(configuration); @@ -139,16 +155,15 @@ private static IMemoryStore CreateSampleKustoMemoryStore() return store; } - private static async Task RunWithStoreAsync(IMemoryStore memoryStore, CancellationToken cancellationToken) + private async Task RunWithStoreAsync(IMemoryStore memoryStore) { - var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) - .WithOpenAITextEmbeddingGenerationService(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey) + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + .AddOpenAITextEmbeddingGeneration(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey) .Build(); // Create an embedding generator to use for semantic memory. 
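Whichever IMemoryStore is selected above, the wiring that follows is the same: pair the store with an embedding generation service, wrap both in a SemanticTextMemory, and expose it to the kernel as a TextMemoryPlugin. A hedged sketch of that composition, mirroring the calls used later in this example; the model id, API key, collection, and keys are placeholders.

```csharp
// Sketch: compose an IMemoryStore with an embedding service and expose it as TextMemoryPlugin.
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Memory;
using Microsoft.SemanticKernel.Plugins.Memory;

public static class TextMemoryPluginSketch
{
    public static async Task RunAsync(string embeddingModelId, string apiKey)
    {
        IMemoryStore store = new VolatileMemoryStore();
        var embeddingGenerator = new OpenAITextEmbeddingGenerationService(embeddingModelId, apiKey);
        var textMemory = new SemanticTextMemory(store, embeddingGenerator);

        Kernel kernel = Kernel.CreateBuilder().Build();
        KernelPlugin memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory));

        // Save and retrieve through the plugin, mirroring PART 2 of the sample.
        await kernel.InvokeAsync(memoryPlugin["Save"], new()
        {
            [TextMemoryPlugin.InputParam] = "My family is from New York",
            [TextMemoryPlugin.CollectionParam] = "aboutMe",
            [TextMemoryPlugin.KeyParam] = "info5",
        });

        FunctionResult result = await kernel.InvokeAsync(memoryPlugin["Retrieve"], new()
        {
            [TextMemoryPlugin.CollectionParam] = "aboutMe",
            [TextMemoryPlugin.KeyParam] = "info5",
        });

        System.Console.WriteLine(result.GetValue<string>());
    }
}
```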
- var embeddingGenerator = new OpenAITextEmbeddingGeneration(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey); + var embeddingGenerator = new OpenAITextEmbeddingGenerationService(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey); // The combination of the text embedding generator and the memory store makes up the 'SemanticTextMemory' object used to // store and retrieve memories. @@ -159,57 +174,56 @@ private static async Task RunWithStoreAsync(IMemoryStore memoryStore, Cancellati // // This is a simple way to store memories from a code perspective, without using the Kernel. ///////////////////////////////////////////////////////////////////////////////////////////////////// - Console.WriteLine("== PART 1a: Saving Memories through the ISemanticTextMemory object =="); + WriteLine("== PART 1a: Saving Memories through the ISemanticTextMemory object =="); - Console.WriteLine("Saving memory with key 'info1': \"My name is Andrea\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info1", text: "My name is Andrea", cancellationToken: cancellationToken); + WriteLine("Saving memory with key 'info1': \"My name is Andrea\""); + await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info1", text: "My name is Andrea"); - Console.WriteLine("Saving memory with key 'info2': \"I work as a tourist operator\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info2", text: "I work as a tourist operator", cancellationToken: cancellationToken); + WriteLine("Saving memory with key 'info2': \"I work as a tourist operator\""); + await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info2", text: "I work as a tourist operator"); - Console.WriteLine("Saving memory with key 'info3': \"I've been living in Seattle since 2005\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info3", text: "I've been living in Seattle since 2005", cancellationToken: cancellationToken); + WriteLine("Saving memory with key 'info3': \"I've been living in Seattle since 2005\""); + await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info3", text: "I've been living in Seattle since 2005"); - Console.WriteLine("Saving memory with key 'info4': \"I visited France and Italy five times since 2015\""); - await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info4", text: "I visited France and Italy five times since 2015", cancellationToken: cancellationToken); + WriteLine("Saving memory with key 'info4': \"I visited France and Italy five times since 2015\""); + await textMemory.SaveInformationAsync(MemoryCollectionName, id: "info4", text: "I visited France and Italy five times since 2015"); // Retrieve a memory - Console.WriteLine("== PART 1b: Retrieving Memories through the ISemanticTextMemory object =="); - MemoryQueryResult? lookup = await textMemory.GetAsync(MemoryCollectionName, "info1", cancellationToken: cancellationToken); - Console.WriteLine("Memory with key 'info1':" + lookup?.Metadata.Text ?? "ERROR: memory not found"); - Console.WriteLine(); + WriteLine("== PART 1b: Retrieving Memories through the ISemanticTextMemory object =="); + MemoryQueryResult? lookup = await textMemory.GetAsync(MemoryCollectionName, "info1"); + WriteLine("Memory with key 'info1':" + lookup?.Metadata.Text ?? 
"ERROR: memory not found"); + WriteLine(); ///////////////////////////////////////////////////////////////////////////////////////////////////// // PART 2: Create TextMemoryPlugin, store and retrieve memories through the Kernel. // - // This enables semantic functions and the AI (via Planners) to access memories + // This enables prompt functions and the AI (via Planners) to access memories ///////////////////////////////////////////////////////////////////////////////////////////////////// - Console.WriteLine("== PART 2a: Saving Memories through the Kernel with TextMemoryPlugin and the 'Save' function =="); + WriteLine("== PART 2a: Saving Memories through the Kernel with TextMemoryPlugin and the 'Save' function =="); // Import the TextMemoryPlugin into the Kernel for other functions - var memoryPlugin = new TextMemoryPlugin(textMemory); - var memoryFunctions = kernel.ImportFunctions(memoryPlugin); + var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); // Save a memory with the Kernel - Console.WriteLine("Saving memory with key 'info5': \"My family is from New York\""); - await kernel.RunAsync(memoryFunctions["Save"], new() + WriteLine("Saving memory with key 'info5': \"My family is from New York\""); + await kernel.InvokeAsync(memoryPlugin["Save"], new() { + [TextMemoryPlugin.InputParam] = "My family is from New York", [TextMemoryPlugin.CollectionParam] = MemoryCollectionName, [TextMemoryPlugin.KeyParam] = "info5", - ["input"] = "My family is from New York" - }, cancellationToken); + }); // Retrieve a specific memory with the Kernel - Console.WriteLine("== PART 2b: Retrieving Memories through the Kernel with TextMemoryPlugin and the 'Retrieve' function =="); - var result = await kernel.RunAsync(memoryFunctions["Retrieve"], new() + WriteLine("== PART 2b: Retrieving Memories through the Kernel with TextMemoryPlugin and the 'Retrieve' function =="); + var result = await kernel.InvokeAsync(memoryPlugin["Retrieve"], new KernelArguments() { [TextMemoryPlugin.CollectionParam] = MemoryCollectionName, [TextMemoryPlugin.KeyParam] = "info5" - }, cancellationToken); + }); - Console.WriteLine("Memory with key 'info5':" + result.GetValue() ?? "ERROR: memory not found"); - Console.WriteLine(); + WriteLine("Memory with key 'info5':" + result.GetValue() ?? "ERROR: memory not found"); + WriteLine(); ///////////////////////////////////////////////////////////////////////////////////////////////////// // PART 3: Recall similar ideas with semantic search @@ -217,35 +231,34 @@ private static async Task RunWithStoreAsync(IMemoryStore memoryStore, Cancellati // Uses AI Embeddings for fuzzy lookup of memories based on intent, rather than a specific key. 
///////////////////////////////////////////////////////////////////////////////////////////////////// - Console.WriteLine("== PART 3: Recall (similarity search) with AI Embeddings =="); + WriteLine("== PART 3: Recall (similarity search) with AI Embeddings =="); - Console.WriteLine("== PART 3a: Recall (similarity search) with ISemanticTextMemory =="); - Console.WriteLine("Ask: where did I grow up?"); + WriteLine("== PART 3a: Recall (similarity search) with ISemanticTextMemory =="); + WriteLine("Ask: where did I grow up?"); await foreach (var answer in textMemory.SearchAsync( collection: MemoryCollectionName, query: "where did I grow up?", limit: 2, minRelevanceScore: 0.79, - withEmbeddings: true, - cancellationToken: cancellationToken)) + withEmbeddings: true)) { - Console.WriteLine($"Answer: {answer.Metadata.Text}"); + WriteLine($"Answer: {answer.Metadata.Text}"); } - Console.WriteLine("== PART 3b: Recall (similarity search) with Kernel and TextMemoryPlugin 'Recall' function =="); - Console.WriteLine("Ask: where do I live?"); + WriteLine("== PART 3b: Recall (similarity search) with Kernel and TextMemoryPlugin 'Recall' function =="); + WriteLine("Ask: where do I live?"); - result = await kernel.RunAsync(memoryFunctions["Recall"], new() + result = await kernel.InvokeAsync(memoryPlugin["Recall"], new() { + [TextMemoryPlugin.InputParam] = "Ask: where do I live?", [TextMemoryPlugin.CollectionParam] = MemoryCollectionName, [TextMemoryPlugin.LimitParam] = "2", [TextMemoryPlugin.RelevanceParam] = "0.79", - ["input"] = "Ask: where do I live?" - }, cancellationToken); + }); - Console.WriteLine($"Answer: {result.GetValue()}"); - Console.WriteLine(); + WriteLine($"Answer: {result.GetValue()}"); + WriteLine(); /* Output: @@ -260,15 +273,15 @@ private static async Task RunWithStoreAsync(IMemoryStore memoryStore, Cancellati */ ///////////////////////////////////////////////////////////////////////////////////////////////////// - // PART 3: TextMemoryPlugin Recall in a Semantic Function + // PART 4: TextMemoryPlugin Recall in a Prompt Function // // Looks up related memories when rendering a prompt template, then sends the rendered prompt to - // the text completion model to answer a natural language query. + // the text generation model to answer a natural language query. ///////////////////////////////////////////////////////////////////////////////////////////////////// - Console.WriteLine("== PART 4: Using TextMemoryPlugin 'Recall' function in a Semantic Function =="); + WriteLine("== PART 4: Using TextMemoryPlugin 'Recall' function in a Prompt Function =="); - // Build a semantic function that uses memory to find facts + // Build a prompt function that uses memory to find facts const string RecallFunctionDefinition = @" Consider only the facts below when answering questions: @@ -282,17 +295,18 @@ END FACTS Answer: "; - var aboutMeOracle = kernel.CreateSemanticFunction(RecallFunctionDefinition, new OpenAIRequestSettings() { MaxTokens = 100 }); + var aboutMeOracle = kernel.CreateFunctionFromPrompt(RecallFunctionDefinition, new OpenAIPromptExecutionSettings() { MaxTokens = 100 }); - result = await kernel.RunAsync(aboutMeOracle, new() + result = await kernel.InvokeAsync(aboutMeOracle, new() { + [TextMemoryPlugin.InputParam] = "Do I live in the same town where I grew up?", [TextMemoryPlugin.CollectionParam] = MemoryCollectionName, + [TextMemoryPlugin.LimitParam] = "2", [TextMemoryPlugin.RelevanceParam] = "0.79", - ["input"] = "Do I live in the same town where I grew up?" 
- }, cancellationToken); + }); - Console.WriteLine("Ask: Do I live in the same town where I grew up?"); - Console.WriteLine($"Answer: {result.GetValue()}"); + WriteLine("Ask: Do I live in the same town where I grew up?"); + WriteLine($"Answer: {result.GetValue()}"); /* Approximate Output: @@ -304,25 +318,29 @@ END FACTS // ///////////////////////////////////////////////////////////////////////////////////////////////////// - Console.WriteLine("== PART 5: Cleanup, deleting database collection =="); + WriteLine("== PART 5: Cleanup, deleting database collection =="); - Console.WriteLine("Printing Collections in DB..."); - var collections = memoryStore.GetCollectionsAsync(cancellationToken); + WriteLine("Printing Collections in DB..."); + var collections = memoryStore.GetCollectionsAsync(); await foreach (var collection in collections) { - Console.WriteLine(collection); + WriteLine(collection); } - Console.WriteLine(); + WriteLine(); - Console.WriteLine("Removing Collection {0}", MemoryCollectionName); - await memoryStore.DeleteCollectionAsync(MemoryCollectionName, cancellationToken); - Console.WriteLine(); + WriteLine($"Removing Collection {MemoryCollectionName}"); + await memoryStore.DeleteCollectionAsync(MemoryCollectionName); + WriteLine(); - Console.WriteLine($"Printing Collections in DB (after removing {MemoryCollectionName})..."); - collections = memoryStore.GetCollectionsAsync(cancellationToken); + WriteLine($"Printing Collections in DB (after removing {MemoryCollectionName})..."); + collections = memoryStore.GetCollectionsAsync(); await foreach (var collection in collections) { - Console.WriteLine(collection); + WriteLine(collection); } } + + public Example15_TextMemoryPlugin(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example16_CustomLLM.cs b/dotnet/samples/KernelSyntaxExamples/Example16_CustomLLM.cs index e5f6405f7824..7cb61dd0b9b8 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example16_CustomLLM.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example16_CustomLLM.cs @@ -3,163 +3,122 @@ using System; using System.Collections.Generic; using System.Runtime.CompilerServices; -using System.Text.Json; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Orchestration; -using RepoUtils; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; +using Xunit.Abstractions; -#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously +namespace Examples; /** - * The following example shows how to plug into SK a custom text completion model. + * The following example shows how to plug a custom text generation model into SK. * - * This might be useful in a few scenarios, for example: + * To do this, this example uses a text generation service stub (MyTextGenerationService) and + * no actual model. + * + * Using a custom text generation model within SK can be useful in a few scenarios, for example: * - You are not using OpenAI or Azure OpenAI models * - You are using OpenAI/Azure OpenAI models but the models are behind a web service with a different API schema * - You want to use a local model * - * Note that all text completion models are deprecated by OpenAI and will be removed in a future release. 
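The cleanup step shown above (PART 5) relies on two IMemoryStore operations: listing collections and deleting one. A compact sketch of that pattern; the store type and collection name are placeholders.

```csharp
// Sketch: enumerate and delete memory collections directly on the IMemoryStore.
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Memory;

public static class MemoryCleanupSketch
{
    public static async Task RunAsync()
    {
        IMemoryStore store = new VolatileMemoryStore();

        await foreach (string collection in store.GetCollectionsAsync())
        {
            System.Console.WriteLine($"Found collection: {collection}");
        }

        // Remove a collection and everything stored in it.
        await store.DeleteCollectionAsync("aboutMe");
    }
}
```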
+ * Note that all OpenAI text generation models are deprecated and no longer available to new customers. * * Refer to example 33 for streaming chat completion. */ -public class MyTextCompletionService : ITextCompletion +public class Example16_CustomLLM : BaseTest { - public Task> GetCompletionsAsync(string text, AIRequestSettings? requestSettings, CancellationToken cancellationToken = default) + [Fact] + public async Task CustomTextGenerationWithKernelFunctionAsync() { - return Task.FromResult>(new List - { - new MyTextCompletionStreamingResult() - }); - } + WriteLine("\n======== Custom LLM - Text Completion - KernelFunction ========"); - public async IAsyncEnumerable GetStreamingCompletionsAsync(string text, AIRequestSettings? requestSettings, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - yield return new MyTextCompletionStreamingResult(); - } -} + IKernelBuilder builder = Kernel.CreateBuilder(); + // Add your text generation service as a singleton instance + builder.Services.AddKeyedSingleton("myService1", new MyTextGenerationService()); + // Add your text generation service as a factory method + builder.Services.AddKeyedSingleton("myService2", (_, _) => new MyTextGenerationService()); + Kernel kernel = builder.Build(); -public class MyTextCompletionStreamingResult : ITextStreamingResult, ITextResult -{ - private readonly ModelResult _modelResult = new(new - { - Content = Text, - Message = "This is my model raw response", - Tokens = Text.Split(' ').Length - }); + const string FunctionDefinition = "Write one paragraph on {{$input}}"; + var paragraphWritingFunction = kernel.CreateFunctionFromPrompt(FunctionDefinition); - private const string Text = @" ..output from your custom model... Example: -AI is awesome because it can help us solve complex problems, enhance our creativity, -and improve our lives in many ways. AI can perform tasks that are too difficult, -tedious, or dangerous for humans, such as diagnosing diseases, detecting fraud, or -exploring space. AI can also augment our abilities and inspire us to create new forms -of art, music, or literature. AI can also improve our well-being and happiness by -providing personalized recommendations, entertainment, and assistance. 
AI is awesome"; - - public ModelResult ModelResult => this._modelResult; - - public async Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - // Forcing a 2 sec delay (Simulating custom LLM lag) - await Task.Delay(2000, cancellationToken); + const string Input = "Why AI is awesome"; + WriteLine($"Function input: {Input}\n"); + var result = await paragraphWritingFunction.InvokeAsync(kernel, new() { ["input"] = Input }); - return Text; + WriteLine(result); } - public async IAsyncEnumerable GetCompletionStreamingAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + [Fact] + public async Task CustomTextGenerationAsync() { - yield return Environment.NewLine; + WriteLine("\n======== Custom LLM - Text Completion - Raw ========"); - // Your model logic here - var streamedOutput = Text.Split(' '); - foreach (string word in streamedOutput) - { - await Task.Delay(50, cancellationToken); - yield return $"{word} "; - } - } -} + const string Prompt = "Write one paragraph on why AI is awesome."; + var completionService = new MyTextGenerationService(); -// ReSharper disable StringLiteralTypo -// ReSharper disable once InconsistentNaming -public static class Example16_CustomLLM -{ - public static async Task RunAsync() - { - await CustomTextCompletionWithSKFunctionAsync(); + WriteLine($"Prompt: {Prompt}\n"); + var result = await completionService.GetTextContentAsync(Prompt); - await CustomTextCompletionAsync(); - await CustomTextCompletionStreamAsync(); + WriteLine(result); } - private static async Task CustomTextCompletionWithSKFunctionAsync() + [Fact] + public async Task CustomTextGenerationStreamAsync() { - Console.WriteLine("======== Custom LLM - Text Completion - SKFunction ========"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - // Add your text completion service as a singleton instance - .WithAIService("myService1", new MyTextCompletionService()) - // Add your text completion service as a factory method - .WithAIService("myService2", (log) => new MyTextCompletionService()) - .Build(); - - const string FunctionDefinition = "Does the text contain grammar errors (Y/N)? Text: {{$input}}"; + WriteLine("\n======== Custom LLM - Text Completion - Raw Streaming ========"); - var textValidationFunction = kernel.CreateSemanticFunction(FunctionDefinition); + const string Prompt = "Write one paragraph on why AI is awesome."; + var completionService = new MyTextGenerationService(); - var result = await textValidationFunction.InvokeAsync("I mised the training session this morning", kernel); - Console.WriteLine(result.GetValue()); + WriteLine($"Prompt: {Prompt}\n"); + await foreach (var message in completionService.GetStreamingTextContentsAsync(Prompt)) + { + Write(message); + } - // Details of the my custom model response - Console.WriteLine(JsonSerializer.Serialize( - result.GetModelResults(), - new JsonSerializerOptions() { WriteIndented = true } - )); + WriteLine(); } - private static async Task CustomTextCompletionAsync() + /// + /// Text generation service stub. + /// + private sealed class MyTextGenerationService : ITextGenerationService { - Console.WriteLine("======== Custom LLM - Text Completion - Raw ========"); - var completionService = new MyTextCompletionService(); - - var result = await completionService.CompleteAsync("I missed the training session this morning"); - - Console.WriteLine(result); - } + private const string LLMResultText = @"...output from your custom model... 
Example: +AI is awesome because it can help us solve complex problems, enhance our creativity, +and improve our lives in many ways. AI can perform tasks that are too difficult, +tedious, or dangerous for humans, such as diagnosing diseases, detecting fraud, or +exploring space. AI can also augment our abilities and inspire us to create new forms +of art, music, or literature. AI can also improve our well-being and happiness by +providing personalized recommendations, entertainment, and assistance. AI is awesome."; - private static async Task CustomTextCompletionStreamAsync() - { - Console.WriteLine("======== Custom LLM - Text Completion - Raw Streaming ========"); + public IReadOnlyDictionary Attributes => new Dictionary(); - IKernel kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); - ITextCompletion textCompletion = new MyTextCompletionService(); + public async IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + foreach (string word in LLMResultText.Split(' ', StringSplitOptions.RemoveEmptyEntries)) + { + await Task.Delay(50, cancellationToken); + cancellationToken.ThrowIfCancellationRequested(); - var prompt = "Write one paragraph why AI is awesome"; - await TextCompletionStreamAsync(prompt, textCompletion); - } + yield return new StreamingTextContent($"{word} "); + } + } - private static async Task TextCompletionStreamAsync(string prompt, ITextCompletion textCompletion) - { - var requestSettings = new OpenAIRequestSettings() - { - MaxTokens = 100, - FrequencyPenalty = 0, - PresencePenalty = 0, - Temperature = 1, - TopP = 0.5 - }; - - Console.WriteLine("Prompt: " + prompt); - await foreach (string message in textCompletion.CompleteStreamAsync(prompt, requestSettings)) + public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) { - Console.Write(message); + return Task.FromResult>(new List + { + new(LLMResultText) + }); } + } - Console.WriteLine(); + public Example16_CustomLLM(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs b/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs index b583f9f85809..3115e2f49967 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example17_ChatGPT.cs @@ -1,21 +1,25 @@ // Copyright (c) Microsoft. All rights reserved. 
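// A hedged aside on Example 16 above: because MyTextGenerationService.GetStreamingTextContentsAsync
// accepts a CancellationToken, a caller can stop the simulated generation early. The method below is
// illustrative only (it is not part of the original sample) and would live inside Example16_CustomLLM,
// where MyTextGenerationService and the BaseTest Write/WriteLine helpers are in scope.
private async Task ConsumeStreamingWithCancellationAsync()
{
    using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(2));
    var service = new MyTextGenerationService();

    try
    {
        await foreach (var chunk in service.GetStreamingTextContentsAsync(
            "Write one paragraph on why AI is awesome.", cancellationToken: cts.Token))
        {
            Write(chunk);
        }
    }
    catch (OperationCanceledException)
    {
        // Task.Delay inside the stub throws once the 2-second budget is exhausted.
        WriteLine("[generation cancelled]");
    }
}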
-using System; using System.Linq; using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; - -/** - * The following example shows how to use Semantic Kernel with OpenAI ChatGPT API - */ -// ReSharper disable once InconsistentNaming -public static class Example17_ChatGPT +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// The following example shows how to use Semantic Kernel with OpenAI ChatGPT API +public class Example17_ChatGPT : BaseTest { - public static async Task RunAsync() + [Fact] + public async Task OpenAIChatSampleAsync() { - await AzureOpenAIChatSampleAsync(); - await OpenAIChatSampleAsync(); + WriteLine("======== Open AI - ChatGPT ========"); + + OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + + await StartChatAsync(chatCompletionService); /* Output: @@ -46,41 +50,34 @@ I hope these suggestions are helpful! */ } - private static async Task OpenAIChatSampleAsync() - { - Console.WriteLine("======== Open AI - ChatGPT ========"); - - OpenAIChatCompletion openAIChatCompletion = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - - await StartChatAsync(openAIChatCompletion); - } - - private static async Task AzureOpenAIChatSampleAsync() + [Fact] + public async Task AzureOpenAIChatSampleAsync() { - Console.WriteLine("======== Azure Open AI - ChatGPT ========"); + WriteLine("======== Azure Open AI - ChatGPT ========"); - AzureChatCompletion azureChatCompletion = new( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey); + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); - await StartChatAsync(azureChatCompletion); + await StartChatAsync(chatCompletionService); } - private static async Task StartChatAsync(IChatCompletion chatGPT) + private async Task StartChatAsync(IChatCompletionService chatGPT) { - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); + WriteLine("Chat content:"); + WriteLine("------------------------"); - var chatHistory = chatGPT.CreateNewChat("You are a librarian, expert about books"); + var chatHistory = new ChatHistory("You are a librarian, expert about books"); // First user message chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); await MessageOutputAsync(chatHistory); // First bot assistant message - string reply = await chatGPT.GenerateMessageAsync(chatHistory); - chatHistory.AddAssistantMessage(reply); + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); await MessageOutputAsync(chatHistory); // Second user message @@ -88,21 +85,25 @@ private static async Task StartChatAsync(IChatCompletion chatGPT) await MessageOutputAsync(chatHistory); // Second bot assistant message - reply = await chatGPT.GenerateMessageAsync(chatHistory); - chatHistory.AddAssistantMessage(reply); + reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); await MessageOutputAsync(chatHistory); } /// /// Outputs the last message of the chat 
history /// - private static Task MessageOutputAsync(ChatHistory chatHistory) + private Task MessageOutputAsync(ChatHistory chatHistory) { - var message = chatHistory.Messages.Last(); + var message = chatHistory.Last(); - Console.WriteLine($"{message.Role}: {message.Content}"); - Console.WriteLine("------------------------"); + WriteLine($"{message.Role}: {message.Content}"); + WriteLine("------------------------"); return Task.CompletedTask; } + + public Example17_ChatGPT(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example18_DallE.cs b/dotnet/samples/KernelSyntaxExamples/Example18_DallE.cs index 26bf8c1196f2..a78e891d4110 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example18_DallE.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example18_DallE.cs @@ -2,44 +2,36 @@ using System; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.ImageGeneration; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using RepoUtils; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.TextToImage; +using Xunit; +using Xunit.Abstractions; -/** - * The following example shows how to use Semantic Kernel with OpenAI Dall-E 2 to create images - */ +namespace Examples; -// ReSharper disable once InconsistentNaming -public static class Example18_DallE +// The following example shows how to use Semantic Kernel with OpenAI Dall-E 2 to create images +public class Example18_DallE : BaseTest { - public static async Task RunAsync() + [Fact] + public async Task OpenAIDallEAsync() { - await OpenAIDallEAsync(); - await AzureOpenAIDallEAsync(); - } + WriteLine("======== OpenAI Dall-E 2 Text To Image ========"); - private static async Task OpenAIDallEAsync() - { - Console.WriteLine("======== OpenAI Dall-E 2 Image Generation ========"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - // Add your image generation service - .WithOpenAIImageGenerationService(TestConfiguration.OpenAI.ApiKey) - // Add your chat completion service - .WithOpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAITextToImage(TestConfiguration.OpenAI.ApiKey) // Add your text to image service + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) // Add your chat completion service .Build(); - IImageGeneration dallE = kernel.GetService(); + ITextToImageService dallE = kernel.GetRequiredService(); var imageDescription = "A cute baby sea otter"; var image = await dallE.GenerateImageAsync(imageDescription, 256, 256); - Console.WriteLine(imageDescription); - Console.WriteLine("Image URL: " + image); + WriteLine(imageDescription); + WriteLine("Image URL: " + image); /* Output: @@ -48,34 +40,34 @@ A cute baby sea otter */ - Console.WriteLine("======== Chat with images ========"); + WriteLine("======== Chat with images ========"); - IChatCompletion chatGPT = kernel.GetService(); - var chatHistory = chatGPT.CreateNewChat( - "You're chatting with a user. Instead of replying directly to the user" + - " provide the description of an image that expresses what you want to say." + - " The user won't see your message, they will see only the image. 
The system " + - " generates an image using your description, so it's important you describe the image with details."); + var chatGPT = kernel.GetRequiredService(); + var chatHistory = new ChatHistory( + "You're chatting with a user. Instead of replying directly to the user" + + " provide the description of an image that expresses what you want to say." + + " The user won't see your message, they will see only the image. The system " + + " generates an image using your description, so it's important you describe the image with details."); var msg = "Hi, I'm from Tokyo, where are you from?"; chatHistory.AddUserMessage(msg); - Console.WriteLine("User: " + msg); + WriteLine("User: " + msg); - string reply = await chatGPT.GenerateMessageAsync(chatHistory); - chatHistory.AddAssistantMessage(reply); - image = await dallE.GenerateImageAsync(reply, 256, 256); - Console.WriteLine("Bot: " + image); - Console.WriteLine("Img description: " + reply); + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + image = await dallE.GenerateImageAsync(reply.Content!, 256, 256); + WriteLine("Bot: " + image); + WriteLine("Img description: " + reply); msg = "Oh, wow. Not sure where that is, could you provide more details?"; chatHistory.AddUserMessage(msg); - Console.WriteLine("User: " + msg); + WriteLine("User: " + msg); - reply = await chatGPT.GenerateMessageAsync(chatHistory); - chatHistory.AddAssistantMessage(reply); - image = await dallE.GenerateImageAsync(reply, 256, 256); - Console.WriteLine("Bot: " + image); - Console.WriteLine("Img description: " + reply); + reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + image = await dallE.GenerateImageAsync(reply.Content!, 256, 256); + WriteLine("Bot: " + image); + WriteLine("Img description: " + reply); /* Output: @@ -90,24 +82,41 @@ A cute baby sea otter */ } - public static async Task AzureOpenAIDallEAsync() + [Fact(Skip = "Generating the Image can take too long and often break the test")] + public async Task AzureOpenAIDallEAsync() { - Console.WriteLine("========Azure OpenAI Dall-E 2 Image Generation ========"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - // Add your image generation service - .WithAzureOpenAIImageGenerationService(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey) - // Add your chat completion service - .WithAzureChatCompletionService(TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey) - .Build(); - - IImageGeneration dallE = kernel.GetService(); + WriteLine("========Azure OpenAI Dall-E 3 Text To Image ========"); + + var builder = Kernel.CreateBuilder() + .AddAzureOpenAITextToImage( // Add your text to image service + deploymentName: TestConfiguration.AzureOpenAI.ImageModelId, + endpoint: TestConfiguration.AzureOpenAI.ImageEndpoint, + apiKey: TestConfiguration.AzureOpenAI.ImageApiKey, + modelId: TestConfiguration.AzureOpenAI.ImageModelId, + apiVersion: "2023-12-01-preview") //Dall-E 3 is only supported in this version + .AddAzureOpenAIChatCompletion( // Add your chat completion service + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey); + + builder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard resiliency policy, augmented to retry 5 times + 
c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.MaxRetryAttempts = 5; + o.TotalRequestTimeout.Timeout = TimeSpan.FromSeconds(60); + }); + }); + + var kernel = builder.Build(); + + ITextToImageService dallE = kernel.GetRequiredService(); var imageDescription = "A cute baby sea otter"; - var image = await dallE.GenerateImageAsync(imageDescription, 256, 256); + var image = await dallE.GenerateImageAsync(imageDescription, 1024, 1024); - Console.WriteLine(imageDescription); - Console.WriteLine("Image URL: " + image); + WriteLine(imageDescription); + WriteLine("Image URL: " + image); /* Output: @@ -116,10 +125,10 @@ A cute baby sea otter */ - Console.WriteLine("======== Chat with images ========"); + WriteLine("======== Chat with images ========"); - IChatCompletion chatGPT = kernel.GetService(); - var chatHistory = (OpenAIChatHistory)chatGPT.CreateNewChat( + var chatGPT = kernel.GetRequiredService(); + var chatHistory = new ChatHistory( "You're chatting with a user. Instead of replying directly to the user" + " provide the description of an image that expresses what you want to say." + " The user won't see your message, they will see only the image. The system " + @@ -127,23 +136,23 @@ A cute baby sea otter var msg = "Hi, I'm from Tokyo, where are you from?"; chatHistory.AddUserMessage(msg); - Console.WriteLine("User: " + msg); + WriteLine("User: " + msg); - string reply = await chatGPT.GenerateMessageAsync(chatHistory); - chatHistory.AddAssistantMessage(reply); - image = await dallE.GenerateImageAsync(reply, 256, 256); - Console.WriteLine("Bot: " + image); - Console.WriteLine("Img description: " + reply); + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + image = await dallE.GenerateImageAsync(reply.Content!, 1024, 1024); + WriteLine("Bot: " + image); + WriteLine("Img description: " + reply); msg = "Oh, wow. Not sure where that is, could you provide more details?"; chatHistory.AddUserMessage(msg); - Console.WriteLine("User: " + msg); + WriteLine("User: " + msg); - reply = await chatGPT.GenerateMessageAsync(chatHistory); - chatHistory.AddAssistantMessage(reply); - image = await dallE.GenerateImageAsync(reply, 256, 256); - Console.WriteLine("Bot: " + image); - Console.WriteLine("Img description: " + reply); + reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + image = await dallE.GenerateImageAsync(reply.Content!, 1024, 1024); + WriteLine("Bot: " + image); + WriteLine("Img description: " + reply); /* Output: @@ -157,4 +166,8 @@ A cute baby sea otter */ } + + public Example18_DallE(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example20_HuggingFace.cs b/dotnet/samples/KernelSyntaxExamples/Example20_HuggingFace.cs index 124f68c3f639..6e1ce7ec47f6 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example20_HuggingFace.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example20_HuggingFace.cs @@ -1,42 +1,35 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using RepoUtils; +using Xunit; +using Xunit.Abstractions; -/** - * The following example shows how to use Semantic Kernel with HuggingFace API. 
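// A hedged note on the resilience setup in Example 18 above: AddStandardResilienceHandler() from
// Microsoft.Extensions.Http.Resilience composes a rate limiter, total request timeout, retry,
// circuit breaker and per-attempt timeout, and the sample only overrides two of those knobs.
// The extra per-attempt override below is illustrative and not part of the original sample.
builder.Services.ConfigureHttpClientDefaults(c =>
{
    c.AddStandardResilienceHandler().Configure(o =>
    {
        o.Retry.MaxRetryAttempts = 5;
        o.AttemptTimeout.Timeout = TimeSpan.FromSeconds(30);      // budget for each image-generation attempt
        o.TotalRequestTimeout.Timeout = TimeSpan.FromSeconds(60); // overall budget across retries
    });
});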
- */ -// ReSharper disable once InconsistentNaming -public static class Example20_HuggingFace -{ - public static async Task RunAsync() - { - await RunInferenceApiExampleAsync(); - await RunLlamaExampleAsync(); - } +namespace Examples; +// The following example shows how to use Semantic Kernel with HuggingFace API. +public class Example20_HuggingFace : BaseTest +{ /// /// This example uses HuggingFace Inference API to access hosted models. /// More information here: /// - private static async Task RunInferenceApiExampleAsync() + [Fact] + public async Task RunInferenceApiExampleAsync() { - Console.WriteLine("\n======== HuggingFace Inference API example ========\n"); + WriteLine("\n======== HuggingFace Inference API example ========\n"); - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithHuggingFaceTextCompletionService( + Kernel kernel = Kernel.CreateBuilder() + .AddHuggingFaceTextGeneration( model: TestConfiguration.HuggingFace.ModelId, apiKey: TestConfiguration.HuggingFace.ApiKey) .Build(); - var questionAnswerFunction = kernel.CreateSemanticFunction("Question: {{$input}}; Answer:"); + var questionAnswerFunction = kernel.CreateFunctionFromPrompt("Question: {{$input}}; Answer:"); - var result = await kernel.RunAsync("What is New York?", questionAnswerFunction); + var result = await kernel.InvokeAsync(questionAnswerFunction, new() { ["input"] = "What is New York?" }); - Console.WriteLine(result.GetValue()); + WriteLine(result.GetValue()); } /// @@ -50,28 +43,32 @@ private static async Task RunInferenceApiExampleAsync() /// Note: Your Hugging Face account email address MUST match the email you provide on the Meta website, or your request will not be approved. /// /// - private static async Task RunLlamaExampleAsync() + [Fact(Skip = "Requires local model or Huggingface Pro subscription")] + public async Task RunLlamaExampleAsync() { - Console.WriteLine("\n======== HuggingFace Llama 2 example ========\n"); + WriteLine("\n======== HuggingFace Llama 2 example ========\n"); // HuggingFace Llama 2 model: https://huggingface.co/meta-llama/Llama-2-7b-hf const string Model = "meta-llama/Llama-2-7b-hf"; // HuggingFace local HTTP server endpoint - const string Endpoint = "http://localhost:5000/completions"; + // const string Endpoint = "http://localhost:5000/completions"; - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithHuggingFaceTextCompletionService( + Kernel kernel = Kernel.CreateBuilder() + .AddHuggingFaceTextGeneration( model: Model, - endpoint: Endpoint, + //endpoint: Endpoint, apiKey: TestConfiguration.HuggingFace.ApiKey) .Build(); - var questionAnswerFunction = kernel.CreateSemanticFunction("Question: {{$input}}; Answer:"); + var questionAnswerFunction = kernel.CreateFunctionFromPrompt("Question: {{$input}}; Answer:"); - var result = await kernel.RunAsync("What is New York?", questionAnswerFunction); + var result = await kernel.InvokeAsync(questionAnswerFunction, new() { ["input"] = "What is New York?" }); - Console.WriteLine(result.GetValue()); + WriteLine(result.GetValue()); + } + + public Example20_HuggingFace(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example21_ChatGPTPlugins.cs b/dotnet/samples/KernelSyntaxExamples/Example21_ChatGPTPlugins.cs deleted file mode 100644 index e0441d62e7b8..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example21_ChatGPTPlugins.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. 
All rights reserved. - -using System; -using System.Net.Http; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Orchestration; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example21_ChatGptPlugins -{ - public static async Task RunAsync() - { - await RunChatGptPluginAsync(); - } - - private static async Task RunChatGptPluginAsync() - { - var kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); - - //This HTTP client is optional. SK will fallback to a default internal one if omitted. - using HttpClient httpClient = new(); - - //Import a ChatGPT plugin via URI - var plugin = await kernel.ImportPluginFunctionsAsync("", new Uri(""), new OpenApiFunctionExecutionParameters(httpClient)); - - //Add arguments for required parameters, arguments for optional ones can be skipped. - var contextVariables = new ContextVariables(); - contextVariables.Set("", ""); - - //Run - var kernelResult = await kernel.RunAsync(contextVariables, plugin[""]); - - var result = kernelResult.GetValue(); - - Console.WriteLine("Function execution result: {0}", result?.Content?.ToString()); - Console.ReadLine(); - - //--------------- Example of using Klarna ChatGPT plugin ------------------------ - - //var kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); - - //var plugin = await kernel.ImportPluginFunctionsAsync("Klarna", new Uri("https://www.klarna.com/.well-known/ai-plugin.json")); - - //var contextVariables = new ContextVariables(); - //contextVariables.Set("q", "Laptop"); // A precise query that matches one very small category or product that needs to be searched for to find the products the user is looking for. If the user explicitly stated what they want, use that as a query. The query is as specific as possible to the product name or category mentioned by the user in its singular form, and don't contain any clarifiers like latest, newest, cheapest, budget, premium, expensive or similar. The query is always taken from the latest topic, if there is a new topic a new query is started. - //contextVariables.Set("size", "3"); // number of products returned - //contextVariables.Set("budget", "200"); // maximum price of the matching product in local currency, filters results - //contextVariables.Set("countryCode", "US");// ISO 3166 country code with 2 characters based on the user location. Currently, only US, GB, DE, SE and DK are supported. - - //var kernelResult = await kernel.RunAsync(contextVariables, plugin["productsUsingGET"]); - - //var result = kernelResult.GetValue(); - - //Console.WriteLine("Function execution result: {0}", result?.Content?.ToString()); - //Console.ReadLine(); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example21_OpenAIPlugins.cs b/dotnet/samples/KernelSyntaxExamples/Example21_OpenAIPlugins.cs new file mode 100644 index 000000000000..5f0c7a1d68ab --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example21_OpenAIPlugins.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Net.Http; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public class Example21_OpenAIPlugins : BaseTest +{ + /// + /// Generic template on how to call OpenAI plugins + /// + [Fact(Skip = "Run it only after filling the template below")] + public async Task RunOpenAIPluginAsync() + { + Kernel kernel = new(); + + // This HTTP client is optional. SK will fallback to a default internal one if omitted. + using HttpClient httpClient = new(); + + // Import an Open AI plugin via URI + var plugin = await kernel.ImportPluginFromOpenAIAsync("", new Uri(""), new OpenAIFunctionExecutionParameters(httpClient)); + + // Add arguments for required parameters, arguments for optional ones can be skipped. + var arguments = new KernelArguments { [""] = "" }; + + // Run + var functionResult = await kernel.InvokeAsync(plugin[""], arguments); + + var result = functionResult.GetValue(); + + WriteLine($"Function execution result: {result?.Content}"); + } + + [Fact] + public async Task CallKlarnaAsync() + { + Kernel kernel = new(); + + var plugin = await kernel.ImportPluginFromOpenAIAsync("Klarna", new Uri("https://www.klarna.com/.well-known/ai-plugin.json")); + + var arguments = new KernelArguments(); + arguments["q"] = "Laptop"; // Category or product that needs to be searched for. + arguments["size"] = "3"; // Number of products to return + arguments["budget"] = "200"; // Maximum price of the matching product in local currency + arguments["countryCode"] = "US";// ISO 3166 country code with 2 characters based on the user location. + // Currently, only US, GB, DE, SE and DK are supported. + + var functionResult = await kernel.InvokeAsync(plugin["productsUsingGET"], arguments); + + var result = functionResult.GetValue(); + + WriteLine($"Function execution result: {result?.Content}"); + } + + public Example21_OpenAIPlugins(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example22_OpenAIPlugin_AzureKeyVault.cs b/dotnet/samples/KernelSyntaxExamples/Example22_OpenAIPlugin_AzureKeyVault.cs new file mode 100644 index 000000000000..14e914a9e260 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example22_OpenAIPlugin_AzureKeyVault.cs @@ -0,0 +1,268 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Net.Mime; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Resources; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public class Example22_OpenAIPlugin_AzureKeyVault : BaseTest +{ + private const string SecretName = "Foo"; + private const string SecretValue = "Bar"; + + /// + /// This example demonstrates how to connect an Azure Key Vault plugin to the Semantic Kernel. + /// To use this example, there are a few requirements: + /// 1. Register a client application with the Microsoft identity platform. + /// https://learn.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app + /// + /// 2. Create an Azure Key Vault + /// https://learn.microsoft.com/en-us/azure/key-vault/general/quick-create-portal + /// + /// 3. 
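// A hedged aside on Example 21 above: an imported plugin is a KernelPlugin, which enumerates its
// KernelFunction entries, so the available operations can be listed before one is invoked. The loop
// below is illustrative and assumes the `plugin` variable returned by ImportPluginFromOpenAIAsync.
foreach (var function in plugin)
{
    WriteLine($"{function.Name}: {function.Description}");

    foreach (var parameter in function.Metadata.Parameters)
    {
        WriteLine($"  - {parameter.Name} (required: {parameter.IsRequired})");
    }
}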
Add a permission for Azure Key Vault to your client application + /// https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-configure-app-access-web-apis + /// + /// 4. Set your Key Vault endpoint, client ID, and client secret as user secrets using: + /// dotnet user-secrets set "KeyVault:Endpoint" "your_endpoint" + /// dotnet user-secrets set "KeyVault:ClientId" "your_client_id" + /// dotnet user-secrets set "KeyVault:ClientSecret" "your_secret" + /// + /// 5. Replace your tenant ID with the "TENANT_ID" placeholder in dotnet/samples/KernelSyntaxExamples/Resources/22-ai-plugin.json + /// + [Fact(Skip = "Setup credentials")] + public async Task RunAsync() + { + var authenticationProvider = new OpenAIAuthenticationProvider( + new Dictionary>() + { + { + "login.microsoftonline.com", + new Dictionary() + { + { "client_id", TestConfiguration.KeyVault.ClientId }, + { "client_secret", TestConfiguration.KeyVault.ClientSecret }, + { "grant_type", "client_credentials" } + } + } + } + ); + + Kernel kernel = new(); + + var openApiSpec = EmbeddedResource.Read("22-openapi.json"); + using var messageStub = new HttpMessageHandlerStub(openApiSpec); + using var httpClient = new HttpClient(messageStub); + + // Import Open AI Plugin + var openAIManifest = EmbeddedResource.ReadStream("22-ai-plugin.json"); + var plugin = await kernel.ImportPluginFromOpenAIAsync( + "AzureKeyVaultPlugin", + openAIManifest!, + new OpenAIFunctionExecutionParameters + { + AuthCallback = authenticationProvider.AuthenticateRequestAsync, + HttpClient = httpClient, + EnableDynamicPayload = true, + ServerUrlOverride = new Uri(TestConfiguration.KeyVault.Endpoint) + }); + + await AddSecretToAzureKeyVaultAsync(kernel, plugin); + await GetSecretFromAzureKeyVaultWithRetryAsync(kernel, plugin); + } + + private async Task AddSecretToAzureKeyVaultAsync(Kernel kernel, KernelPlugin plugin) + { + // Add arguments for required parameters, arguments for optional ones can be skipped. + var arguments = new KernelArguments + { + ["secret-name"] = SecretName, + ["value"] = SecretValue, + ["api-version"] = "7.0", + ["enabled"] = "true", + }; + + // Run + var functionResult = await kernel.InvokeAsync(plugin["SetSecret"], arguments); + + var result = functionResult.GetValue(); + + Console.WriteLine("SetSecret function result: {0}", result?.Content?.ToString()); + } + + private static async Task GetSecretFromAzureKeyVaultWithRetryAsync(Kernel kernel, KernelPlugin plugin) + { + // Add arguments for required parameters, arguments for optional ones can be skipped. + var arguments = new KernelArguments(); + arguments["secret-name"] = SecretName; + arguments["api-version"] = "7.0"; + + // Run + var functionResult = await kernel.InvokeAsync(plugin["GetSecret"], arguments); + + var result = functionResult.GetValue(); + + Console.WriteLine("GetSecret function result: {0}", result?.Content?.ToString()); + } + + public Example22_OpenAIPlugin_AzureKeyVault(ITestOutputHelper output) : base(output) + { + } +} + +#region Utility Classes + +/// +/// Provides authentication for HTTP requests to OpenAI using OAuth or verification tokens. +/// +internal sealed class OpenAIAuthenticationProvider +{ + private readonly Dictionary> _oAuthValues; + private readonly Dictionary _credentials; + + /// + /// Creates an instance of the class. + /// + /// A dictionary containing OAuth values for each authentication scheme. + /// A dictionary containing credentials for each authentication scheme. + public OpenAIAuthenticationProvider(Dictionary>? 
oAuthValues = null, Dictionary? credentials = null) + { + this._oAuthValues = oAuthValues ?? new(); + this._credentials = credentials ?? new(); + } + + /// + /// Applies the authentication content to the provided HTTP request message. + /// + /// The HTTP request message. + /// Name of the plugin + /// The used to authenticate. + /// The cancellation token. + public async Task AuthenticateRequestAsync(HttpRequestMessage request, string pluginName, OpenAIAuthenticationConfig openAIAuthConfig, CancellationToken cancellationToken = default) + { + if (openAIAuthConfig.Type == OpenAIAuthenticationType.None) + { + return; + } + + string scheme = ""; + string credential = ""; + + if (openAIAuthConfig.Type == OpenAIAuthenticationType.OAuth) + { + var domainOAuthValues = this._oAuthValues[openAIAuthConfig.AuthorizationUrl!.Host] + ?? throw new KernelException("No OAuth values found for the provided authorization URL."); + + var values = new Dictionary(domainOAuthValues) { + { "scope", openAIAuthConfig.Scope ?? "" }, + }; + + using HttpContent? requestContent = openAIAuthConfig.AuthorizationContentType switch + { + "application/x-www-form-urlencoded" => new FormUrlEncodedContent(values), + "application/json" => new StringContent(JsonSerializer.Serialize(values), Encoding.UTF8, "application/json"), + _ => throw new KernelException($"Unsupported authorization content type: {openAIAuthConfig.AuthorizationContentType}"), + }; + + // Request the token + using var client = new HttpClient(); + using var authRequest = new HttpRequestMessage(HttpMethod.Post, openAIAuthConfig.AuthorizationUrl) { Content = requestContent }; + var response = await client.SendAsync(authRequest, cancellationToken).ConfigureAwait(false); + + response.EnsureSuccessStatusCode(); + + // Read the token + var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + OAuthTokenResponse? tokenResponse; + try + { + tokenResponse = JsonSerializer.Deserialize(responseContent); + } + catch (JsonException) + { + throw new KernelException($"Failed to deserialize token response from {openAIAuthConfig.AuthorizationUrl}."); + } + + // Get the token type and value + scheme = tokenResponse?.TokenType ?? throw new KernelException("No token type found in the response."); + credential = tokenResponse?.AccessToken ?? throw new KernelException("No access token found in the response."); + } + else + { + var token = openAIAuthConfig.VerificationTokens?[pluginName] + ?? throw new KernelException("No verification token found for the provided plugin name."); + + scheme = openAIAuthConfig.AuthorizationType.ToString(); + credential = token; + } + + request.Headers.Authorization = new AuthenticationHeaderValue(scheme, credential); + } +} + +/// +/// Represents the authentication section for an OpenAI plugin. +/// +internal sealed class OAuthTokenResponse +{ + /// + /// The type of access token. + /// + [JsonPropertyName("token_type")] + public string TokenType { get; set; } = ""; + + /// + /// The authorization scope. 
+ /// + [JsonPropertyName("access_token")] + public string AccessToken { get; set; } = ""; +} + +internal sealed class HttpMessageHandlerStub : DelegatingHandler +{ + public HttpResponseMessage ResponseToReturn { get; set; } + + public HttpMessageHandlerStub(string responseToReturn) + { + this.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(responseToReturn, Encoding.UTF8, MediaTypeNames.Application.Json) + }; + } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (request.RequestUri!.Scheme.Equals("file", StringComparison.OrdinalIgnoreCase)) + { + return this.ResponseToReturn; + } + + using var httpClient = new HttpClient(); + using var newRequest = new HttpRequestMessage() // construct a new request because the same one cannot be sent twice + { + Content = request.Content, + Method = request.Method, + RequestUri = request.RequestUri, + }; + + foreach (var header in request.Headers) + { + newRequest.Headers.Add(header.Key, header.Value); + } + return await httpClient.SendAsync(newRequest, cancellationToken).ConfigureAwait(false); + } +} + +#endregion diff --git a/dotnet/samples/KernelSyntaxExamples/Example22_OpenApiPlugin_AzureKeyVault.cs b/dotnet/samples/KernelSyntaxExamples/Example22_OpenApiPlugin_AzureKeyVault.cs deleted file mode 100644 index b92be76a5239..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example22_OpenApiPlugin_AzureKeyVault.cs +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; -using Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Functions.OpenAPI.Plugins; -using Microsoft.SemanticKernel.Orchestration; -using RepoUtils; - -#pragma warning disable CA1861 // Avoid constant arrays as arguments -// ReSharper disable once InconsistentNaming -public static class Example22_OpenApiPlugin_AzureKeyVault -{ - public static async Task RunAsync() - { - // To run this example, you must register a client application with the Microsoft identity platform. 
- // Instructions here: https://learn.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app - var authenticationProvider = new InteractiveMsalAuthenticationProvider( - TestConfiguration.KeyVault.ClientId, - TestConfiguration.KeyVault.TenantId, - new[] { "https://vault.azure.net/.default" }, - new Uri("http://localhost")); - - await GetSecretFromAzureKeyVaultWithRetryAsync(authenticationProvider); - - await AddSecretToAzureKeyVaultAsync(authenticationProvider); - } - - public static async Task GetSecretFromAzureKeyVaultWithRetryAsync(InteractiveMsalAuthenticationProvider authenticationProvider) - { - var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithRetryBasic(new() - { - MaxRetryCount = 3, - UseExponentialBackoff = true - }) - .Build(); - - var type = typeof(PluginResourceNames); - var resourceName = $"{PluginResourceNames.AzureKeyVault}.openapi.json"; - - var stream = type.Assembly.GetManifestResourceStream(type, resourceName); - - // Import AI Plugin - var plugin = await kernel.ImportPluginFunctionsAsync( - PluginResourceNames.AzureKeyVault, - stream!, - new OpenApiFunctionExecutionParameters { AuthCallback = authenticationProvider.AuthenticateRequestAsync }); - - // Add arguments for required parameters, arguments for optional ones can be skipped. - var contextVariables = new ContextVariables(); - contextVariables.Set("server-url", TestConfiguration.KeyVault.Endpoint); - contextVariables.Set("secret-name", ""); - contextVariables.Set("api-version", "7.0"); - - // Run - var kernelResult = await kernel.RunAsync(contextVariables, plugin["GetSecret"]); - - var result = kernelResult.GetValue(); - - Console.WriteLine("GetSecret function result: {0}", result?.Content?.ToString()); - } - - public static async Task AddSecretToAzureKeyVaultAsync(InteractiveMsalAuthenticationProvider authenticationProvider) - { - var kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); - - var type = typeof(PluginResourceNames); - var resourceName = $"{PluginResourceNames.AzureKeyVault}.openapi.json"; - - var stream = type.Assembly.GetManifestResourceStream(type, resourceName); - - // Import AI Plugin - var plugin = await kernel.ImportPluginFunctionsAsync( - PluginResourceNames.AzureKeyVault, - stream!, - new OpenApiFunctionExecutionParameters - { - AuthCallback = authenticationProvider.AuthenticateRequestAsync, - EnableDynamicPayload = true - }); - - // Add arguments for required parameters, arguments for optional ones can be skipped. - var contextVariables = new ContextVariables(); - contextVariables.Set("server-url", TestConfiguration.KeyVault.Endpoint); - contextVariables.Set("secret-name", ""); - contextVariables.Set("api-version", "7.0"); - contextVariables.Set("value", ""); - contextVariables.Set("enabled", ""); - - // Run - var kernelResult = await kernel.RunAsync(contextVariables, plugin["SetSecret"]); - - var result = kernelResult.GetValue(); - - Console.WriteLine("SetSecret function result: {0}", result?.Content?.ToString()); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example23_OpenApiPlugin_Github.cs b/dotnet/samples/KernelSyntaxExamples/Example23_OpenApiPlugin_Github.cs deleted file mode 100644 index f7872ce320a5..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example23_OpenApiPlugin_Github.cs +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; -using Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Orchestration; -using Newtonsoft.Json.Linq; -using RepoUtils; - -/// -/// Import and run GitHub Functions using OpenAPI Plugin. -/// To use this example, run: -/// dotnet user-secrets set "Github.PAT" "github_pat_..." -/// Make sure your GitHub PAT has read permissions set for Pull Requests. -/// Creating a PAT: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token -/// -// ReSharper disable once InconsistentNaming -public static class Example23_OpenApiPlugin_GitHub -{ - public static async Task RunAsync() - { - var authenticationProvider = new BearerAuthenticationProvider(() => { return Task.FromResult(TestConfiguration.Github.PAT); }); - Console.WriteLine("== Example23_OpenApiPlugin_GitHub =="); - var firstPRNumber = await ListPullRequestsFromGitHubAsync(authenticationProvider); - await GetPullRequestFromGitHubAsync(authenticationProvider, firstPRNumber); - } - - public static async Task ListPullRequestsFromGitHubAsync(BearerAuthenticationProvider authenticationProvider) - { - var kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); - - var plugin = await kernel.ImportPluginFunctionsAsync( - "GitHubPlugin", - "../../../../../../samples/dotnet/OpenApiPluginsExample/GitHubPlugin/openapi.json", - new OpenApiFunctionExecutionParameters { AuthCallback = authenticationProvider.AuthenticateRequestAsync }); - - // Add arguments for required parameters, arguments for optional ones can be skipped. - var contextVariables = new ContextVariables(); - contextVariables.Set("owner", "microsoft"); - contextVariables.Set("repo", "semantic-kernel"); - - // Run - var kernelResult = await kernel.RunAsync(contextVariables, plugin["PullList"]); - - Console.WriteLine("Successful GitHub List Pull Requests plugin response."); - var response = kernelResult.GetValue(); - if (response != null) - { - var pullRequests = JArray.Parse(response.Content?.ToString() ?? "null"); - - if (pullRequests != null && pullRequests.First != null) - { - var number = pullRequests.First["number"]; - return number?.ToString() ?? string.Empty; - } - } - Console.WriteLine("No pull requests found."); - - return string.Empty; - } - - public static async Task GetPullRequestFromGitHubAsync(BearerAuthenticationProvider authenticationProvider, string pullNumber) - { - var kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); - - var plugin = await kernel.ImportPluginFunctionsAsync( - "GitHubPlugin", - "../../../../../../samples/dotnet/OpenApiPluginsExample/GitHubPlugin/openapi.json", - new OpenApiFunctionExecutionParameters { AuthCallback = authenticationProvider.AuthenticateRequestAsync }); - - // Add arguments for required parameters, arguments for optional ones can be skipped. 
- var contextVariables = new ContextVariables(); - contextVariables.Set("owner", "microsoft"); - contextVariables.Set("repo", "semantic-kernel"); - contextVariables.Set("pull_number", pullNumber); - - // Run - var kernelResult = await kernel.RunAsync(contextVariables, plugin["PullsGet"]); - - Console.WriteLine("Successful GitHub Get Pull Request plugin response: {0}", kernelResult.GetValue()?.Content); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example24_OpenApiPlugin_Jira.cs b/dotnet/samples/KernelSyntaxExamples/Example24_OpenApiPlugin_Jira.cs index 41c4188e2146..c484d040722c 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example24_OpenApiPlugin_Jira.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example24_OpenApiPlugin_Jira.cs @@ -2,34 +2,54 @@ using System; using System.Collections.Generic; +using System.Linq; using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Text.Json; +using System.Threading; using System.Threading.Tasks; +using Microsoft.Identity.Client; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; -using Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; -using Microsoft.SemanticKernel.Orchestration; - -using Newtonsoft.Json; -using RepoUtils; - -/// -/// This sample shows how to connect the Semantic Kernel to Jira as an Open Api plugin based on the Open Api schema. -/// This format of registering the plugin and its operations, and subsequently executing those operations can be applied -/// to an Open Api plugin that follows the Open Api Schema. -/// -// ReSharper disable once InconsistentNaming -public static class Example24_OpenApiPlugin_Jira +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public class Example24_OpenApiPlugin_Jira : BaseTest { - public static async Task RunAsync() + private static readonly JsonSerializerOptions s_jsonOptionsCache = new() + { + WriteIndented = true + }; + + /// + /// This sample shows how to connect the Semantic Kernel to Jira as an Open API plugin based on the Open API schema. + /// This format of registering the plugin and its operations, and subsequently executing those operations can be applied + /// to an Open API plugin that follows the Open API Schema. + /// To use this example, there are a few requirements: + /// 1. You must have a Jira instance that you can authenticate to with your email and api key. + /// Follow the instructions here to get your api key: + /// https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/ + /// 2. You must create a new project in your Jira instance and create two issues named TEST-1 and TEST-2 respectively. + /// Follow the instructions here to create a new project and issues: + /// https://support.atlassian.com/jira-software-cloud/docs/create-a-new-project/ + /// https://support.atlassian.com/jira-software-cloud/docs/create-an-issue-and-a-sub-task/ + /// 3. You can find your domain under the "Products" tab in your account management page. + /// To go to your account management page, click on your profile picture in the top right corner of your Jira + /// instance then select "Manage account". + /// 4. Configure the secrets as described by the ReadMe.md in the dotnet/samples/KernelSyntaxExamples folder. 
+ /// + [Fact(Skip = "Setup credentials")] + public async Task RunAsync() { - var kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); - var contextVariables = new ContextVariables(); + Kernel kernel = new(); // Change to a jira instance you have access to with your authentication credentials string serverUrl = $"https://{TestConfiguration.Jira.Domain}.atlassian.net/rest/api/latest/"; - contextVariables.Set("server-url", serverUrl); - IDictionary jiraFunctions; + KernelPlugin jiraFunctions; var tokenProvider = new BasicAuthenticationProvider(() => { string s = $"{TestConfiguration.Jira.Email}:{TestConfiguration.Jira.ApiKey}"; @@ -43,39 +63,201 @@ public static async Task RunAsync() if (useLocalFile) { var apiPluginFile = "./../../../Plugins/JiraPlugin/openapi.json"; - jiraFunctions = await kernel.ImportPluginFunctionsAsync("jiraPlugin", apiPluginFile, new OpenApiFunctionExecutionParameters(authCallback: tokenProvider.AuthenticateRequestAsync)); + jiraFunctions = await kernel.ImportPluginFromOpenApiAsync( + "jiraPlugin", + apiPluginFile, + new OpenApiFunctionExecutionParameters( + authCallback: tokenProvider.AuthenticateRequestAsync, + serverUrlOverride: new Uri(serverUrl) + ) + ); } else { var apiPluginRawFileURL = new Uri("https://raw.githubusercontent.com/microsoft/PowerPlatformConnectors/dev/certified-connectors/JIRA/apiDefinition.swagger.json"); - jiraFunctions = await kernel.ImportPluginFunctionsAsync("jiraPlugin", apiPluginRawFileURL, new OpenApiFunctionExecutionParameters(httpClient, tokenProvider.AuthenticateRequestAsync)); + jiraFunctions = await kernel.ImportPluginFromOpenApiAsync( + "jiraPlugin", + apiPluginRawFileURL, + new OpenApiFunctionExecutionParameters( + httpClient, tokenProvider.AuthenticateRequestAsync, + serverUrlOverride: new Uri(serverUrl) + ) + ); } + var arguments = new KernelArguments(); + // GetIssue Function + // Set Properties for the Get Issue operation in the openAPI.swagger.json + // Make sure the issue exists in your Jira instance or it will return a 404 + arguments["issueKey"] = "TEST-1"; + + // Run operation via the semantic kernel + var result = await kernel.InvokeAsync(jiraFunctions["GetIssue"], arguments); + + WriteLine("\n\n\n"); + var formattedContent = JsonSerializer.Serialize( + result.GetValue(), s_jsonOptionsCache); + WriteLine($"GetIssue jiraPlugin response: \n{formattedContent}"); + + // AddComment Function + arguments["issueKey"] = "TEST-2"; + arguments[RestApiOperation.PayloadArgumentName] = "{\"body\": \"Here is a rad comment\"}"; + + // Run operation via the semantic kernel + result = await kernel.InvokeAsync(jiraFunctions["AddComment"], arguments); + + WriteLine("\n\n\n"); + + formattedContent = JsonSerializer.Serialize(result.GetValue(), s_jsonOptionsCache); + WriteLine($"AddComment jiraPlugin response: \n{formattedContent}"); + } + + #region Example of authentication providers + + /// + /// Retrieves authentication content (e.g. username/password, API key) via the provided delegate and + /// applies it to HTTP requests using the "basic" authentication scheme. + /// + public class BasicAuthenticationProvider + { + private readonly Func> _credentials; + + /// + /// Creates an instance of the class. + /// + /// Delegate for retrieving credentials. + public BasicAuthenticationProvider(Func> credentials) + { + this._credentials = credentials; + } + + /// + /// Applies the authentication content to the provided HTTP request message. + /// + /// The HTTP request message. + /// The cancellation token. 
+ public async Task AuthenticateRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken = default) + { + // Base64 encode + string encodedContent = Convert.ToBase64String(Encoding.UTF8.GetBytes(await this._credentials().ConfigureAwait(false))); + request.Headers.Authorization = new AuthenticationHeaderValue("Basic", encodedContent); + } + } + + /// + /// Retrieves a token via the provided delegate and applies it to HTTP requests using the + /// "bearer" authentication scheme. + /// + public class BearerAuthenticationProvider + { + private readonly Func> _bearerToken; + + /// + /// Creates an instance of the class. + /// + /// Delegate to retrieve the bearer token. + public BearerAuthenticationProvider(Func> bearerToken) { - // Set Properties for the Get Issue operation in the openAPI.swagger.json - contextVariables.Set("issueKey", "SKTES-2"); + this._bearerToken = bearerToken; + } - // Run operation via the semantic kernel - var result = await kernel.RunAsync(contextVariables, jiraFunctions["GetIssue"]); + /// + /// Applies the token to the provided HTTP request message. + /// + /// The HTTP request message. + public async Task AuthenticateRequestAsync(HttpRequestMessage request) + { + var token = await this._bearerToken().ConfigureAwait(false); + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); + } + } - Console.WriteLine("\n\n\n"); - var formattedContent = JsonConvert.SerializeObject(JsonConvert.DeserializeObject(result.GetValue()!), Formatting.Indented); - Console.WriteLine("GetIssue jiraPlugin response: \n{0}", formattedContent); + /// + /// Uses the Microsoft Authentication Library (MSAL) to authenticate HTTP requests. + /// + public class InteractiveMsalAuthenticationProvider : BearerAuthenticationProvider + { + /// + /// Creates an instance of the class. + /// + /// Client ID of the caller. + /// Tenant ID of the target resource. + /// Requested scopes. + /// Redirect URI. + public InteractiveMsalAuthenticationProvider(string clientId, string tenantId, string[] scopes, Uri redirectUri) + : base(() => GetTokenAsync(clientId, tenantId, scopes, redirectUri)) + { } - // AddComment Function + /// + /// Gets an access token using the Microsoft Authentication Library (MSAL). + /// + /// Client ID of the caller. + /// Tenant ID of the target resource. + /// Requested scopes. + /// Redirect URI. + /// Access token. + private static async Task GetTokenAsync(string clientId, string tenantId, string[] scopes, Uri redirectUri) { - // Set Properties for the AddComment operation in the openAPI.swagger.json - contextVariables.Set("issueKey", "SKTES-1"); - contextVariables.Set("body", "Here is a rad comment"); + IPublicClientApplication app = PublicClientApplicationBuilder.Create(clientId) + .WithRedirectUri(redirectUri.ToString()) + .WithTenantId(tenantId) + .Build(); - // Run operation via the semantic kernel - var result = await kernel.RunAsync(contextVariables, jiraFunctions["AddComment"]); + IEnumerable accounts = await app.GetAccountsAsync().ConfigureAwait(false); + AuthenticationResult result; + try + { + result = await app.AcquireTokenSilent(scopes, accounts.FirstOrDefault()) + .ExecuteAsync().ConfigureAwait(false); + } + catch (MsalUiRequiredException) + { + // A MsalUiRequiredException happened on AcquireTokenSilent. 
+ // This indicates you need to call AcquireTokenInteractive to acquire a token + result = await app.AcquireTokenInteractive(scopes) + .ExecuteAsync().ConfigureAwait(false); + } - Console.WriteLine("\n\n\n"); - var formattedContent = JsonConvert.SerializeObject(JsonConvert.DeserializeObject(result.GetValue()!), Formatting.Indented); - Console.WriteLine("AddComment jiraPlugin response: \n{0}", formattedContent); + return result.AccessToken; } } + + /// + /// Retrieves authentication content (scheme and value) via the provided delegate and applies it to HTTP requests. + /// + public sealed class CustomAuthenticationProvider + { + private readonly Func> _header; + private readonly Func> _value; + + /// + /// Creates an instance of the class. + /// + /// Delegate for retrieving the header name. + /// Delegate for retrieving the value. + public CustomAuthenticationProvider(Func> header, Func> value) + { + this._header = header; + this._value = value; + } + + /// + /// Applies the header and value to the provided HTTP request message. + /// + /// The HTTP request message. + public async Task AuthenticateRequestAsync(HttpRequestMessage request) + { + var header = await this._header().ConfigureAwait(false); + var value = await this._value().ConfigureAwait(false); + request.Headers.Add(header, value); + } + } + + #endregion + + public Example24_OpenApiPlugin_Jira(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example25_ReadOnlyMemoryStore.cs b/dotnet/samples/KernelSyntaxExamples/Example25_ReadOnlyMemoryStore.cs index b6304e8725c9..9c54af7e751c 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example25_ReadOnlyMemoryStore.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example25_ReadOnlyMemoryStore.cs @@ -10,12 +10,11 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel.Memory; +using Xunit; +using Xunit.Abstractions; -#pragma warning disable CA2201 // System.Exception is not sufficiently specific - this is a sample -#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously -#pragma warning disable CA1851 // Possible multiple enumerations of 'IEnumerable' collection +namespace Examples; -// ReSharper disable once InconsistentNaming /// /// This sample provides a custom implementation of that is read only. /// In this sample, the data is stored in a JSON string and deserialized into an @@ -23,26 +22,27 @@ /// of has a single collection, and thus does not need to be named. /// It also assumes that the JSON formatted data can be deserialized into objects. 
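// A hedged usage sketch for the CustomAuthenticationProvider defined in Example 24 above: it can
// inject an arbitrary header/value pair, e.g. an API-key header. The header name below is
// illustrative; the value reuses the Jira API key already configured for the example.
var apiKeyAuth = new CustomAuthenticationProvider(
    () => Task.FromResult("X-Api-Key"),
    () => Task.FromResult(TestConfiguration.Jira.ApiKey));

// To plug it into ImportPluginFromOpenApiAsync, adapt it to the (request, cancellationToken)
// callback shape used by OpenApiFunctionExecutionParameters earlier in the example:
//   authCallback: (request, _) => apiKeyAuth.AuthenticateRequestAsync(request)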
/// -public static class Example25_ReadOnlyMemoryStore +public class Example25_ReadOnlyMemoryStore : BaseTest { - public static async Task RunAsync() + [Fact] + public async Task RunAsync() { var store = new ReadOnlyMemoryStore(s_jsonVectorEntries); var embedding = new ReadOnlyMemory(new float[] { 22, 4, 6 }); - Console.WriteLine("Reading data from custom read-only memory store"); + WriteLine("Reading data from custom read-only memory store"); var memoryRecord = await store.GetAsync("collection", "key3"); if (memoryRecord != null) { - Console.WriteLine("ID = {0}, Embedding = {1}", memoryRecord.Metadata.Id, string.Join(", ", MemoryMarshal.ToEnumerable(memoryRecord.Embedding))); + WriteLine($"ID = {memoryRecord.Metadata.Id}, Embedding = {string.Join(", ", MemoryMarshal.ToEnumerable(memoryRecord.Embedding))}"); } - Console.WriteLine("Getting most similar vector to {0}", string.Join(", ", MemoryMarshal.ToEnumerable(embedding))); + WriteLine($"Getting most similar vector to {string.Join(", ", MemoryMarshal.ToEnumerable(embedding))}"); var result = await store.GetNearestMatchAsync("collection", embedding, 0.0); if (result.HasValue) { - Console.WriteLine("Embedding = {0}, Similarity = {1}", string.Join(", ", MemoryMarshal.ToEnumerable(result.Value.Item1.Embedding)), result.Value.Item2); + WriteLine($"ID = {string.Join(", ", MemoryMarshal.ToEnumerable(result.Value.Item1.Embedding))}, Embedding = {result.Value.Item2}"); } } @@ -241,4 +241,8 @@ public IAsyncEnumerable UpsertBatchAsync(string collectionName, IEnumera ""timestamp"": null } ]"; + + public Example25_ReadOnlyMemoryStore(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example26_AADAuth.cs b/dotnet/samples/KernelSyntaxExamples/Example26_AADAuth.cs index 3bd2c22d440c..3ad939e5f574 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example26_AADAuth.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example26_AADAuth.cs @@ -1,31 +1,32 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Threading.Tasks; using Azure.Identity; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using RepoUtils; +using Microsoft.SemanticKernel.ChatCompletion; +using Xunit; +using Xunit.Abstractions; -/** - * This example shows how to connect your app to Azure OpenAI using - * Azure Active Directory (AAD) authentication, as opposed to API keys. - * - * The example uses DefaultAzureCredential, which you can configure to support - * multiple authentication strategies: - * - * - Env vars present in Azure VMs - * - Azure Managed Identities - * - Shared tokens - * - etc. - */ +namespace Examples; -// ReSharper disable once InconsistentNaming -public static class Example26_AADAuth +/// +/// This example shows how to connect your app to Azure OpenAI using +/// Azure Active Directory(AAD) authentication, as opposed to API keys. +/// +/// The example uses , which you can configure to support +/// multiple authentication strategies: +/// +/// -Env vars present in Azure VMs +/// -Azure Managed Identities +/// -Shared tokens +/// -etc. 
+/// +public class Example26_AADAuth : BaseTest { - public static async Task RunAsync() + [Fact(Skip = "Setup credentials")] + public async Task RunAsync() { - Console.WriteLine("======== SK with AAD Auth ========"); + WriteLine("======== SK with AAD Auth ========"); // Optional: choose which authentication to support var authOptions = new DefaultAzureCredentialOptions @@ -42,25 +43,28 @@ public static async Task RunAsync() ExcludeAzurePowerShellCredential = true }; - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - // Add Azure chat completion service using DefaultAzureCredential AAD auth - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - new DefaultAzureCredential(authOptions)) + Kernel kernel = Kernel.CreateBuilder() + // Add Azure OpenAI chat completion service using DefaultAzureCredential AAD auth + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + credentials: new DefaultAzureCredential(authOptions)) .Build(); - IChatCompletion chatGPT = kernel.GetService(); - var chatHistory = chatGPT.CreateNewChat(); + IChatCompletionService chatGPT = kernel.GetRequiredService(); + var chatHistory = new ChatHistory(); // User message chatHistory.AddUserMessage("Tell me a joke about hourglasses"); // Bot reply - string reply = await chatGPT.GenerateMessageAsync(chatHistory); - Console.WriteLine(reply); + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + WriteLine(reply); /* Output: Why did the hourglass go to the doctor? Because it was feeling a little run down! */ } + + public Example26_AADAuth(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example27_PromptFunctionsUsingChatGPT.cs b/dotnet/samples/KernelSyntaxExamples/Example27_PromptFunctionsUsingChatGPT.cs new file mode 100644 index 000000000000..d2b83da3f517 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example27_PromptFunctionsUsingChatGPT.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example shows how to use GPT3.5 Chat model for prompts and prompt functions. 
+/// +public class Example27_PromptFunctionsUsingChatGPT : BaseTest +{ + [Fact] + public async Task RunAsync() + { + WriteLine("======== Using Chat GPT model for text generation ========"); + + Kernel kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .Build(); + + var func = kernel.CreateFunctionFromPrompt( + "List the two planets closest to '{{$input}}', excluding moons, using bullet points."); + + var result = await func.InvokeAsync(kernel, new() { ["input"] = "Jupiter" }); + WriteLine(result.GetValue()); + + /* + Output: + - Saturn + - Uranus + */ + } + + public Example27_PromptFunctionsUsingChatGPT(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example27_SemanticFunctionsUsingChatGPT.cs b/dotnet/samples/KernelSyntaxExamples/Example27_SemanticFunctionsUsingChatGPT.cs deleted file mode 100644 index 5d8bda833f4f..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example27_SemanticFunctionsUsingChatGPT.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using RepoUtils; - -/** - * This example shows how to use GPT3.5 Chat model for prompts and semantic functions. - */ -// ReSharper disable once InconsistentNaming -public static class Example27_SemanticFunctionsUsingChatGPT -{ - public static async Task RunAsync() - { - Console.WriteLine("======== Using Chat GPT model for text completion ========"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService(TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey) - .Build(); - - var func = kernel.CreateSemanticFunction( - "List the two planets closest to '{{$input}}', excluding moons, using bullet points."); - - var result = await func.InvokeAsync("Jupiter", kernel); - Console.WriteLine(result.GetValue()); - - /* - Output: - - Saturn - - Uranus - */ - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example28_ActionPlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example28_ActionPlanner.cs deleted file mode 100644 index 2944164eb10b..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example28_ActionPlanner.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
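A small aside on the new Example27 above: the same prompt function can also be created with explicit execution settings. This is only a sketch, not part of this change; the settings values are illustrative, and OpenAIPromptExecutionSettings is the type used by other samples in this PR.

// Sketch: the Example27 prompt function with explicit execution settings attached.
var settings = new OpenAIPromptExecutionSettings { MaxTokens = 256, Temperature = 0.2 };

var planets = kernel.CreateFunctionFromPrompt(
    "List the two planets closest to '{{$input}}', excluding moons, using bullet points.",
    settings);

var answer = await kernel.InvokeAsync(planets, new() { ["input"] = "Mars" });
WriteLine(answer.GetValue<string>());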
- -using System; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Planners; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example28_ActionPlanner -{ - public static async Task RunAsync() - { - Console.WriteLine("======== Action Planner ========"); - var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) - .Build(); - - string samplesDirectory = RepoFiles.SamplePluginsPath(); - kernel.ImportSemanticFunctionsFromDirectory(samplesDirectory, "SummarizePlugin"); - kernel.ImportSemanticFunctionsFromDirectory(samplesDirectory, "WriterPlugin"); - kernel.ImportSemanticFunctionsFromDirectory(samplesDirectory, "FunPlugin"); - - // Create an optional config for the ActionPlanner. Use this to exclude plugins and functions if needed - var config = new ActionPlannerConfig(); - config.ExcludedFunctions.Add("MakeAbstractReadable"); - - // Create an instance of ActionPlanner. - // The ActionPlanner takes one goal and returns a single function to execute. - var planner = new ActionPlanner(kernel, config: config); - - // We're going to ask the planner to find a function to achieve this goal. - var goal = "Write a joke about Cleopatra in the style of Hulk Hogan."; - - // The planner returns a plan, consisting of a single function - // to execute and achieve the goal requested. - var plan = await planner.CreatePlanAsync(goal); - - // Execute the full plan (which is a single function) - var result = await plan.InvokeAsync(kernel); - - // Show the result, which should match the given goal - Console.WriteLine(result.GetValue()); - - /* Output: - * - * Cleopatra was a queen - * But she didn't act like one - * She was more like a teen - - * She was always on the scene - * And she loved to be seen - * But she didn't have a queenly bone in her body - */ - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example30_ChatWithPrompts.cs b/dotnet/samples/KernelSyntaxExamples/Example30_ChatWithPrompts.cs index 567eef956c1e..f82940dad591 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example30_ChatWithPrompts.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example30_ChatWithPrompts.cs @@ -4,114 +4,104 @@ using System.Globalization; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.ChatCompletion; +using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Plugins.Core; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using RepoUtils; using Resources; - -/** - * Scenario: - * - the user is reading a wikipedia page, they select a piece of text and they ask AI to extract some information. - * - the app explicitly uses the Chat model to get a result. - * - * The following example shows how to: - * - * - Use the prompt template engine to render prompts, without executing them. - * This can be used to leverage the template engine (which executes functions internally) - * to generate prompts and use them programmatically, without executing them like semantic functions. - * - * - Use rendered prompts to create the context of System and User messages sent to Chat models - * like "gpt-3.5-turbo" - * - * Note: normally you would work with Semantic Functions to automatically send a prompt to a model - * and get a response. 
In this case we use the Chat model, sending a chat history object, which - * includes some instructions, some context (the text selected), and the user query. - * - * We use the prompt template engine to craft the strings with all of this information. - * - * Out of scope and not in the example: if needed, one could go further and use a semantic - * function (with extra cost) asking AI to generate the text to send to the Chat model. - * - * TLDR: how to render a prompt: - * - * var kernel = new KernelBuilder().WithLogger(ConsoleLogger.Logger).Build(); - * ... import plugins and functions ... - * var context = kernel.CreateNewContext(); - * ... set variables ... - * - * var promptRenderer = new BasicPromptTemplateEngine(); - * string renderedPrompt = await promptRenderer.RenderAsync("...prompt template...", context); - */ -// ReSharper disable CommentTypo -// ReSharper disable once InconsistentNaming -public static class Example30_ChatWithPrompts +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// Scenario: +/// - the user is reading a wikipedia page, they select a piece of text and they ask AI to extract some information. +/// - the app explicitly uses the Chat model to get a result. +/// +/// The following example shows how to: +/// +/// - Use the prompt template engine to render prompts, without executing them. +/// This can be used to leverage the template engine (which executes functions internally) +/// to generate prompts and use them programmatically, without executing them like prompt functions. +/// +/// - Use rendered prompts to create the context of System and User messages sent to Chat models +/// like "gpt-3.5-turbo" +/// +/// Note: normally you would work with Prompt Functions to automatically send a prompt to a model +/// and get a response. In this case we use the Chat model, sending a chat history object, which +/// includes some instructions, some context (the text selected), and the user query. +/// +/// We use the prompt template engine to craft the strings with all of this information. +/// +/// Out of scope and not in the example: if needed, one could go further and use a semantic +/// function (with extra cost) asking AI to generate the text to send to the Chat model. +/// +public class Example30_ChatWithPrompts : BaseTest { - public static async Task RunAsync() + [Fact] + public async Task RunAsync() { - Console.WriteLine("======== Chat with prompts ========"); + WriteLine("======== Chat with prompts ========"); /* Load 3 files: - * - 28-system-prompt.txt: the system prompt, used to initialize the chat session. - * - 28-user-context.txt: the user context, e.g. a piece of a document the user selected and is asking to process. - * - 28-user-prompt.txt: the user prompt, just for demo purpose showing that one can leverage the same approach also to augment user messages. + * - 30-system-prompt.txt: the system prompt, used to initialize the chat session. + * - 30-user-context.txt: the user context, e.g. a piece of a document the user selected and is asking to process. + * - 30-user-prompt.txt: the user prompt, just for demo purpose showing that one can leverage the same approach also to augment user messages. 
*/ var systemPromptTemplate = EmbeddedResource.Read("30-system-prompt.txt"); var selectedText = EmbeddedResource.Read("30-user-context.txt"); var userPromptTemplate = EmbeddedResource.Read("30-user-prompt.txt"); - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey, serviceId: "chat") + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey, serviceId: "chat") .Build(); // As an example, we import the time plugin, which is used in system prompt to read the current date. // We could also use a variable, this is just to show that the prompt can invoke functions. - kernel.ImportFunctions(new TimePlugin(), "time"); - - // We need a kernel context to store some information to pass to the prompts and the list - // of available plugins needed to render prompt templates. - var context = kernel.CreateNewContext(); + kernel.ImportPluginFromType("time"); - // Put the selected document into the variable used by the system prompt (see 28-system-prompt.txt). - context.Variables["selectedText"] = selectedText; + // Adding required arguments referenced by the prompt templates. + var arguments = new KernelArguments + { + // Put the selected document into the variable used by the system prompt (see 30-system-prompt.txt). + ["selectedText"] = selectedText, - // Demo another variable, e.g. when the chat started, used by the system prompt (see 28-system-prompt.txt). - context.Variables["startTime"] = DateTimeOffset.Now.ToString("hh:mm:ss tt zz", CultureInfo.CurrentCulture); + // Demo another variable, e.g. when the chat started, used by the system prompt (see 30-system-prompt.txt). + ["startTime"] = DateTimeOffset.Now.ToString("hh:mm:ss tt zz", CultureInfo.CurrentCulture), - // This is the user message, store it in the variable used by 28-user-prompt.txt - context.Variables["userMessage"] = "extract locations as a bullet point list"; + // This is the user message, store it in the variable used by 30-user-prompt.txt + ["userMessage"] = "extract locations as a bullet point list" + }; - // Instantiate the prompt renderer, which we will use to turn prompt templates + // Instantiate the prompt template factory, which we will use to turn prompt templates // into strings, that we will store into a Chat history object, which is then sent // to the Chat Model. - var promptRenderer = new BasicPromptTemplateEngine(); + var promptTemplateFactory = new KernelPromptTemplateFactory(); // Render the system prompt. This string is used to configure the chat. // This contains the context, ie a piece of a wikipedia page selected by the user. - string systemMessage = await promptRenderer.RenderAsync(systemPromptTemplate, context); - Console.WriteLine($"------------------------------------\n{systemMessage}"); + string systemMessage = await promptTemplateFactory.Create(new PromptTemplateConfig(systemPromptTemplate)).RenderAsync(kernel, arguments); + WriteLine($"------------------------------------\n{systemMessage}"); // Render the user prompt. 
This string is the query sent by the user // This contains the user request, ie "extract locations as a bullet point list" - string userMessage = await promptRenderer.RenderAsync(userPromptTemplate, context); - Console.WriteLine($"------------------------------------\n{userMessage}"); + string userMessage = await promptTemplateFactory.Create(new PromptTemplateConfig(userPromptTemplate)).RenderAsync(kernel, arguments); + WriteLine($"------------------------------------\n{userMessage}"); // Client used to request answers - var chatGPT = kernel.GetService(); + var chatCompletion = kernel.GetRequiredService(); // The full chat history. Depending on your scenario, you can pass the full chat if useful, // or create a new one every time, assuming that the "system message" contains all the // information needed. - var chatHistory = chatGPT.CreateNewChat(systemMessage); + var chatHistory = new ChatHistory(systemMessage); // Add the user query to the chat history chatHistory.AddUserMessage(userMessage); // Finally, get the response from AI - string answer = await chatGPT.GenerateMessageAsync(chatHistory); - Console.WriteLine($"------------------------------------\n{answer}"); + var answer = await chatCompletion.GetChatMessageContentAsync(chatHistory); + WriteLine($"------------------------------------\n{answer}"); /* @@ -136,4 +126,8 @@ You are an AI assistant that helps people find information. */ } + + public Example30_ChatWithPrompts(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example31_CustomPlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example31_CustomPlanner.cs deleted file mode 100644 index 8dd94d078651..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example31_CustomPlanner.cs +++ /dev/null @@ -1,378 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Threading.Tasks; -using System.Xml; -using System.Xml.XPath; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Planning; -using Microsoft.SemanticKernel.Plugins.Core; -using Microsoft.SemanticKernel.Plugins.Memory; -using Microsoft.SemanticKernel.Plugins.Web; -using Microsoft.SemanticKernel.Plugins.Web.Bing; - -using RepoUtils; - -// ReSharper disable CommentTypo -// ReSharper disable once InconsistentNaming -internal static class Example31_CustomPlanner -{ - public static async Task RunAsync() - { - Console.WriteLine("======== Custom Planner - Create and Execute Markup Plan ========"); - IKernel kernel = InitializeKernel(); - ISemanticTextMemory memory = InitializeMemory(); - - // ContextQuery is part of the QAPlugin - IDictionary qaPlugin = LoadQAPlugin(kernel); - SKContext context = CreateContextQueryContext(kernel); - - // Create a memory store using the VolatileMemoryStore and the embedding generator registered in the kernel - kernel.ImportFunctions(new TextMemoryPlugin(memory)); - - // Setup defined memories for recall - await RememberFactsAsync(kernel, memory); - - // MarkupPlugin named "markup" - var markup = kernel.ImportFunctions(new MarkupPlugin(), "markup"); - - // contextQuery "Who is my president? Who was president 3 years ago? 
What should I eat for dinner" | markup - // Create a plan to execute the ContextQuery and then run the markup plugin on the output - var plan = new Plan("Execute ContextQuery and then RunMarkup"); - plan.AddSteps(qaPlugin["ContextQuery"], markup["RunMarkup"]); - - // Execute plan - context.Variables.Update("Who is my president? Who was president 3 years ago? What should I eat for dinner"); - var result = await plan.InvokeAsync(context); - - Console.WriteLine("Result:"); - Console.WriteLine(result.GetValue()); - Console.WriteLine(); - } - /* Example Output - ======== Custom Planner - Create and Execute Markup Plan ======== - Markup: - Who is United States PresidentJoe Biden was president 3 years agoFor dinner, you might enjoy some sushi with your partner, since you both like it and you only ate it once this month - - Original plan: - Goal: Run a piece of xml markup - - Steps: - Goal: response - - Steps: - - bing.SearchAsync INPUT='Who is United States President' => markup.SearchAsync.result - Microsoft.SemanticKernel.Planning.Plan. INPUT='Joe Biden was president 3 years ago' => markup.fact.result - Microsoft.SemanticKernel.Planning.Plan. INPUT='For dinner, you might enjoy some sushi with your partner, since you both like it and you only ate it once this month' => markup.opinion.result - - Result: - The president of the United States ( POTUS) [A] is the head of state and head of government of the United States of America. The president directs the executive branch of the federal government and is the commander-in-chief of the United States Armed Forces . - Joe Biden was president 3 years ago - For dinner, you might enjoy some sushi with your partner, since you both like it and you only ate it once this month - */ - - private static SKContext CreateContextQueryContext(IKernel kernel) - { - var context = kernel.CreateNewContext(); - context.Variables.Set("firstname", "Jamal"); - context.Variables.Set("lastname", "Williams"); - context.Variables.Set("city", "Tacoma"); - context.Variables.Set("state", "WA"); - context.Variables.Set("country", "USA"); - context.Variables.Set("collection", "contextQueryMemories"); - context.Variables.Set("limit", "5"); - context.Variables.Set("relevance", "0.3"); - return context; - } - - private static async Task RememberFactsAsync(IKernel kernel, ISemanticTextMemory memory) - { - kernel.ImportFunctions(new TextMemoryPlugin(memory)); - - List memoriesToSave = new() - { - "I like pizza and chicken wings.", - "I ate pizza 10 times this month.", - "I ate chicken wings 3 time this month.", - "I ate sushi 1 time this month.", - "My partner likes sushi and chicken wings.", - "I like to eat dinner with my partner.", - "I am a software engineer.", - "I live in Tacoma, WA.", - "I have a dog named Tully.", - "I have a cat named Butters.", - }; - - foreach (var memoryToSave in memoriesToSave) - { - await memory.SaveInformationAsync("contextQueryMemories", memoryToSave, Guid.NewGuid().ToString()); - } - } - - // ContextQuery is part of the QAPlugin - // DependsOn: TimePlugin named "time" - // DependsOn: BingPlugin named "bing" - private static IDictionary LoadQAPlugin(IKernel kernel) - { - string folder = RepoFiles.SamplePluginsPath(); - kernel.ImportFunctions(new TimePlugin(), "time"); -#pragma warning disable CA2000 // Dispose objects before losing scope - var bing = new WebSearchEnginePlugin(new BingConnector(TestConfiguration.Bing.ApiKey)); -#pragma warning restore CA2000 // Dispose objects before losing scope - kernel.ImportFunctions(bing, "bing"); - - return 
kernel.ImportSemanticFunctionsFromDirectory(folder, "QAPlugin"); - } - - private static IKernel InitializeKernel() - { - return new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) - .WithAzureTextEmbeddingGenerationService( - TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) - .Build(); - } - - private static ISemanticTextMemory InitializeMemory() - { - return new MemoryBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureTextEmbeddingGenerationService( - TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) - .WithMemoryStore(new VolatileMemoryStore()) - .Build(); - } -} - -// Example Plugin that can process XML Markup created by ContextQuery -public class MarkupPlugin -{ - [SKFunction, Description("Run Markup")] - public async Task RunMarkupAsync(string docString, SKContext context) - { - var plan = docString.FromMarkup("Run a piece of xml markup", context); - - Console.WriteLine("Markup plan:"); - Console.WriteLine(plan.ToPlanWithGoalString()); - Console.WriteLine(); - - var result = await context.Runner.RunAsync(plan); - return result.GetValue()!; - } -} - -public static class XmlMarkupPlanParser -{ - private static readonly Dictionary> s_pluginMapping = new() - { - { "lookup", new KeyValuePair("bing", "SearchAsync") }, - }; - - public static Plan FromMarkup(this string markup, string goal, SKContext context) - { - Console.WriteLine("Markup:"); - Console.WriteLine(markup); - Console.WriteLine(); - - var doc = new XmlMarkup(markup); - var nodes = doc.SelectElements(); - return nodes.Count == 0 ? new Plan(goal) : NodeListToPlan(nodes, context, goal); - } - - private static Plan NodeListToPlan(XmlNodeList nodes, SKContext context, string description) - { - Plan plan = new(description); - for (var i = 0; i < nodes.Count; ++i) - { - var node = nodes[i]; - var functionName = node!.LocalName; - var pluginName = string.Empty; - - if (s_pluginMapping.TryGetValue(node!.LocalName, out KeyValuePair value)) - { - functionName = value.Value; - pluginName = value.Key; - } - - var hasChildElements = node.HasChildElements(); - - if (hasChildElements) - { - plan.AddSteps(NodeListToPlan(node.ChildNodes, context, functionName)); - } - else - { - if (string.IsNullOrEmpty(pluginName) - ? !context.Functions!.TryGetFunction(functionName, out var _) - : !context.Functions!.TryGetFunction(pluginName, functionName, out var _)) - { - var planStep = new Plan(node.InnerText); - planStep.Parameters.Update(node.InnerText); - planStep.Outputs.Add($"markup.{functionName}.result"); - plan.Outputs.Add($"markup.{functionName}.result"); - plan.AddSteps(planStep); - } - else - { - var command = string.IsNullOrEmpty(pluginName) - ? context.Functions.GetFunction(functionName) - : context.Functions.GetFunction(pluginName, functionName); - var planStep = new Plan(command); - planStep.Parameters.Update(node.InnerText); - planStep.Outputs.Add($"markup.{functionName}.result"); - plan.Outputs.Add($"markup.{functionName}.result"); - plan.AddSteps(planStep); - } - } - } - - return plan; - } -} - -#region Utility Classes - -public class XmlMarkup -{ - public XmlMarkup(string response, string? 
wrapperTag = null) - { - if (!string.IsNullOrEmpty(wrapperTag)) - { - response = $"<{wrapperTag}>{response}"; - } - - this.Document = new XmlDocument(); - this.Document.LoadXml(response); - } - - public XmlDocument Document { get; } - - public XmlNodeList SelectAllElements() - { - return this.Document.SelectNodes("//*")!; - } - - public XmlNodeList SelectElements() - { - return this.Document.SelectNodes("/*")!; - } -} - -#pragma warning disable CA1815 // Override equals and operator equals on value types -public struct XmlNodeInfo -{ - public int StackDepth { get; set; } - public XmlNode Parent { get; set; } - public XmlNode Node { get; set; } - - public static implicit operator XmlNode(XmlNodeInfo info) - { - return info.Node; - } -} -#pragma warning restore CA1815 - -#pragma warning disable CA1711 -public static class XmlEx -{ - public static bool HasChildElements(this XmlNode elt) - { - if (!elt.HasChildNodes) - { - return false; - } - - var childNodes = elt.ChildNodes; - for (int i = 0, count = childNodes.Count; i < count; ++i) - { - if (childNodes[i]?.NodeType == XmlNodeType.Element) - { - return true; - } - } - - return false; - } - - /// - /// Walks the Markup DOM using an XPathNavigator, allowing recursive descent WITHOUT requiring a Stack Hit - /// This is safe for very large and highly nested documents. - /// - public static IEnumerable EnumerateNodes(this XmlNode node, int maxStackDepth = 32) - { - var nav = node.CreateNavigator(); - return EnumerateNodes(nav!, maxStackDepth); - } - - public static IEnumerable EnumerateNodes(this XmlDocument doc, int maxStackDepth = 32) - { - var nav = doc.CreateNavigator(); - nav!.MoveToRoot(); - return EnumerateNodes(nav, maxStackDepth); - } - - public static IEnumerable EnumerateNodes(this XPathNavigator nav, int maxStackDepth = 32) - { - var info = new XmlNodeInfo - { - StackDepth = 0 - }; - var hasChildren = nav.HasChildren; - while (true) - { - info.Parent = (XmlNode)nav.UnderlyingObject!; - if (hasChildren && info.StackDepth < maxStackDepth) - { - nav.MoveToFirstChild(); - info.StackDepth++; - } - else - { - var hasParent = false; - while (hasParent = nav.MoveToParent()) - { - info.StackDepth--; - if (info.StackDepth == 0) - { - hasParent = false; - break; - } - - if (nav.MoveToNext()) - { - break; - } - } - - if (!hasParent) - { - break; - } - } - - do - { - info.Node = (XmlNode)nav.UnderlyingObject!; - yield return info; - if (hasChildren = nav.HasChildren) - { - break; - } - } while (nav.MoveToNext()); - } - } -} -#pragma warning restore CA1711 - -#endregion Utility Classes diff --git a/dotnet/samples/KernelSyntaxExamples/Example32_StreamingCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example32_StreamingCompletion.cs index 5ddeb8fbf33d..7adb053467da 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example32_StreamingCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example32_StreamingCompletion.cs @@ -1,51 +1,52 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; /** - * The following example shows how to use Semantic Kernel with streaming Text Completion. + * The following example shows how to use Semantic Kernel with streaming text completion. 
+ * + * This example will NOT work with regular chat completion models. It will only work with + * text completion models. * - * Note that all text completion models are deprecated by OpenAI and will be removed in a future release. + * Note that all text generation models are deprecated by OpenAI and will be removed in a future release. * * Refer to example 33 for streaming chat completion. */ -// ReSharper disable once InconsistentNaming -public static class Example32_StreamingCompletion +public class Example32_StreamingCompletion : BaseTest { - public static async Task RunAsync() - { - await AzureOpenAITextCompletionStreamAsync(); - await OpenAITextCompletionStreamAsync(); - } - - private static async Task AzureOpenAITextCompletionStreamAsync() + [Fact] + public Task AzureOpenAITextGenerationStreamAsync() { - Console.WriteLine("======== Azure OpenAI - Text Completion - Raw Streaming ========"); + WriteLine("======== Azure OpenAI - Text Completion - Raw Streaming ========"); - var textCompletion = new AzureTextCompletion( - TestConfiguration.AzureOpenAI.DeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey); + var textGeneration = new AzureOpenAITextGenerationService( + deploymentName: TestConfiguration.AzureOpenAI.DeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ModelId); - await TextCompletionStreamAsync(textCompletion); + return this.TextGenerationStreamAsync(textGeneration); } - private static async Task OpenAITextCompletionStreamAsync() + [Fact] + public Task OpenAITextGenerationStreamAsync() { - Console.WriteLine("======== Open AI - Text Completion - Raw Streaming ========"); + WriteLine("======== Open AI - Text Completion - Raw Streaming ========"); - var textCompletion = new OpenAITextCompletion("text-davinci-003", TestConfiguration.OpenAI.ApiKey); + var textGeneration = new OpenAITextGenerationService("gpt-3.5-turbo-instruct", TestConfiguration.OpenAI.ApiKey); - await TextCompletionStreamAsync(textCompletion); + return this.TextGenerationStreamAsync(textGeneration); } - private static async Task TextCompletionStreamAsync(ITextCompletion textCompletion) + private async Task TextGenerationStreamAsync(ITextGenerationService textGeneration) { - var requestSettings = new OpenAIRequestSettings() + var executionSettings = new OpenAIPromptExecutionSettings() { MaxTokens = 100, FrequencyPenalty = 0, @@ -56,12 +57,16 @@ private static async Task TextCompletionStreamAsync(ITextCompletion textCompleti var prompt = "Write one paragraph why AI is awesome"; - Console.WriteLine("Prompt: " + prompt); - await foreach (string message in textCompletion.CompleteStreamAsync(prompt, requestSettings)) + WriteLine("Prompt: " + prompt); + await foreach (var content in textGeneration.GetStreamingTextContentsAsync(prompt, executionSettings)) { - Console.Write(message); + Write(content); } - Console.WriteLine(); + WriteLine(); + } + + public Example32_StreamingCompletion(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example33_StreamingChat.cs b/dotnet/samples/KernelSyntaxExamples/Example33_StreamingChat.cs index c577838105fc..1b0223e36fce 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example33_StreamingChat.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example33_StreamingChat.cs @@ -1,50 +1,47 @@ // Copyright (c) Microsoft. All rights reserved. 
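Related to the streaming text generation sample above (Example32): streaming can also be driven through the kernel itself. The following sketch is not part of this change and assumes the InvokePromptStreamingAsync extension is available in the targeted SDK version; the prompt is a placeholder.

// Sketch: stream a prompt through the Kernel instead of calling the text generation service directly.
Kernel kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
    .Build();

await foreach (var update in kernel.InvokePromptStreamingAsync("Write one paragraph why AI is awesome"))
{
    Write(update); // each update is a streaming content chunk
}
WriteLine();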
-using System; using System.Linq; using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; - -/** - * The following example shows how to use Semantic Kernel with streaming Chat Completion - */ -// ReSharper disable once InconsistentNaming -public static class Example33_StreamingChat -{ - public static async Task RunAsync() - { - await AzureOpenAIChatStreamSampleAsync(); - await OpenAIChatStreamSampleAsync(); - } +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; - private static async Task OpenAIChatStreamSampleAsync() +// The following example shows how to use Semantic Kernel with streaming Chat Completion +public class Example33_StreamingChat : BaseTest +{ + [Fact] + public Task OpenAIChatStreamSampleAsync() { - Console.WriteLine("======== Open AI - ChatGPT Streaming ========"); + WriteLine("======== Open AI - ChatGPT Streaming ========"); - OpenAIChatCompletion openAIChatCompletion = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - await StartStreamingChatAsync(openAIChatCompletion); + return this.StartStreamingChatAsync(chatCompletionService); } - private static async Task AzureOpenAIChatStreamSampleAsync() + [Fact] + public Task AzureOpenAIChatStreamSampleAsync() { - Console.WriteLine("======== Azure Open AI - ChatGPT Streaming ========"); + WriteLine("======== Azure Open AI - ChatGPT Streaming ========"); - AzureChatCompletion azureChatCompletion = new( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey); + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); - await StartStreamingChatAsync(azureChatCompletion); + return this.StartStreamingChatAsync(chatCompletionService); } - private static async Task StartStreamingChatAsync(IChatCompletion chatCompletion) + private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService) { - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); + WriteLine("Chat content:"); + WriteLine("------------------------"); - var chatHistory = chatCompletion.CreateNewChat("You are a librarian, expert about books"); + var chatHistory = new ChatHistory("You are a librarian, expert about books"); await MessageOutputAsync(chatHistory); // First user message @@ -52,41 +49,54 @@ private static async Task StartStreamingChatAsync(IChatCompletion chatCompletion await MessageOutputAsync(chatHistory); // First bot assistant message - await StreamMessageOutputAsync(chatCompletion, chatHistory, AuthorRole.Assistant); + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); // Second user message chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); await MessageOutputAsync(chatHistory); // Second bot assistant message - await StreamMessageOutputAsync(chatCompletion, chatHistory, AuthorRole.Assistant); + await 
StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); } - private static async Task StreamMessageOutputAsync(IChatCompletion chatGPT, ChatHistory chatHistory, AuthorRole authorRole) + private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) { - Console.Write($"{authorRole}: "); + bool roleWritten = false; string fullMessage = string.Empty; - await foreach (string message in chatGPT.GenerateMessageStreamAsync(chatHistory)) + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) { - fullMessage += message; - Console.Write(message); + if (!roleWritten && chatUpdate.Role.HasValue) + { + Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Write(chatUpdate.Content); + } } - Console.WriteLine("\n------------------------"); + WriteLine("\n------------------------"); chatHistory.AddMessage(authorRole, fullMessage); } /// /// Outputs the last message of the chat history /// - private static Task MessageOutputAsync(ChatHistory chatHistory) + private Task MessageOutputAsync(ChatHistory chatHistory) { - var message = chatHistory.Messages.Last(); + var message = chatHistory.Last(); - Console.WriteLine($"{message.Role}: {message.Content}"); - Console.WriteLine("------------------------"); + WriteLine($"{message.Role}: {message.Content}"); + WriteLine("------------------------"); return Task.CompletedTask; } + + public Example33_StreamingChat(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example34_CustomChatModel.cs b/dotnet/samples/KernelSyntaxExamples/Example34_CustomChatModel.cs deleted file mode 100644 index c9e0c399d2de..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example34_CustomChatModel.cs +++ /dev/null @@ -1,215 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Orchestration; - -/** - * The following example shows how to plug use a custom chat model. - * - * This might be useful in a few scenarios, for example: - * - You are not using OpenAI or Azure OpenAI models - * - You are using OpenAI/Azure OpenAI models but the models are behind a web service with a different API schema - * - You want to use a local model - */ -public sealed class MyChatCompletionService : IChatCompletion -{ - public ChatHistory CreateNewChat(string? instructions = null) - { - var chatHistory = new MyChatHistory(); - - if (!string.IsNullOrWhiteSpace(instructions)) - { - chatHistory.Add(new MyChatMessage(MyRoles.SuperUser, instructions)); - } - - return chatHistory; - } - - public Task> GetChatCompletionsAsync(ChatHistory chat, AIRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) - { - return Task.FromResult>(new List - { - new MyChatResult(MyRoles.Bot, "Hi I'm your SK Custom Assistant and I'm here to help you to create custom chats like this. :)") - }); - } - - public IAsyncEnumerable GetStreamingChatCompletionsAsync(ChatHistory chat, AIRequestSettings? 
requestSettings = null, CancellationToken cancellationToken = default) - { - return (new List - { - new MyChatStreamingResult(MyRoles.Bot, "Hi I'm your SK Custom Assistant and I'm here to help you to create custom chats like this. :)") - }).ToAsyncEnumerable(); - } -} - -public class MyChatStreamingResult : IChatStreamingResult -{ - private readonly ChatMessageBase _message; - private readonly MyRoles _role; - public ModelResult ModelResult { get; private set; } - - public MyChatStreamingResult(MyRoles role, string content) - { - this._role = role; - this._message = new MyChatMessage(role, content); - this.ModelResult = new ModelResult(content); - } - public async IAsyncEnumerable GetStreamingChatMessageAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var streamedOutput = this._message.Content.Split(' '); - foreach (string word in streamedOutput) - { - await Task.Delay(100, cancellationToken); - yield return new MyChatMessage(this._role, $"{word} "); - } - } -} - -public class MyChatResult : IChatResult -{ - private readonly ChatMessageBase _message; - private readonly MyRoles _role; - public ModelResult ModelResult { get; private set; } - - public MyChatResult(MyRoles role, string content) - { - this._role = role; - this._message = new MyChatMessage(role, content); - this.ModelResult = new ModelResult(content); - } - - public Task GetChatMessageAsync(CancellationToken cancellationToken = default) - { - return Task.FromResult(this._message); - } -} - -public class MyChatMessage : ChatMessageBase -{ - public MyChatMessage(MyRoles role, string content) : base(new AuthorRole(role.ToString()), content) - { - } -} - -public class MyChatHistory : ChatHistory -{ - public void AddMessage(MyRoles role, string message) - { - this.Add(new MyChatMessage(role, message)); - } -} - -public enum MyRoles -{ - SuperUser, - User, - Bot -} - -// ReSharper disable once InconsistentNaming -public static class Example34_CustomChatModel -{ - public static async Task RunAsync() - { - await CustomChatStreamSampleAsync(); - await CustomChatSampleAsync(); - - /* Output: - - Chat content: - ------------------------ - System: You are a my SK Custom Assistant - ------------------------ - User: Hi, who are you? - ------------------------ - Assistant: Hi I'm your SK Custom Assistant and I'm here to help you to create custom chats like this. 
:) - ------------------------ - */ - } - - private static async Task CustomChatSampleAsync() - { - Console.WriteLine("======== Custom LLM - Chat Completion ========"); - - IChatCompletion customChat = new MyChatCompletionService(); - - await StartChatAsync(customChat); - } - - private static async Task StartChatAsync(IChatCompletion customChat) - { - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = (MyChatHistory)customChat.CreateNewChat("You are a my SK Custom Assistant"); - - // First user message - chatHistory.AddMessage(MyRoles.User, "Hi, who are you?"); - await MessageOutputAsync(chatHistory); - - // First bot assistant message - string reply = await customChat.GenerateMessageAsync(chatHistory); - chatHistory.AddMessage(MyRoles.Bot, reply); - await MessageOutputAsync(chatHistory); - } - - private static async Task CustomChatStreamSampleAsync() - { - Console.WriteLine("======== Custom LLM - Chat Completion Streaming ========"); - - IChatCompletion customChat = new MyChatCompletionService(); - - await StartStreamingChatAsync(customChat); - } - - private static async Task StartStreamingChatAsync(IChatCompletion customChat) - { - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = (MyChatHistory)customChat.CreateNewChat("You are a my SK Custom Assistant"); - await MessageOutputAsync(chatHistory); - - // First user message - chatHistory.AddMessage(MyRoles.User, "Hi, who are you?"); - await MessageOutputAsync(chatHistory); - - // Bot assistant message - await StreamMessageOutputAsync(customChat, chatHistory); - } - - /// - /// Outputs the last message of the chat history - /// - private static Task MessageOutputAsync(MyChatHistory chatHistory) - { - var message = chatHistory.Messages.Last(); - - Console.WriteLine($"{message.Role}: {message.Content}"); - Console.WriteLine("------------------------"); - - return Task.CompletedTask; - } - - private static async Task StreamMessageOutputAsync(IChatCompletion customChat, MyChatHistory chatHistory, MyRoles myModelRole = MyRoles.Bot) - { - Console.Write($"{myModelRole}: "); - string fullMessage = string.Empty; - - await foreach (string message in customChat.GenerateMessageStreamAsync(chatHistory)) - { - fullMessage += message; - Console.Write(message); - } - - Console.WriteLine("\n------------------------"); - chatHistory.AddMessage(myModelRole, fullMessage); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example35_GrpcPlugins.cs b/dotnet/samples/KernelSyntaxExamples/Example35_GrpcPlugins.cs index 886920fa621a..f9d8ed41d710 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example35_GrpcPlugins.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example35_GrpcPlugins.cs @@ -1,35 +1,38 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Functions.Grpc.Extensions; -using Microsoft.SemanticKernel.Orchestration; -using RepoUtils; - -/** - * This example shows how to use gRPC plugins. - */ -// ReSharper disable once InconsistentNaming -public static class Example35_GrpcPlugins +using Microsoft.SemanticKernel.Plugins.Grpc; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// This example shows how to use gRPC plugins. 
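The custom chat model sample (Example34) is deleted above without a counterpart in this diff. For orientation only, a custom connector against the 1.0 abstractions might be sketched roughly as follows; the IChatCompletionService member signatures are assumed from Microsoft.SemanticKernel.ChatCompletion and should be checked against the installed package, and the canned reply is a placeholder.

using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

// Rough sketch of a custom chat connector (assumed interface shape; verify against the SDK in use).
public sealed class MyChatCompletionService : IChatCompletionService
{
    public IReadOnlyDictionary<string, object?> Attributes { get; } = new Dictionary<string, object?>();

    public Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(
        ChatHistory chatHistory,
        PromptExecutionSettings? executionSettings = null,
        Kernel? kernel = null,
        CancellationToken cancellationToken = default)
    {
        // Call your own model or web service here; a canned reply keeps the sketch self-contained.
        IReadOnlyList<ChatMessageContent> reply = new[]
        {
            new ChatMessageContent(AuthorRole.Assistant, "Hi, I'm a custom chat connector.")
        };
        return Task.FromResult(reply);
    }

    public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(
        ChatHistory chatHistory,
        PromptExecutionSettings? executionSettings = null,
        Kernel? kernel = null,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        // Stream the same canned reply word by word to mimic token-by-token output.
        foreach (var word in "Hi, I'm a custom chat connector.".Split(' '))
        {
            await Task.Delay(50, cancellationToken);
            yield return new StreamingChatMessageContent(AuthorRole.Assistant, word + " ");
        }
    }
}

An instance of such a class could then be consumed the same way the built-in connectors are used in Example33, via GetChatMessageContentsAsync or GetStreamingChatMessageContentsAsync.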
+public class Example35_GrpcPlugins : BaseTest { - public static async Task RunAsync() + [Fact(Skip = "Setup credentials")] + public async Task RunAsync() { - var kernel = new KernelBuilder().WithLoggerFactory(ConsoleLogger.LoggerFactory).Build(); + Kernel kernel = new(); // Import a gRPC plugin using one of the following Kernel extension methods - // kernel.RegisterGrpcFunctions - // kernel.ImportGrpcFunctionsFromDirectory - var plugin = kernel.ImportGrpcFunctionsFromFile("", ""); + // kernel.ImportGrpcPlugin + // kernel.ImportGrpcPluginFromDirectory + var plugin = kernel.ImportPluginFromGrpcFile("", ""); // Add arguments for required parameters, arguments for optional ones can be skipped. - var contextVariables = new ContextVariables(); - contextVariables.Set("address", ""); - contextVariables.Set("payload", ""); + var arguments = new KernelArguments(); + arguments["address"] = ""; + arguments["payload"] = ""; // Run - var result = await kernel.RunAsync(contextVariables, plugin[""]); + var result = await kernel.InvokeAsync(plugin[""], arguments); - Console.WriteLine("Plugin response: {0}", result.GetValue()); + WriteLine($"Plugin response: {result.GetValue()}"); + } + + public Example35_GrpcPlugins(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example36_MultiCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example36_MultiCompletion.cs index 8fdeca2febfb..486ebb5859bc 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example36_MultiCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example36_MultiCompletion.cs @@ -1,49 +1,45 @@ // Copyright (c) Microsoft. All rights reserved. -using System; using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; -/** - * The following example shows how to use Semantic Kernel with streaming Multiple Results Chat Completion. - */ -// ReSharper disable once InconsistentNaming -public static class Example36_MultiCompletion -{ - public static async Task RunAsync() - { - await AzureOpenAIMultiChatCompletionAsync(); - await OpenAIMultiChatCompletionAsync(); - } +namespace Examples; - private static async Task AzureOpenAIMultiChatCompletionAsync() +// The following example shows how to use Semantic Kernel with streaming Multiple Results Chat Completion.
+public class Example36_MultiCompletion : BaseTest +{ + [Fact] + public Task AzureOpenAIMultiChatCompletionAsync() { - Console.WriteLine("======== Azure OpenAI - Multiple Chat Completion ========"); + WriteLine("======== Azure OpenAI - Multiple Chat Completion ========"); - var chatCompletion = new AzureChatCompletion( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey); + var chatCompletionService = new AzureOpenAIChatCompletionService( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); - await ChatCompletionAsync(chatCompletion); + return ChatCompletionAsync(chatCompletionService); } - private static async Task OpenAIMultiChatCompletionAsync() + [Fact] + public Task OpenAIMultiChatCompletionAsync() { - Console.WriteLine("======== Open AI - Multiple Chat Completion ========"); + WriteLine("======== Open AI - Multiple Chat Completion ========"); - IChatCompletion chatCompletion = new OpenAIChatCompletion( + var chatCompletionService = new OpenAIChatCompletionService( TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - await ChatCompletionAsync(chatCompletion); + return ChatCompletionAsync(chatCompletionService); } - private static async Task ChatCompletionAsync(IChatCompletion chatCompletion) + private async Task ChatCompletionAsync(IChatCompletionService chatCompletionService) { - var requestSettings = new OpenAIRequestSettings() + var executionSettings = new OpenAIPromptExecutionSettings() { MaxTokens = 200, FrequencyPenalty = 0, @@ -56,12 +52,16 @@ private static async Task ChatCompletionAsync(IChatCompletion chatCompletion) var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Write one paragraph about why AI is awesome"); - await foreach (string message in chatCompletion.GenerateMessageStreamAsync(chatHistory)) + foreach (var chatMessageChoice in await chatCompletionService.GetChatMessageContentsAsync(chatHistory, executionSettings)) { - Console.Write(message); - Console.WriteLine("-------------"); + Write(chatMessageChoice.Content ?? string.Empty); + WriteLine("\n-------------\n"); } - Console.WriteLine(); + WriteLine(); + } + + public Example36_MultiCompletion(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example37_MultiStreamingCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example37_MultiStreamingCompletion.cs deleted file mode 100644 index 4cb9e4b0d49d..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example37_MultiStreamingCompletion.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
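Example37 (streaming multi-completion), deleted here, has no direct replacement in this diff. For reference only, an equivalent with the new API might be sketched like this, assuming ResultsPerPrompt on OpenAIPromptExecutionSettings and ChoiceIndex on the streaming updates behave as in the removed sample; chatCompletionService is the service instance created in Example36 above, and the usual System.Collections.Generic using is assumed.

// Sketch: stream several choices for one prompt and group the chunks by choice index.
var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 };

var chatHistory = new ChatHistory();
chatHistory.AddUserMessage("Write one paragraph about why AI is awesome");

var messages = new Dictionary<int, string>();
await foreach (var update in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings))
{
    if (update.Content is { Length: > 0 })
    {
        // Accumulate text per choice; ChoiceIndex identifies which of the requested results a chunk belongs to.
        messages[update.ChoiceIndex] = messages.TryGetValue(update.ChoiceIndex, out var text)
            ? text + update.Content
            : update.Content;
    }
}

foreach (var (choice, text) in messages)
{
    WriteLine($"Choice {choice}: {text}");
}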
- -using System; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; - -/** - * The following example shows how to use Semantic Kernel with streaming Multiple Results Chat Completion - */ -// ReSharper disable once InconsistentNaming -public static class Example37_MultiStreamingCompletion -{ - public static async Task RunAsync() - { - await AzureOpenAIMultiChatCompletionStreamAsync(); - await OpenAIChatCompletionStreamAsync(); - } - - private static async Task AzureOpenAIMultiChatCompletionStreamAsync() - { - Console.WriteLine("======== Azure OpenAI - Multiple Chat Completion - Raw Streaming ========"); - - var chatCompletion = new AzureChatCompletion( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey); - - await ChatCompletionStreamAsync(chatCompletion); - } - - private static async Task OpenAIChatCompletionStreamAsync() - { - Console.WriteLine("======== Open AI - Multiple Chat Completion - Raw Streaming ========"); - - IChatCompletion chatCompletion = new OpenAIChatCompletion( - TestConfiguration.OpenAI.ChatModelId, - TestConfiguration.OpenAI.ApiKey); - - await ChatCompletionStreamAsync(chatCompletion); - } - - private static async Task ChatCompletionStreamAsync(IChatCompletion chatCompletion) - { - var requestSettings = new OpenAIRequestSettings() - { - MaxTokens = 200, - FrequencyPenalty = 0, - PresencePenalty = 0, - Temperature = 1, - TopP = 0.5, - ResultsPerPrompt = 3 - }; - - var chatHistory = new ChatHistory(); - chatHistory.AddUserMessage("Write one paragraph about why AI is awesome"); - - await foreach (string message in chatCompletion.GenerateMessageStreamAsync(chatHistory)) - { - Console.Write(message); - } - - Console.WriteLine(); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example40_DIContainer.cs b/dotnet/samples/KernelSyntaxExamples/Example40_DIContainer.cs index 88e8b20b2730..15e4f120f5b5 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example40_DIContainer.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example40_DIContainer.cs @@ -1,95 +1,28 @@ // Copyright (c) Microsoft. All rights reserved. +using System.IO; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Reliability.Basic; -using Microsoft.SemanticKernel.Services; - -using Microsoft.SemanticKernel.TemplateEngine; -using Microsoft.SemanticKernel.TemplateEngine.Basic; using RepoUtils; +using Xunit; +using Xunit.Abstractions; -/** - * The following examples show how to use SK SDK in applications using DI/IoC containers. - */ -public static class Example40_DIContainer -{ - public static async Task RunAsync() - { - await UseKernelInDIPowerAppAsync(); - - await UseKernelInDIPowerApp_AdvancedScenarioAsync(); - } - - /// - /// This example shows how to register a Kernel in a DI container using KernelBuilder instead of - /// registering its dependencies. - /// - private static async Task UseKernelInDIPowerAppAsync() - { - //Bootstrapping code that initializes the modules, components, and classes that applications use. 
- //For regular .NET applications, the bootstrapping code usually resides either in the Main method or very close to it. - //In ASP.NET Core applications, the bootstrapping code is typically located in the ConfigureServices method of the Startup class. - - //Registering Kernel dependencies - var collection = new ServiceCollection(); - collection.AddTransient((_) => ConsoleLogger.LoggerFactory); - - //Registering Kernel - collection.AddTransient((serviceProvider) => - { - return new KernelBuilder() - .WithLoggerFactory(serviceProvider.GetRequiredService()) - .WithOpenAITextCompletionService(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey) - .Build(); - }); - - //Registering class that uses Kernel to execute a plugin - collection.AddTransient(); - - //Creating a service provider for resolving registered services - var serviceProvider = collection.BuildServiceProvider(); +namespace Examples; - //If an application follows DI guidelines, the following line is unnecessary because DI will inject an instance of the KernelClient class to a class that references it. - //DI container guidelines - https://learn.microsoft.com/en-us/dotnet/core/extensions/dependency-injection-guidelines#recommendations - var kernelClient = serviceProvider.GetRequiredService(); - - //Execute the function - await kernelClient.SummarizeAsync("What's the tallest building in South America?"); - } - - /// - /// This example shows how to registered Kernel and all its dependencies in DI container. - /// - private static async Task UseKernelInDIPowerApp_AdvancedScenarioAsync() +// The following examples show how to use SK SDK in applications using DI/IoC containers. +public class Example40_DIContainer : BaseTest +{ + [Fact] + public async Task RunAsync() { - //Bootstrapping code that initializes the modules, components, and classes that applications use. - //For regular .NET applications, the bootstrapping code usually resides either in the Main method or very close to it. - //In ASP.NET Core applications, the bootstrapping code is typically located in the ConfigureServices method of the Startup class. - - //Registering AI services Kernel is going to use - var aiServicesCollection = new AIServiceCollection(); - aiServicesCollection.SetService(() => new OpenAITextCompletion(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey)); - - //Registering Kernel dependencies var collection = new ServiceCollection(); - collection.AddTransient((_) => ConsoleLogger.LoggerFactory); - collection.AddTransient((_) => BasicHttpRetryHandlerFactory.Instance); - collection.AddTransient(); - collection.AddTransient(); - collection.AddTransient((_) => NullMemory.Instance); - collection.AddTransient((_) => aiServicesCollection.Build()); //Registering AI service provider that is used by Kernel to resolve AI services runtime + collection.AddSingleton(ConsoleLogger.LoggerFactory); + collection.AddOpenAITextGeneration(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey); + collection.AddSingleton(); - //Registering Kernel - collection.AddTransient(); - - //Registering class that uses Kernel to execute a plugin + // Registering class that uses Kernel to execute a plugin collection.AddTransient(); //Creating a service provider for resolving registered services @@ -106,14 +39,12 @@ private static async Task UseKernelInDIPowerApp_AdvancedScenarioAsync() /// /// Class that uses/references Kernel. 
/// -#pragma warning disable CA1812 // Avoid uninstantiated internal classes private sealed class KernelClient -#pragma warning restore CA1812 // Avoid uninstantiated internal classes { - private readonly IKernel _kernel; + private readonly Kernel _kernel; private readonly ILogger _logger; - public KernelClient(IKernel kernel, ILoggerFactory loggerFactory) + public KernelClient(Kernel kernel, ILoggerFactory loggerFactory) { this._kernel = kernel; this._logger = loggerFactory.CreateLogger(nameof(KernelClient)); @@ -123,11 +54,15 @@ public async Task SummarizeAsync(string ask) { string folder = RepoFiles.SamplePluginsPath(); - var summarizeFunctions = this._kernel.ImportSemanticFunctionsFromDirectory(folder, "SummarizePlugin"); + var summarizePlugin = this._kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "SummarizePlugin")); - var result = await this._kernel.RunAsync(ask, summarizeFunctions["Summarize"]); + var result = await this._kernel.InvokeAsync(summarizePlugin["Summarize"], new() { ["input"] = ask }); this._logger.LogWarning("Result - {0}", result.GetValue()); } } + + public Example40_DIContainer(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example41_HttpClientUsage.cs b/dotnet/samples/KernelSyntaxExamples/Example41_HttpClientUsage.cs index c1fb70e9c54a..2b11a19c568c 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example41_HttpClientUsage.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example41_HttpClientUsage.cs @@ -2,38 +2,24 @@ using System; using System.Net.Http; -using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; -/** - * These examples show how to use HttpClient and HttpClientFactory within SK SDK. - */ +namespace Examples; -public static class Example41_HttpClientUsage +// These examples show how to use HttpClient and HttpClientFactory within SK SDK. +public class Example41_HttpClientUsage : BaseTest { - public static Task RunAsync() - { - //Examples showing how to use HttpClient. - UseDefaultHttpClient(); - - UseCustomHttpClient(); - - //Examples showing how to use HttpClientFactory. - UseBasicRegistrationWithHttpClientFactory(); - - UseNamedRegistrationWitHttpClientFactory(); - - return Task.CompletedTask; - } - /// /// Demonstrates the usage of the default HttpClient provided by the SK SDK. /// - private static void UseDefaultHttpClient() + [Fact] + public void UseDefaultHttpClient() { - var kernel = new KernelBuilder() - .WithOpenAIChatCompletionService( + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( modelId: TestConfiguration.OpenAI.ChatModelId, apiKey: TestConfiguration.OpenAI.ApiKey) // If you need to use the default HttpClient from the SK SDK, simply omit the argument for the httpMessageInvoker parameter. .Build(); @@ -42,13 +28,14 @@ private static void UseDefaultHttpClient() /// /// Demonstrates the usage of a custom HttpClient. /// - private static void UseCustomHttpClient() + [Fact] + public void UseCustomHttpClient() { using var httpClient = new HttpClient(); // If you need to use a custom HttpClient, simply pass it as an argument for the httpClient parameter. 
- var kernel = new KernelBuilder() - .WithOpenAIChatCompletionService( + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( modelId: TestConfiguration.OpenAI.ModelId, apiKey: TestConfiguration.OpenAI.ApiKey, httpClient: httpClient) @@ -58,31 +45,31 @@ private static void UseCustomHttpClient() /// /// Demonstrates the "basic usage" approach for HttpClientFactory. /// - private static void UseBasicRegistrationWithHttpClientFactory() + [Fact] + public void UseBasicRegistrationWithHttpClientFactory() { //More details - https://learn.microsoft.com/en-us/dotnet/core/extensions/httpclient-factory#basic-usage var serviceCollection = new ServiceCollection(); serviceCollection.AddHttpClient(); - var kernel = serviceCollection.AddTransient((sp) => + var kernel = serviceCollection.AddTransient((sp) => { var factory = sp.GetRequiredService(); - var kernel = new KernelBuilder() - .WithOpenAIChatCompletionService( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey, - httpClient: factory.CreateClient()) - .Build(); - - return kernel; + return Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey, + httpClient: factory.CreateClient()) + .Build(); }); } /// /// Demonstrates the "named clients" approach for HttpClientFactory. /// - private static void UseNamedRegistrationWitHttpClientFactory() + [Fact] + public void UseNamedRegistrationWitHttpClientFactory() { // More details https://learn.microsoft.com/en-us/dotnet/core/extensions/httpclient-factory#named-clients @@ -95,18 +82,20 @@ private static void UseNamedRegistrationWitHttpClientFactory() client.BaseAddress = new Uri("https://api.openai.com/v1/", UriKind.Absolute); }); - var kernel = serviceCollection.AddTransient((sp) => + var kernel = serviceCollection.AddTransient((sp) => { var factory = sp.GetRequiredService(); - var kernel = new KernelBuilder() - .WithOpenAIChatCompletionService( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey, - httpClient: factory.CreateClient("test-client")) - .Build(); - - return kernel; + return Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey, + httpClient: factory.CreateClient("test-client")) + .Build(); }); } + + public Example41_HttpClientUsage(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs b/dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs index 6638b78f7001..eb006df2b0f5 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs @@ -2,202 +2,106 @@ // ========================================================================================================== // The easier way to instantiate the Semantic Kernel is to use KernelBuilder. -// You can access the builder using new KernelBuilder(). +// You can access the builder using Kernel.CreateBuilder(). 
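// A minimal sketch of the builder flow described in the comment above, assuming the
// TestConfiguration values already used by these samples and only APIs introduced elsewhere
// in this change (Kernel.CreateBuilder, AddOpenAIChatCompletion, CreateFunctionFromPrompt,
// InvokeAsync), written as if it were a member of one of these BaseTest-derived sample classes.
// The test name is illustrative only.
[Fact]
public async Task BuildAndInvokeMinimalKernelAsync()
{
    // Configure a kernel with a single OpenAI chat completion service.
    Kernel kernel = Kernel.CreateBuilder()
        .AddOpenAIChatCompletion(
            modelId: TestConfiguration.OpenAI.ChatModelId,
            apiKey: TestConfiguration.OpenAI.ApiKey)
        .Build();

    // Create a prompt function from a template and invoke it with named arguments.
    KernelFunction haiku = kernel.CreateFunctionFromPrompt("Write a haiku about {{$topic}}.");
    FunctionResult result = await kernel.InvokeAsync(haiku, new() { ["topic"] = "kernel builders" });
    WriteLine(result.GetValue<string>());
}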
-#pragma warning disable CA1852 - -using System; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; +using System.Diagnostics; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Plugins.Memory; -using Microsoft.SemanticKernel.Reliability.Basic; -using Microsoft.SemanticKernel.Reliability.Polly; -using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.Plugins.Core; +using Xunit; +using Xunit.Abstractions; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using Polly; -using Polly.Retry; +namespace Examples; -// ReSharper disable once InconsistentNaming -public static class Example42_KernelBuilder +public class Example42_KernelBuilder : BaseTest { - public static Task RunAsync() + [Fact] + public void BuildKernelWithAzureChatCompletion() { - string azureOpenAIKey = TestConfiguration.AzureOpenAI.ApiKey; - string azureOpenAIEndpoint = TestConfiguration.AzureOpenAI.Endpoint; - string azureOpenAIChatCompletionDeployment = TestConfiguration.AzureOpenAI.ChatDeploymentName; - string azureOpenAIEmbeddingDeployment = TestConfiguration.AzureOpenAIEmbeddings.DeploymentName; - -#pragma warning disable CA1852 // Seal internal types - IKernel kernel1 = new KernelBuilder().Build(); -#pragma warning restore CA1852 // Seal internal types - - IKernel kernel2 = new KernelBuilder().Build(); - - // ========================================================================================================== - // new KernelBuilder() returns a new builder instance, in case you want to configure the builder differently. - // The following are 3 distinct builder instances. - - var builder1 = new KernelBuilder(); - - var builder2 = new KernelBuilder(); - - var builder3 = new KernelBuilder(); - - // ========================================================================================================== - // A builder instance can create multiple kernel instances, e.g. in case you need - // multiple kernels that share the same dependencies. - - var builderX = new KernelBuilder(); - - var kernelX1 = builderX.Build(); - var kernelX2 = builderX.Build(); - var kernelX3 = builderX.Build(); - - // ========================================================================================================== - // Kernel instances can be created the usual way with "new", though the process requires particular - // attention to how dependencies are wired together. Although the building blocks are available - // to enable custom configurations, we highly recommend using KernelBuilder instead, to ensure - // a correct dependency injection. 
- - // Manually setup all the dependencies to be used by the kernel - var loggerFactory = NullLoggerFactory.Instance; - var memoryStorage = new VolatileMemoryStore(); - var textEmbeddingGenerator = new AzureTextEmbeddingGeneration( - modelId: azureOpenAIEmbeddingDeployment, - endpoint: azureOpenAIEndpoint, - apiKey: azureOpenAIKey, - loggerFactory: loggerFactory); - - var memory = new SemanticTextMemory(memoryStorage, textEmbeddingGenerator); - var plugins = new FunctionCollection(); - var templateEngine = new BasicPromptTemplateEngine(loggerFactory); - - var httpHandlerFactory = BasicHttpRetryHandlerFactory.Instance; - //var httpHandlerFactory = new PollyHttpRetryHandlerFactory( your policy ); - - using var httpHandler = httpHandlerFactory.Create(loggerFactory); - using var httpClient = new HttpClient(httpHandler); - var aiServices = new AIServiceCollection(); - ITextCompletion Factory() => new AzureChatCompletion( - modelId: azureOpenAIChatCompletionDeployment, - endpoint: azureOpenAIEndpoint, - apiKey: azureOpenAIKey, - httpClient, - loggerFactory); - aiServices.SetService("foo", Factory); - IAIServiceProvider aiServiceProvider = aiServices.Build(); - - // Create kernel manually injecting all the dependencies - using var kernel3 = new Kernel(plugins, aiServiceProvider, templateEngine, memory, httpHandlerFactory, loggerFactory); - - // ========================================================================================================== - // The kernel builder purpose is to simplify this process, automating how dependencies - // are connected, still allowing to customize parts of the composition. - - // ========================================================================================================== - // The AI services are defined with the builder - - var kernel7 = new KernelBuilder() - .WithAzureChatCompletionService( - deploymentName: azureOpenAIChatCompletionDeployment, - endpoint: azureOpenAIEndpoint, - apiKey: azureOpenAIKey, - setAsDefault: true) - .Build(); - - // ========================================================================================================== - // When invoking AI, by default the kernel will retry on transient errors, such as throttling and timeouts. - // The default behavior can be configured or a custom retry handler can be injected that will apply to all - // AI requests (when using the kernel). - - var kernel8 = new KernelBuilder().WithRetryBasic( - new BasicRetryConfig - { - MaxRetryCount = 3, - UseExponentialBackoff = true, - // MinRetryDelay = TimeSpan.FromSeconds(2), - // MaxRetryDelay = TimeSpan.FromSeconds(8), - // MaxTotalRetryTime = TimeSpan.FromSeconds(30), - // RetryableStatusCodes = new[] { HttpStatusCode.TooManyRequests, HttpStatusCode.RequestTimeout }, - // RetryableExceptions = new[] { typeof(HttpRequestException) } - }) + // KernelBuilder provides a simple way to configure a Kernel. This constructs a kernel + // with logging and an Azure OpenAI chat completion service configured. 
+ Kernel kernel1 = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId) .Build(); - - var logger = loggerFactory.CreateLogger(); - var retryThreeTimesPolicy = Policy - .Handle(ex - => ex.StatusCode == System.Net.HttpStatusCode.TooManyRequests) - .WaitAndRetryAsync(new[] - { - TimeSpan.FromSeconds(2), - TimeSpan.FromSeconds(4), - TimeSpan.FromSeconds(8) - }, - (ex, timespan, retryCount, _) - => logger?.LogWarning(ex, "Error executing action [attempt {RetryCount} of 3], pausing {PausingMilliseconds}ms", retryCount, timespan.TotalMilliseconds)); - - var kernel9 = new KernelBuilder().WithHttpHandlerFactory(new PollyHttpRetryHandlerFactory(retryThreeTimesPolicy)).Build(); - - var kernel10 = new KernelBuilder().WithHttpHandlerFactory(new PollyRetryThreeTimesFactory()).Build(); - - var kernel11 = new KernelBuilder().WithHttpHandlerFactory(new MyCustomHandlerFactory()).Build(); - - return Task.CompletedTask; } - // Example using the PollyHttpRetryHandler from Reliability.Polly extension - public class PollyRetryThreeTimesFactory : HttpHandlerFactory + [Fact] + public void BuildKernelUsingServiceCollection() { - public override DelegatingHandler Create(ILoggerFactory? loggerFactory = null) - { - var logger = loggerFactory?.CreateLogger(); - - Activator.CreateInstance(typeof(PollyHttpRetryHandler), GetPolicy(logger), logger); - return base.Create(loggerFactory); - } + // For greater flexibility and to incorporate arbitrary services, KernelBuilder.Services + // provides direct access to an underlying IServiceCollection. + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Information)) + .AddHttpClient() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + Kernel kernel2 = builder.Build(); + } - private static AsyncRetryPolicy GetPolicy(ILogger? logger) - { - return Policy - .Handle(ex - => ex.StatusCode == System.Net.HttpStatusCode.TooManyRequests) - .WaitAndRetryAsync(new[] - { - TimeSpan.FromSeconds(2), - TimeSpan.FromSeconds(4), - TimeSpan.FromSeconds(8) - }, - (ex, timespan, retryCount, _) - => logger?.LogWarning(ex, "Error executing action [attempt {RetryCount} of 3], pausing {PausingMilliseconds}ms", - retryCount, - timespan.TotalMilliseconds)); - } + [Fact] + public void BuildKernelWithPlugins() + { + // Plugins may also be configured via the corresponding Plugins property. + var builder = Kernel.CreateBuilder(); + builder.Plugins.AddFromType(); + Kernel kernel3 = builder.Build(); } - // Basic custom retry handler factory - public class MyCustomHandlerFactory : HttpHandlerFactory + [Fact] + public void BuildKernelUsingServiceProvider() { + // Every call to KernelBuilder.Build creates a new Kernel instance, with a new service provider + // and a new plugin collection. + var builder = Kernel.CreateBuilder(); + Debug.Assert(!ReferenceEquals(builder.Build(), builder.Build())); + + // KernelBuilder provides a convenient API for creating Kernel instances. 
However, it is just a + // wrapper around a service collection, ultimately constructing a Kernel + // using the public constructor that's available for anyone to use directly if desired. + var services = new ServiceCollection(); + services.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Information)); + services.AddHttpClient(); + services.AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + Kernel kernel4 = new(services.BuildServiceProvider()); + + // Kernels can also be constructed and resolved via such a dependency injection container. + services.AddTransient(); + Kernel kernel5 = services.BuildServiceProvider().GetRequiredService(); } - // Basic custom empty retry handler - public class MyCustomHandler : DelegatingHandler + [Fact] + public void BuildKernelUsingServiceCollectionExtension() { - protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - // Your custom handler implementation + // In fact, the AddKernel method exists to simplify this, registering a singleton KernelPluginCollection + // that can be populated automatically with all IKernelPlugins registered in the collection, and a + // transient Kernel that can then automatically be constructed from the service provider and resulting + // plugins collection. + var services = new ServiceCollection(); + services.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Information)); + services.AddHttpClient(); + services.AddKernel().AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + services.AddSingleton(sp => KernelPluginFactory.CreateFromType(serviceProvider: sp)); + services.AddSingleton(sp => KernelPluginFactory.CreateFromType(serviceProvider: sp)); + Kernel kernel6 = services.BuildServiceProvider().GetRequiredService(); + } - throw new NotImplementedException(); - } + public Example42_KernelBuilder(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs b/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs index 301662b648a6..123454987a9d 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example43_GetModelResult.cs @@ -1,92 +1,41 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; -using System.Collections.Generic; -using System.Linq; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; using RepoUtils; +using Xunit; +using Xunit.Abstractions; -#pragma warning disable RCS1192 // (Unnecessary usage of verbatim string literal) +namespace Examples; -// ReSharper disable once InconsistentNaming -public static class Example43_GetModelResult +public class Example43_GetModelResult : BaseTest { - public static async Task RunAsync() + [Fact] + public async Task GetTokenUsageMetadataAsync() { - Console.WriteLine("======== Inline Function Definition + Result ========"); + WriteLine("======== Inline Function Definition + Invocation ========"); - IKernel kernel = new KernelBuilder() - .WithOpenAIChatCompletionService( + // Create kernel + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( modelId: TestConfiguration.OpenAI.ChatModelId, apiKey: TestConfiguration.OpenAI.ApiKey) .Build(); - // Function defined using few-shot design pattern + // Create function const string FunctionDefinition = "Hi, give me 5 book suggestions about: {{$input}}"; + KernelFunction myFunction = kernel.CreateFunctionFromPrompt(FunctionDefinition); - var myFunction = kernel.CreateSemanticFunction(FunctionDefinition); + // Invoke function through kernel + FunctionResult result = await kernel.InvokeAsync(myFunction, new() { ["input"] = "travel" }); - // Using InvokeAsync with 3 results (Currently invoke only supports 1 result, but you can get the other results from the ModelResults) - var functionResult = await myFunction.InvokeAsync("Sci-fi", - kernel, - requestSettings: new OpenAIRequestSettings { ResultsPerPrompt = 3, MaxTokens = 500, Temperature = 1, TopP = 0.5 }); - - Console.WriteLine(functionResult.GetValue()); - Console.WriteLine(functionResult.GetModelResults()?.Select(result => result.GetOpenAIChatResult()).AsJson()); - Console.WriteLine(); - - // Using the Kernel RunAsync - var kernelResult = await kernel.RunAsync("sorry I forgot your birthday", myFunction); - var modelResults = kernelResult.FunctionResults.SelectMany(l => l.GetModelResults() ?? 
Enumerable.Empty()); - - Console.WriteLine(kernelResult.GetValue()); - Console.WriteLine(modelResults.LastOrDefault()?.GetOpenAIChatResult()?.Usage.AsJson()); - Console.WriteLine(); - - // Using Chat Completion directly - var chatCompletion = new OpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey); - var prompt = FunctionDefinition.Replace("{{$input}}", $"Translate this date {DateTimeOffset.Now:f} to French format", StringComparison.InvariantCultureIgnoreCase); - - IReadOnlyList completionResults = await chatCompletion.GetCompletionsAsync(prompt, new OpenAIRequestSettings() { MaxTokens = 500, Temperature = 1, TopP = 0.5 }); - - Console.WriteLine(await completionResults[0].GetCompletionAsync()); - Console.WriteLine(completionResults[0].ModelResult.GetOpenAIChatResult().Usage.AsJson()); - Console.WriteLine(); - - // Getting the error details - kernel = new KernelBuilder() - .WithOpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, "Invalid Key") - .Build(); - var errorFunction = kernel.CreateSemanticFunction(FunctionDefinition); - -#pragma warning disable CA1031 // Do not catch general exception types - try - { - await kernel.RunAsync("sorry I forgot your birthday", errorFunction); - } - catch (Exception ex) - { - Console.WriteLine(OutputExceptionDetail(ex)); - } -#pragma warning restore CA1031 // Do not catch general exception types + // Display results + WriteLine(result.GetValue()); + WriteLine(result.Metadata?["Usage"]?.AsJson()); + WriteLine(); + } - string OutputExceptionDetail(Exception? exception) - { - return exception switch - { - HttpOperationException httpException => new { StatusCode = httpException.StatusCode?.ToString(), Message = httpException.Message, Response = httpException.ResponseContent }.AsJson(), - { } e => e.Message, - _ => string.Empty - }; - } + public Example43_GetModelResult(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example44_MultiChatCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example44_MultiChatCompletion.cs index f31482ebb601..c54347fbf174 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example44_MultiChatCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example44_MultiChatCompletion.cs @@ -1,54 +1,50 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; using System.Linq; using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; - -/** - * The following example shows how to use Semantic Kernel with Multiple Results Text Completion as streaming - */ -// ReSharper disable once InconsistentNaming -public static class Example44_MultiChatCompletion -{ - public static async Task RunAsync() - { - await AzureOpenAIMultiChatCompletionAsync(); - await OpenAIMultiChatCompletionAsync(); - } +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; - private static async Task AzureOpenAIMultiChatCompletionAsync() +// The following example shows how to use Semantic Kernel with Multiple Results Text Completion as streaming +public class Example44_MultiChatCompletion : BaseTest +{ + [Fact] + public Task AzureOpenAIMultiChatCompletionAsync() { - Console.WriteLine("======== Azure OpenAI - Multiple Chat Completion ========"); + WriteLine("======== Azure OpenAI - Multiple Chat Completion ========"); - AzureChatCompletion azureChatCompletion = new( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey); + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); - await RunChatAsync(azureChatCompletion); + return RunChatAsync(chatCompletionService); } - private static async Task OpenAIMultiChatCompletionAsync() + [Fact] + public Task OpenAIMultiChatCompletionAsync() { - Console.WriteLine("======== Open AI - Multiple Chat Completion ========"); + WriteLine("======== Open AI - Multiple Chat Completion ========"); - OpenAIChatCompletion openAIChatCompletion = new(modelId: TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + OpenAIChatCompletionService chatCompletionService = new(modelId: TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - await RunChatAsync(openAIChatCompletion); + return RunChatAsync(chatCompletionService); } - private static async Task RunChatAsync(IChatCompletion chatCompletion) + private async Task RunChatAsync(IChatCompletionService chatCompletionService) { - var chatHistory = chatCompletion.CreateNewChat("You are a librarian, expert about books"); + var chatHistory = new ChatHistory("You are a librarian, expert about books"); // First user message chatHistory.AddUserMessage("Hi, I'm looking for book 3 different book suggestions about sci-fi"); await MessageOutputAsync(chatHistory); - var chatRequestSettings = new OpenAIRequestSettings() + var chatExecutionSettings = new OpenAIPromptExecutionSettings() { MaxTokens = 1024, ResultsPerPrompt = 2, @@ -58,26 +54,29 @@ private static async Task RunChatAsync(IChatCompletion chatCompletion) }; // First bot assistant message - foreach (IChatResult chatCompletionResult in await chatCompletion.GetChatCompletionsAsync(chatHistory, chatRequestSettings)) + foreach (var chatMessageChoice in await chatCompletionService.GetChatMessageContentsAsync(chatHistory, chatExecutionSettings)) { - ChatMessageBase chatMessage = await chatCompletionResult.GetChatMessageAsync(); - chatHistory.Add(chatMessage); + 
chatHistory.Add(chatMessageChoice!); await MessageOutputAsync(chatHistory); } - Console.WriteLine(); + WriteLine(); } /// /// Outputs the last message of the chat history /// - private static Task MessageOutputAsync(ChatHistory chatHistory) + private Task MessageOutputAsync(ChatHistory chatHistory) { - var message = chatHistory.Messages.Last(); + var message = chatHistory.Last(); - Console.WriteLine($"{message.Role}: {message.Content}"); - Console.WriteLine("------------------------"); + WriteLine($"{message.Role}: {message.Content}"); + WriteLine("------------------------"); return Task.CompletedTask; } + + public Example44_MultiChatCompletion(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example45_MultiStreamingChatCompletion.cs b/dotnet/samples/KernelSyntaxExamples/Example45_MultiStreamingChatCompletion.cs index c9c461123191..b510839b48e3 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example45_MultiStreamingChatCompletion.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example45_MultiStreamingChatCompletion.cs @@ -2,125 +2,133 @@ using System; using System.Collections.Generic; -using System.Globalization; -using System.Linq; using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; - -/** - * The following example shows how to use Semantic Kernel with Multiple Results Text Completion as streaming - */ -// ReSharper disable once InconsistentNaming -public static class Example45_MultiStreamingChatCompletion -{ - private static readonly object s_lockObject = new(); +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; - public static async Task RunAsync() - { - await AzureOpenAIMultiStreamingChatCompletionAsync(); - await OpenAIMultiStreamingChatCompletionAsync(); - } +namespace Examples; - private static async Task AzureOpenAIMultiStreamingChatCompletionAsync() +// The following example shows how to use Semantic Kernel with multiple streaming chat completion results. 
+public class Example45_MultiStreamingChatCompletion : BaseTest +{ + [Fact] + public Task AzureOpenAIMultiStreamingChatCompletionAsync() { - Console.WriteLine("======== Azure OpenAI - Multiple Chat Completion - Raw Streaming ========"); + WriteLine("======== Azure OpenAI - Multiple Chat Completions - Raw Streaming ========"); - AzureChatCompletion azureChatCompletion = new( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey); + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); - await StreamingChatCompletionAsync(azureChatCompletion); + return StreamingChatCompletionAsync(chatCompletionService, 3); } - private static async Task OpenAIMultiStreamingChatCompletionAsync() + [Fact] + public Task OpenAIMultiStreamingChatCompletionAsync() { - Console.WriteLine("======== Open AI - Multiple Text Completion - Raw Streaming ========"); + WriteLine("======== OpenAI - Multiple Chat Completions - Raw Streaming ========"); - OpenAIChatCompletion openAIChatCompletion = new( + OpenAIChatCompletionService chatCompletionService = new( modelId: TestConfiguration.OpenAI.ChatModelId, apiKey: TestConfiguration.OpenAI.ApiKey); - await StreamingChatCompletionAsync(openAIChatCompletion); + return StreamingChatCompletionAsync(chatCompletionService, 3); } - private static async Task StreamingChatCompletionAsync(IChatCompletion chatCompletion) + /// + /// Streams the results of a chat completion request to the console. + /// + /// Chat completion service to use + /// Number of results to get for each chat completion request + private async Task StreamingChatCompletionAsync(IChatCompletionService chatCompletionService, + int numResultsPerPrompt) { - var requestSettings = new OpenAIRequestSettings() + var executionSettings = new OpenAIPromptExecutionSettings() { MaxTokens = 200, FrequencyPenalty = 0, PresencePenalty = 0, Temperature = 1, TopP = 0.5, - ResultsPerPrompt = 3 + ResultsPerPrompt = numResultsPerPrompt }; var consoleLinesPerResult = 10; - var chatHistory = chatCompletion.CreateNewChat("You are a librarian, expert about books"); + // Uncomment this if you want to use a console app to display the results + // ClearDisplayByAddingEmptyLines(); - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for 5 random title names for sci-fi books"); - await MessageOutputAsync(chatHistory); + var prompt = "Hi, I'm looking for 5 random title names for sci-fi books"; - PrepareDisplay(); + await ProcessStreamAsyncEnumerableAsync(chatCompletionService, prompt, executionSettings, consoleLinesPerResult); - List resultTasks = new(); - int currentResult = 0; - await foreach (var completionResult in chatCompletion.GetStreamingChatCompletionsAsync(chatHistory, requestSettings)) - { - resultTasks.Add(ProcessStreamAsyncEnumerableAsync(completionResult, currentResult++, consoleLinesPerResult)); - } - - Console.WriteLine(); + WriteLine(); - await Task.WhenAll(resultTasks.ToArray()); + // Set cursor position to after displayed results + // Console.SetCursorPosition(0, executionSettings.ResultsPerPrompt * consoleLinesPerResult); - Console.SetCursorPosition(0, requestSettings.ResultsPerPrompt * consoleLinesPerResult); - Console.WriteLine(); + WriteLine(); } - private static async Task 
ProcessStreamAsyncEnumerableAsync(IChatStreamingResult result, int resultNumber, int linesPerResult) + /// + /// Does the actual streaming and display of the chat completion. + /// + private async Task ProcessStreamAsyncEnumerableAsync(IChatCompletionService chatCompletionService, string prompt, + OpenAIPromptExecutionSettings executionSettings, int consoleLinesPerResult) { - string message = string.Empty; + var messagesPerChoice = new Dictionary(); + var chatHistory = new ChatHistory(prompt); - await foreach (var chatMessage in result.GetStreamingChatMessageAsync()) + // For each chat completion update + await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings)) { - string role = CultureInfo.CurrentCulture.TextInfo.ToTitleCase(chatMessage.Role.Label); - message += chatMessage.Content; + // Set cursor position to the beginning of where this choice (i.e. this result of + // a single multi-result request) is to be displayed. + // Console.SetCursorPosition(0, chatUpdate.ChoiceIndex * consoleLinesPerResult + 1); - lock (s_lockObject) + // The first time around, start choice text with role information + if (!messagesPerChoice.ContainsKey(chatUpdate.ChoiceIndex)) { - Console.SetCursorPosition(0, (resultNumber * linesPerResult)); - Console.Write($"{role}: {message}"); + messagesPerChoice[chatUpdate.ChoiceIndex] = $"Role: {chatUpdate.Role ?? new AuthorRole()}\n"; + Write($"Choice index: {chatUpdate.ChoiceIndex}, Role: {chatUpdate.Role ?? new AuthorRole()}"); } + + // Add latest completion bit, if any + if (chatUpdate.Content is { Length: > 0 }) + { + messagesPerChoice[chatUpdate.ChoiceIndex] += chatUpdate.Content; + } + + // Overwrite what is currently in the console area for the updated choice + // Console.Write(messagesPerChoice[chatUpdate.ChoiceIndex]); + Write($"Choice index: {chatUpdate.ChoiceIndex}, Content: {chatUpdate.Content}"); + } + + // Display the aggregated results + foreach (string message in messagesPerChoice.Values) + { + WriteLine("-------------------"); + WriteLine(message); } } /// - /// Break enough lines as the current console window size to display the results + /// Add enough new lines to clear the console window. /// - private static void PrepareDisplay() + private void ClearDisplayByAddingEmptyLines() { for (int i = 0; i < Console.WindowHeight - 2; i++) { - Console.WriteLine(); + WriteLine(); } } - /// - /// Outputs the last message of the chat history - /// - private static Task MessageOutputAsync(ChatHistory chatHistory) + public Example45_MultiStreamingChatCompletion(ITestOutputHelper output) : base(output) { - var message = chatHistory.Messages.Last(); - - Console.WriteLine($"{message.Role}: {message.Content}"); - Console.WriteLine("------------------------"); - - return Task.CompletedTask; } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example48_GroundednessChecks.cs b/dotnet/samples/KernelSyntaxExamples/Example48_GroundednessChecks.cs index 80603a5d2fd1..3842cda76489 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example48_GroundednessChecks.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example48_GroundednessChecks.cs @@ -1,79 +1,40 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; +using System.IO; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Planners; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Planning.Handlebars; using Microsoft.SemanticKernel.Plugins.Core; using RepoUtils; +using xRetry; +using Xunit; +using Xunit.Abstractions; -// ReSharper disable CommentTypo -// ReSharper disable once InconsistentNaming -internal static class Example48_GroundednessChecks -{ - private const string GroundingText = @"""I am by birth a Genevese, and my family is one of the most distinguished of that republic. -My ancestors had been for many years counsellors and syndics, and my father had filled several public situations -with honour and reputation.He was respected by all who knew him for his integrity and indefatigable attention -to public business.He passed his younger days perpetually occupied by the affairs of his country; a variety -of circumstances had prevented his marrying early, nor was it until the decline of life that he became a husband -and the father of a family. +namespace Examples; -As the circumstances of his marriage illustrate his character, I cannot refrain from relating them.One of his -most intimate friends was a merchant who, from a flourishing state, fell, through numerous mischances, into poverty. -This man, whose name was Beaufort, was of a proud and unbending disposition and could not bear to live in poverty -and oblivion in the same country where he had formerly been distinguished for his rank and magnificence. Having -paid his debts, therefore, in the most honourable manner, he retreated with his daughter to the town of Lucerne, -where he lived unknown and in wretchedness.My father loved Beaufort with the truest friendship and was deeply -grieved by his retreat in these unfortunate circumstances.He bitterly deplored the false pride which led his friend -to a conduct so little worthy of the affection that united them.He lost no time in endeavouring to seek him out, -with the hope of persuading him to begin the world again through his credit and assistance. - -Beaufort had taken effectual measures to conceal himself, and it was ten months before my father discovered his -abode.Overjoyed at this discovery, he hastened to the house, which was situated in a mean street near the Reuss. -But when he entered, misery and despair alone welcomed him. Beaufort had saved but a very small sum of money from -the wreck of his fortunes, but it was sufficient to provide him with sustenance for some months, and in the meantime -he hoped to procure some respectable employment in a merchant's house. The interval was, consequently, spent in -inaction; his grief only became more deep and rankling when he had leisure for reflection, and at length it took -so fast hold of his mind that at the end of three months he lay on a bed of sickness, incapable of any exertion. - -His daughter attended him with the greatest tenderness, but she saw with despair that their little fund was -rapidly decreasing and that there was no other prospect of support.But Caroline Beaufort possessed a mind of an -uncommon mould, and her courage rose to support her in her adversity. She procured plain work; she plaited straw -and by various means contrived to earn a pittance scarcely sufficient to support life. 
- -Several months passed in this manner.Her father grew worse; her time was more entirely occupied in attending him; - her means of subsistence decreased; and in the tenth month her father died in her arms, leaving her an orphan and -a beggar.This last blow overcame her, and she knelt by Beaufort's coffin weeping bitterly, when my father entered -the chamber. He came like a protecting spirit to the poor girl, who committed herself to his care; and after the -interment of his friend he conducted her to Geneva and placed her under the protection of a relation.Two years -after this event Caroline became his wife."""; - - public static async Task RunAsync() - { - await GroundednessCheckingAsync(); - await PlanningWithGroundednessAsync(); - } - - public static async Task GroundednessCheckingAsync() +public class Example48_GroundednessChecks : BaseTest +{ + [RetryFact(typeof(HttpOperationException))] + public async Task GroundednessCheckingAsync() { - Console.WriteLine("======== Groundedness Checks ========"); - var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) + WriteLine("\n======== Groundedness Checks ========"); + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId) .Build(); string folder = RepoFiles.SamplePluginsPath(); - var functions = kernel.ImportSemanticFunctionsFromDirectory(folder, - "SummarizePlugin", - "GroundingPlugin"); + var summarizePlugin = kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "SummarizePlugin")); + var groundingPlugin = kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "GroundingPlugin")); - var create_summary = functions["Summarize"]; - var entityExtraction = functions["ExtractEntities"]; - var reference_check = functions["ReferenceCheckEntities"]; - var entity_excision = functions["ExciseEntities"]; + var create_summary = summarizePlugin["Summarize"]; + var entityExtraction = groundingPlugin["ExtractEntities"]; + var reference_check = groundingPlugin["ReferenceCheckEntities"]; + var entity_excision = groundingPlugin["ExciseEntities"]; var summaryText = @" My father, a respected resident of Milan, was a close friend of a merchant named Beaufort who, after a series of @@ -83,66 +44,124 @@ public static async Task GroundednessCheckingAsync() her a beggar. My father came to her aid and two years later they married. 
"; - var context = kernel.CreateNewContext(); - context.Variables.Update(summaryText); - context.Variables.Set("topic", "people and places"); - context.Variables.Set("example_entities", "John, Jane, mother, brother, Paris, Rome"); + KernelArguments variables = new() + { + ["input"] = summaryText, + ["topic"] = "people and places", + ["example_entities"] = "John, Jane, mother, brother, Paris, Rome" + }; - var extractionResult = (await kernel.RunAsync(context.Variables, entityExtraction)).GetValue(); + var extractionResult = (await kernel.InvokeAsync(entityExtraction, variables)).ToString(); - Console.WriteLine("======== Extract Entities ========"); - Console.WriteLine(extractionResult); + WriteLine("======== Extract Entities ========"); + WriteLine(extractionResult); - context.Variables.Update(extractionResult); - context.Variables.Set("reference_context", GroundingText); + variables["input"] = extractionResult; + variables["reference_context"] = GroundingText; - var groundingResult = (await kernel.RunAsync(context.Variables, reference_check)).GetValue(); + var groundingResult = (await kernel.InvokeAsync(reference_check, variables)).ToString(); - Console.WriteLine("======== Reference Check ========"); - Console.WriteLine(groundingResult); + WriteLine("\n======== Reference Check ========"); + WriteLine(groundingResult); - context.Variables.Update(summaryText); - context.Variables.Set("ungrounded_entities", groundingResult); - var excisionResult = await kernel.RunAsync(context.Variables, entity_excision); + variables["input"] = summaryText; + variables["ungrounded_entities"] = groundingResult; + var excisionResult = await kernel.InvokeAsync(entity_excision, variables); - Console.WriteLine("======== Excise Entities ========"); - Console.WriteLine(excisionResult.GetValue()); + WriteLine("\n======== Excise Entities ========"); + WriteLine(excisionResult.GetValue()); } - public static async Task PlanningWithGroundednessAsync() + [Fact(Skip = "Unreliable - Needs attention")] + public async Task PlanningWithGroundednessAsync() { var targetTopic = "people and places"; var samples = "John, Jane, mother, brother, Paris, Rome"; - var ask = @$"Make a summary of input text. Then make a list of entities + var ask = @$"Make a summary of the following text. Then make a list of entities related to {targetTopic} (such as {samples}) which are present in the summary. Take this list of entities, and from it make another list of those which are not grounded in the original input text. Finally, rewrite your summary to remove the entities which are not grounded in the original. 
+ +Text:\n{GroundingText}; "; - Console.WriteLine("======== Planning - Groundedness Checks ========"); + WriteLine("\n======== Planning - Groundedness Checks ========"); - var kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey) + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId) .Build(); string folder = RepoFiles.SamplePluginsPath(); - var functions = kernel.ImportSemanticFunctionsFromDirectory(folder, - "SummarizePlugin", - "GroundingPlugin"); + kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "SummarizePlugin")); + kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "GroundingPlugin")); + + kernel.ImportPluginFromType(); + + var planner = new HandlebarsPlanner( + new HandlebarsPlannerOptions() + { + // When using OpenAI models, we recommend using low values for temperature and top_p to minimize planner hallucinations. + ExecutionSettings = new OpenAIPromptExecutionSettings() + { + Temperature = 0.0, + TopP = 0.1, + } + }); + var plan = await planner.CreatePlanAsync(kernel, ask); + + WriteLine($"======== Goal: ========\n{ask}"); + WriteLine($"======== Plan ========\n{plan}"); + + var result = await plan.InvokeAsync(kernel); + + WriteLine("======== Result ========"); + WriteLine(result); + } - kernel.ImportFunctions(new TextPlugin()); + private const string GroundingText = @"""I am by birth a Genevese, and my family is one of the most distinguished of that republic. +My ancestors had been for many years counsellors and syndics, and my father had filled several public situations +with honour and reputation.He was respected by all who knew him for his integrity and indefatigable attention +to public business.He passed his younger days perpetually occupied by the affairs of his country; a variety +of circumstances had prevented his marrying early, nor was it until the decline of life that he became a husband +and the father of a family. + +As the circumstances of his marriage illustrate his character, I cannot refrain from relating them.One of his +most intimate friends was a merchant who, from a flourishing state, fell, through numerous mischances, into poverty. +This man, whose name was Beaufort, was of a proud and unbending disposition and could not bear to live in poverty +and oblivion in the same country where he had formerly been distinguished for his rank and magnificence. Having +paid his debts, therefore, in the most honourable manner, he retreated with his daughter to the town of Lucerne, +where he lived unknown and in wretchedness.My father loved Beaufort with the truest friendship and was deeply +grieved by his retreat in these unfortunate circumstances.He bitterly deplored the false pride which led his friend +to a conduct so little worthy of the affection that united them.He lost no time in endeavouring to seek him out, +with the hope of persuading him to begin the world again through his credit and assistance. 
+ +Beaufort had taken effectual measures to conceal himself, and it was ten months before my father discovered his +abode.Overjoyed at this discovery, he hastened to the house, which was situated in a mean street near the Reuss. +But when he entered, misery and despair alone welcomed him. Beaufort had saved but a very small sum of money from +the wreck of his fortunes, but it was sufficient to provide him with sustenance for some months, and in the meantime +he hoped to procure some respectable employment in a merchant's house. The interval was, consequently, spent in +inaction; his grief only became more deep and rankling when he had leisure for reflection, and at length it took +so fast hold of his mind that at the end of three months he lay on a bed of sickness, incapable of any exertion. - var planner = new SequentialPlanner(kernel); - var plan = await planner.CreatePlanAsync(ask); - Console.WriteLine(plan.ToPlanWithGoalString()); +His daughter attended him with the greatest tenderness, but she saw with despair that their little fund was +rapidly decreasing and that there was no other prospect of support.But Caroline Beaufort possessed a mind of an +uncommon mould, and her courage rose to support her in her adversity. She procured plain work; she plaited straw +and by various means contrived to earn a pittance scarcely sufficient to support life. + +Several months passed in this manner.Her father grew worse; her time was more entirely occupied in attending him; + her means of subsistence decreased; and in the tenth month her father died in her arms, leaving her an orphan and +a beggar.This last blow overcame her, and she knelt by Beaufort's coffin weeping bitterly, when my father entered +the chamber. He came like a protecting spirit to the poor girl, who committed herself to his care; and after the +interment of his friend he conducted her to Geneva and placed her under the protection of a relation.Two years +after this event Caroline became his wife."""; - var results = await kernel.RunAsync(GroundingText, plan); - Console.WriteLine(results.GetValue()); + public Example48_GroundednessChecks(ITestOutputHelper output) : base(output) + { } } @@ -155,18 +174,21 @@ which are not grounded in the original. - Zurich - Mary + ======== Reference Check ======== - Milan - Zurich - Mary + ======== Excise Entities ======== My father, a respected resident of a city, was a close friend of a merchant named Beaufort who, after a series of misfortunes, moved to another city in poverty. My father was upset by his friend's troubles and sought him out, finding him in a mean street. Beaufort had saved a small sum of money, but it was not enough to support him and his daughter. The daughter procured work to eek out a living, but after ten months her father died, leaving her a beggar. My father came to her aid and two years later they married. + ======== Planning - Groundedness Checks ======== Goal: Make a summary of input text. Then make a list of entities related to people and places (such as John, Jane, mother, brother, Paris, Rome) which are present in the summary. @@ -174,27 +196,30 @@ related to people and places (such as John, Jane, mother, brother, Paris, Rome) grounded in the original input text. Finally, rewrite your summary to remove the entities which are not grounded in the original. +Text: +{See GroundingText above} +Plan: +{{!-- Step 1: Set the input text --}} +{{set "inputText" "I am by birth a Genevese, and my family is one of the most distinguished of that republic. 
My ancestors had been for many years counsellors and syndics, and my father had filled several public situations with honour and reputation.He was respected by all who knew him for his integrity and indefatigable attention to public business.He passed his younger days perpetually occupied by the affairs of his country; a variety of circumstances had prevented his marrying early, nor was it until the decline of life that he became a husband and the father of a family. As the circumstances of his marriage illustrate his character, I cannot refrain from relating them.One of his most intimate friends was a merchant who, from a flourishing state, fell, through numerous mischances, into poverty. This man, whose name was Beaufort, was of a proud and unbending disposition and could not bear to live in poverty and oblivion in the same country where he had formerly been distinguished for his rank and magnificence. Having paid his debts, therefore, in the most honourable manner, he retreated with his daughter to the town of Lucerne, where he lived unknown and in wretchedness.My father loved Beaufort with the truest friendship and was deeply grieved by his retreat in these unfortunate circumstances.He bitterly deplored the false pride which led his friend to a conduct so little worthy of the affection that united them.He lost no time in endeavouring to seek him out, with the hope of persuading him to begin the world again through his credit and assistance. Beaufort had taken effectual measures to conceal himself, and it was ten months before my father discovered his abode.Overjoyed at this discovery, he hastened to the house, which was situated in a mean street near the Reuss. But when he entered, misery and despair alone welcomed him. Beaufort had saved but a very small sum of money from the wreck of his fortunes, but it was sufficient to provide him with sustenance for some months, and in the meantime he hoped to procure some respectable employment in a merchant's house. The interval was, consequently, spent in inaction; his grief only became more deep and rankling when he had leisure for reflection, and at length it took so fast hold of his mind that at the end of three months he lay on a bed of sickness, incapable of any exertion. His daughter attended him with the greatest tenderness, but she saw with despair that their little fund was rapidly decreasing and that there was no other prospect of support.But Caroline Beaufort possessed a mind of an uncommon mould, and her courage rose to support her in her adversity. She procured plain work; she plaited straw and by various means contrived to earn a pittance scarcely sufficient to support life. Several months passed in this manner.Her father grew worse; her time was more entirely occupied in attending him; her means of subsistence decreased; and in the tenth month her father died in her arms, leaving her an orphan and a beggar.This last blow overcame her, and she knelt by Beaufort's coffin weeping bitterly, when my father entered the chamber. 
He came like a protecting spirit to the poor girl, who committed herself to his care; and after the interment of his friend he conducted her to Geneva and placed her under the protection of a relation.Two years after this event Caroline became his wife."}} +{{!-- Step 2: Summarize the input text --}} +{{set "summary" (SummarizePlugin-Summarize input=(get "inputText"))}} -Steps: - - _GLOBAL_FUNCTIONS_.Echo INPUT='' => ORIGINAL_TEXT - - SummarizePlugin.Summarize INPUT='' => RESULT__SUMMARY - - GroundingPlugin.ExtractEntities example_entities='John;Jane;mother;brother;Paris;Rome' topic='people and places' INPUT='$RESULT__SUMMARY' => ENTITIES - - GroundingPlugin.ReferenceCheckEntities reference_context='$ORIGINAL_TEXT' INPUT='$ENTITIES' => RESULT__UNGROUND_ENTITIES - - GroundingPlugin.ExciseEntities ungrounded_entities='$RESULT__UNGROUND_ENTITIES' INPUT='$RESULT__SUMMARY' => RESULT__FINAL_SUMMARY -A possible summary is: +{{!-- Step 3: Extract entities related to people and places --}} +{{set "exampleEntities" (array "John" "Jane" "mother" "brother" "Paris" "Rome")}} +{{set "extractedEntities" (GroundingPlugin-ExtractEntities input=(get "summary") topic="people and places" example_entities=(get "exampleEntities"))}} +{{!-- Step 4: Check if the extracted entities are grounded in the original text --}} +{{set "ungroundedEntities" (GroundingPlugin-ReferenceCheckEntities input=(get "extractedEntities") reference_context=(get "inputText"))}} +{{!-- Step 5: Remove ungrounded entities from the summary --}} +{{set "finalSummary" (GroundingPlugin-ExciseEntities input=(get "summary") ungrounded_entities=(get "ungroundedEntities"))}} -The narrator's father, a respected Genevese politician, befriended Beaufort, a merchant who fell into poverty and hid in Lucerne. After a long search, he found him dying and his daughter Caroline working hard to survive. He took pity on Caroline, buried Beaufort, and married her two years later. - -- narrator - -A possible summary is: - +{{!-- Step 6: Output the final summary --}} +{{json (get "finalSummary")}} - -The father of the story's main character, a respected Genevese politician, befriended Beaufort, a merchant who fell into poverty and hid in Lucerne. After a long search, he found him dying and his daughter Caroline working hard to survive. He took pity on Caroline, buried Beaufort, and married her two years later. +A possible summary is: +Born in Geneva to a distinguished family, the narrator's father married late in life. His close friend Beaufort, once wealthy, fell into poverty and moved to another city with his daughter, Caroline. The narrator's father searched for Beaufort, finding him after ten months, only to witness his decline and eventual death. Caroline, now an orphan, was taken in by the narrator's father, who married her two years later. == DONE == */ diff --git a/dotnet/samples/KernelSyntaxExamples/Example49_LogitBias.cs b/dotnet/samples/KernelSyntaxExamples/Example49_LogitBias.cs index 14c7ee12446f..f61b787c8dce 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example49_LogitBias.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example49_LogitBias.cs @@ -1,22 +1,24 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System;
 using System.Linq;
 using System.Threading.Tasks;
-using Microsoft.SemanticKernel.AI.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.AI.OpenAI;
-using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Examples;
 
 /**
  * Logit_bias is an optional parameter that modifies the likelihood of specified tokens appearing in a Completion.
  * When using the Token Selection Biases parameter, the bias is added to the logits generated by the model prior to sampling.
  */
-// ReSharper disable once InconsistentNaming
-public static class Example49_LogitBias
+public class Example49_LogitBias : BaseTest
 {
-    public static async Task RunAsync()
+    [Fact]
+    public async Task RunAsync()
     {
-        OpenAIChatCompletion chatCompletion = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
+        OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
 
         // To use Logit Bias you will need to know the token ids of the words you want to use.
         // Getting the token ids using the GPT Tokenizer: https://platform.openai.com/tokenizer
@@ -25,35 +27,31 @@ public static async Task RunAsync()
         // "novel literature reading author library story chapter paperback hardcover ebook publishing fiction nonfiction manuscript textbook bestseller bookstore reading list bookworm"
         var keys = new[] { 3919, 626, 17201, 1300, 25782, 9800, 32016, 13571, 43582, 20189, 1891, 10424, 9631, 16497, 12984, 20020, 24046, 13159, 805, 15817, 5239, 2070, 13466, 32932, 8095, 1351, 25323 };
 
-        var settings = new OpenAIRequestSettings();
-
-        // This will make the model try its best to avoid any of the above related words.
-        foreach (var key in keys)
+        var settings = new OpenAIPromptExecutionSettings
         {
-            //This parameter maps tokens to an associated bias value from -100 (a potential ban) to 100 (exclusive selection of the token).
-
+            // This will make the model try its best to avoid any of the above related words.
             //-100 to potentially ban all the tokens from the list.
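            // Each entry in TokenSelectionBiases maps a token id to a bias value from -100 (a potential ban)
            // to 100 (exclusive selection of the token). As an illustration, the ToDictionary call below builds
            // the equivalent of:
            //     new Dictionary<int, int> { { 3919, -100 }, { 626, -100 }, /* ...one entry per id in keys... */ }
            // so every book-related token listed in keys is strongly discouraged in the completion.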
- settings.TokenSelectionBiases.Add(key, -100); - } + TokenSelectionBiases = keys.ToDictionary(key => key, key => -100) + }; - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); + WriteLine("Chat content:"); + WriteLine("------------------------"); - var chatHistory = chatCompletion.CreateNewChat("You are a librarian expert"); + var chatHistory = new ChatHistory("You are a librarian expert"); // First user message chatHistory.AddUserMessage("Hi, I'm looking some suggestions"); await MessageOutputAsync(chatHistory); - string reply = await chatCompletion.GenerateMessageAsync(chatHistory, settings); - chatHistory.AddAssistantMessage(reply); + var replyMessage = await chatCompletionService.GetChatMessageContentAsync(chatHistory, settings); + chatHistory.AddAssistantMessage(replyMessage.Content!); await MessageOutputAsync(chatHistory); chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); await MessageOutputAsync(chatHistory); - reply = await chatCompletion.GenerateMessageAsync(chatHistory, settings); - chatHistory.AddAssistantMessage(reply); + replyMessage = await chatCompletionService.GetChatMessageContentAsync(chatHistory, settings); + chatHistory.AddAssistantMessage(replyMessage.Content!); await MessageOutputAsync(chatHistory); /* Output: @@ -73,13 +71,17 @@ public static async Task RunAsync() /// /// Outputs the last message of the chat history /// - private static Task MessageOutputAsync(ChatHistory chatHistory) + private Task MessageOutputAsync(ChatHistory chatHistory) { - var message = chatHistory.Messages.Last(); + var message = chatHistory.Last(); - Console.WriteLine($"{message.Role}: {message.Content}"); - Console.WriteLine("------------------------"); + WriteLine($"{message.Role}: {message.Content}"); + WriteLine("------------------------"); return Task.CompletedTask; } + + public Example49_LogitBias(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example51_StepwisePlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example51_StepwisePlanner.cs deleted file mode 100644 index c4e3b199e8bf..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example51_StepwisePlanner.cs +++ /dev/null @@ -1,240 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Planners; -using Microsoft.SemanticKernel.Plugins.Core; -using Microsoft.SemanticKernel.Plugins.Web; -using Microsoft.SemanticKernel.Plugins.Web.Bing; -using NCalcPlugins; -using RepoUtils; - -/** - * This example shows how to use Stepwise Planner to create and run a stepwise plan for a given goal. - */ -// ReSharper disable once InconsistentNaming -public static class Example51_StepwisePlanner -{ - // Used to override the max allowed tokens when running the plan - internal static int? ChatMaxTokens = null; - internal static int? TextMaxTokens = null; - - // Used to quickly modify the chat model used by the planner - internal static string? ChatModelOverride = null; //"gpt-35-turbo"; - internal static string? TextModelOverride = null; //"text-davinci-003"; - - internal static string? Suffix = null; - - public static async Task RunAsync() - { - string[] questions = new string[] - { - "What color is the sky?", - "What is the weather in Seattle?", - "What is the tallest mountain on Earth? 
How tall is it divided by 2?", - "What is the capital of France? Who is that city's current mayor? What percentage of their life has been in the 21st century as of today?", - "What is the current day of the calendar year? Using that as an angle in degrees, what is the area of a unit circle with that angle?", - "If a spacecraft travels at 0.99 the speed of light and embarks on a journey to the nearest star system, Alpha Centauri, which is approximately 4.37 light-years away, how much time would pass on Earth during the spacecraft's voyage?" - }; - - foreach (var question in questions) - { - for (int i = 0; i < 1; i++) - { - await RunTextCompletionAsync(question); - await RunChatCompletionAsync(question); - } - } - - PrintResults(); - } - - // print out summary table of ExecutionResults - private static void PrintResults() - { - Console.WriteLine("**************************"); - Console.WriteLine("Execution Results Summary:"); - Console.WriteLine("**************************"); - - foreach (var question in s_executionResults.Select(s => s.question).Distinct()) - { - Console.WriteLine("Question: " + question); - Console.WriteLine("Mode\tModel\tAnswer\tStepsTaken\tIterations\tTimeTaken"); - foreach (var er in s_executionResults.OrderByDescending(s => s.model).Where(s => s.question == question)) - { - Console.WriteLine($"{er.mode}\t{er.model}\t{er.stepsTaken}\t{er.iterations}\t{er.timeTaken}\t{er.answer}"); - } - } - } - - private struct ExecutionResult - { - public string mode; - public string? model; - public string? question; - public string? answer; - public string? stepsTaken; - public string? iterations; - public string? timeTaken; - } - - private static readonly List s_executionResults = new(); - - private static async Task RunTextCompletionAsync(string question) - { - Console.WriteLine("RunTextCompletion"); - ExecutionResult currentExecutionResult = default; - currentExecutionResult.mode = "RunTextCompletion"; - var kernel = GetKernel(ref currentExecutionResult); - await RunWithQuestionAsync(kernel, currentExecutionResult, question, TextMaxTokens); - } - - private static async Task RunChatCompletionAsync(string question, string? model = null) - { - Console.WriteLine("RunChatCompletion"); - ExecutionResult currentExecutionResult = default; - currentExecutionResult.mode = "RunChatCompletion"; - var kernel = GetKernel(ref currentExecutionResult, true, model); - await RunWithQuestionAsync(kernel, currentExecutionResult, question, ChatMaxTokens); - } - - private static async Task RunWithQuestionAsync(IKernel kernel, ExecutionResult currentExecutionResult, string question, int? MaxTokens = null) - { - currentExecutionResult.question = question; - var bingConnector = new BingConnector(TestConfiguration.Bing.ApiKey); - var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); - - kernel.ImportFunctions(webSearchEnginePlugin, "WebSearch"); - kernel.ImportFunctions(new LanguageCalculatorPlugin(kernel), "semanticCalculator"); - kernel.ImportFunctions(new TimePlugin(), "time"); - - // StepwisePlanner is instructed to depend on available functions. - // We expose this function to increase the flexibility in it's ability to answer - // given the relatively small number of functions we have in this example. - // This seems to be particularly helpful in these examples with gpt-35-turbo -- even though it - // does not *use* this function. It seems to help the planner find a better path to the answer. 
- kernel.CreateSemanticFunction( - "Generate an answer for the following question: {{$input}}", - functionName: "GetAnswerForQuestion", - pluginName: "AnswerBot", - description: "Given a question, get an answer and return it as the result of the function"); - - Console.WriteLine("*****************************************************"); - Stopwatch sw = new(); - Console.WriteLine("Question: " + question); - - var plannerConfig = new Microsoft.SemanticKernel.Planners.StepwisePlannerConfig(); - plannerConfig.ExcludedFunctions.Add("TranslateMathProblem"); - plannerConfig.ExcludedFunctions.Add("DaysAgo"); - plannerConfig.ExcludedFunctions.Add("DateMatchingLastDayName"); - plannerConfig.MinIterationTimeMs = 1500; - plannerConfig.MaxIterations = 25; - - if (!string.IsNullOrEmpty(Suffix)) - { - plannerConfig.Suffix = $"{Suffix}\n{plannerConfig.Suffix}"; - currentExecutionResult.question = $"[Assisted] - {question}"; - } - - if (MaxTokens.HasValue) - { - plannerConfig.MaxTokens = MaxTokens.Value; - } - - sw.Start(); - - try - { - StepwisePlanner planner = new(kernel: kernel, config: plannerConfig); - var plan = planner.CreatePlan(question); - - var kernelResult = await kernel.RunAsync(plan); - var planResult = kernelResult.FunctionResults.First(); - var result = kernelResult.GetValue()!; - - if (result.Contains("Result not found, review _stepsTaken to see what", StringComparison.OrdinalIgnoreCase)) - { - Console.WriteLine("Could not answer question in " + plannerConfig.MaxIterations + " iterations"); - currentExecutionResult.answer = "Could not answer question in " + plannerConfig.MaxIterations + " iterations"; - } - else - { - Console.WriteLine("Result: " + result); - currentExecutionResult.answer = result; - } - - if (planResult.TryGetMetadataValue("stepCount", out string stepCount)) - { - Console.WriteLine("Steps Taken: " + stepCount); - currentExecutionResult.stepsTaken = stepCount; - } - - if (planResult.TryGetMetadataValue("functionCount", out string functionCount)) - { - Console.WriteLine("Functions Used: " + functionCount); - } - - if (planResult.TryGetMetadataValue("iterations", out string iterations)) - { - Console.WriteLine("Iterations: " + iterations); - currentExecutionResult.iterations = iterations; - } - } -#pragma warning disable CA1031 - catch (Exception ex) - { - Console.WriteLine("Exception: " + ex); - } - - Console.WriteLine("Time Taken: " + sw.Elapsed); - currentExecutionResult.timeTaken = sw.Elapsed.ToString(); - s_executionResults.Add(currentExecutionResult); - Console.WriteLine("*****************************************************"); - } - - private static IKernel GetKernel(ref ExecutionResult result, bool useChat = false, string? model = null) - { - var builder = new KernelBuilder(); - var maxTokens = 0; - if (useChat) - { - builder.WithAzureChatCompletionService( - model ?? ChatModelOverride ?? TestConfiguration.AzureOpenAI.ChatDeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey, - alsoAsTextCompletion: true, - setAsDefault: true); - - maxTokens = ChatMaxTokens ?? (new Microsoft.SemanticKernel.Planners.StepwisePlannerConfig()).MaxTokens; - result.model = model ?? ChatModelOverride ?? TestConfiguration.AzureOpenAI.ChatDeploymentName; - } - else - { - builder.WithAzureTextCompletionService( - model ?? TextModelOverride ?? TestConfiguration.AzureOpenAI.DeploymentName, - TestConfiguration.AzureOpenAI.Endpoint, - TestConfiguration.AzureOpenAI.ApiKey); - - maxTokens = TextMaxTokens ?? 
(new Microsoft.SemanticKernel.Planners.StepwisePlannerConfig()).MaxTokens; - result.model = model ?? TextModelOverride ?? TestConfiguration.AzureOpenAI.DeploymentName; - } - - Console.WriteLine($"Model: {result.model} ({maxTokens})"); - - var kernel = builder - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithRetryBasic(new() - { - MaxRetryCount = 3, - UseExponentialBackoff = true, - MinRetryDelay = TimeSpan.FromSeconds(3), - }) - .Build(); - - return kernel; - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example52_ApimAuth.cs b/dotnet/samples/KernelSyntaxExamples/Example52_ApimAuth.cs deleted file mode 100644 index 2bc4ecfe8f69..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example52_ApimAuth.cs +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Azure.Core.Pipeline; -using Azure.Identity; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example52_ApimAuth -{ - public static async Task RunAsync() - { - // Azure API Management details - // For more information see 'Protect your Azure OpenAI API keys with Azure API Management' here: https://learn.microsoft.com/en-us/semantic-kernel/deploy/ - var apimUri = new Uri(Env.Var("Apim__Endpoint")); - var subscriptionKey = Env.Var("Apim__SubscriptionKey"); - - // Use interactive browser login - string[] scopes = new string[] { "https://cognitiveservices.azure.com/.default" }; - var credential = new InteractiveBrowserCredential(); - var requestContext = new TokenRequestContext(scopes); - var accessToken = await credential.GetTokenAsync(requestContext); - - // Create HttpClient and include subscription key as a default header - var httpClient = new HttpClient(); - httpClient.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", subscriptionKey); - - // Configure OpenAIClient to use - // - Custom HttpClient with subscription key header - // - Diagnostics to log error response headers from APIM to aid problem determination - // - Authentication using BearerTokenCredential retrieved via interactive browser login - var clientOptions = new OpenAIClientOptions - { - Transport = new HttpClientTransport(httpClient), - Diagnostics = - { - LoggedHeaderNames = { "ErrorSource", "ErrorReason", "ErrorMessage", "ErrorScope", "ErrorSection", "ErrorStatusCode" }, - ApplicationId = Telemetry.HttpUserAgent, - IsTelemetryEnabled = Telemetry.IsTelemetryEnabled, - } - }; - var openAIClient = new OpenAIClient(apimUri, new BearerTokenCredential(accessToken), clientOptions); - - // Create logger factory with default level as warning - using ILoggerFactory loggerFactory = LoggerFactory.Create(builder => - { - builder - .SetMinimumLevel(LogLevel.Warning) - .AddConsole(); - }); - - var kernel = new KernelBuilder() - .WithLoggerFactory(loggerFactory) - .WithAIService(TestConfiguration.AzureOpenAI.ChatDeploymentName, (loggerFactory) => - new AzureChatCompletion(TestConfiguration.AzureOpenAI.ChatDeploymentName, openAIClient, loggerFactory)) - .Build(); - - // Load semantic plugin defined with prompt templates - string folder = RepoFiles.SamplePluginsPath(); - - var funFunctions = 
kernel.ImportSemanticFunctionsFromDirectory( - folder, - "FunPlugin"); - - // Run - var result = await kernel.RunAsync( - "I have no homework", - funFunctions["Excuses"] - ); - Console.WriteLine(result.GetValue()); - - httpClient.Dispose(); - } -} - -public class BearerTokenCredential : TokenCredential -{ - private readonly AccessToken _accessToken; - - // Constructor that takes a Bearer token string and its expiration date - public BearerTokenCredential(AccessToken accessToken) - { - this._accessToken = accessToken; - } - - public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken) - { - return this._accessToken; - } - - public override ValueTask GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken) - { - return new ValueTask(this._accessToken); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example52_CustomOpenAIClient.cs b/dotnet/samples/KernelSyntaxExamples/Example52_CustomOpenAIClient.cs new file mode 100644 index 000000000000..06c7ca172dd6 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example52_CustomOpenAIClient.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Net.Http; +using System.Threading.Tasks; +using Azure; +using Azure.AI.OpenAI; +using Azure.Core.Pipeline; +using Microsoft.SemanticKernel; +using RepoUtils; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public sealed class Example52_CustomOpenAIClient : BaseTest +{ + [Fact] + public async Task RunAsync() + { + this.WriteLine("======== Using a custom OpenAI client ========"); + + string endpoint = TestConfiguration.AzureOpenAI.Endpoint; + string deploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; + string apiKey = TestConfiguration.AzureOpenAI.ApiKey; + + if (endpoint is null || deploymentName is null || apiKey is null) + { + this.WriteLine("Azure OpenAI credentials not found. 
Skipping example."); + return; + } + + // Create an HttpClient and include your custom header(s) + var httpClient = new HttpClient(); + httpClient.DefaultRequestHeaders.Add("x-my-custom-header", "My custom value"); + + // Configure OpenAIClient to use the customized HttpClient + var clientOptions = new OpenAIClientOptions + { + Transport = new HttpClientTransport(httpClient), + }; + var openAIClient = new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), clientOptions); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.AddAzureOpenAIChatCompletion(deploymentName, openAIClient); + Kernel kernel = builder.Build(); + + // Load semantic plugin defined with prompt templates + string folder = RepoFiles.SamplePluginsPath(); + + kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "FunPlugin")); + + // Run + var result = await kernel.InvokeAsync( + kernel.Plugins["FunPlugin"]["Excuses"], + new() { ["input"] = "I have no homework" } + ); + this.WriteLine(result.GetValue()); + + httpClient.Dispose(); + } + + public Example52_CustomOpenAIClient(ITestOutputHelper output) : base(output) { } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example54_AzureChatCompletionWithData.cs b/dotnet/samples/KernelSyntaxExamples/Example54_AzureChatCompletionWithData.cs index a28ef71f9874..db63e3f08a20 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example54_AzureChatCompletionWithData.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example54_AzureChatCompletionWithData.cs @@ -3,53 +3,57 @@ using System; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -/** - * This example shows how to use Azure OpenAI Chat Completion with data. - * More information: - */ -// ReSharper disable once InconsistentNaming -public static class Example54_AzureChatCompletionWithData +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using xRetry; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// This example demonstrates how to use Azure OpenAI Chat Completion with data. +/// +/// +/// Set-up instructions: +/// 1. Upload the following content in Azure Blob Storage in a .txt file. +/// You can follow the steps here: +/// +/// Emily and David, two passionate scientists, met during a research expedition to Antarctica. +/// Bonded by their love for the natural world and shared curiosity, +/// they uncovered a groundbreaking phenomenon in glaciology that could +/// potentially reshape our understanding of climate change. +/// +/// 2. Set your secrets: +/// dotnet user-secrets set "AzureAISearch:Endpoint" "https://... .search.windows.net" +/// dotnet user-secrets set "AzureAISearch:ApiKey" "{Key from your Search service resource}" +/// dotnet user-secrets set "AzureAISearch:IndexName" "..." +/// +public class Example54_AzureChatCompletionWithData : BaseTest { - public static async Task RunAsync() + [RetryFact(typeof(HttpOperationException))] + public async Task ExampleWithChatCompletionAsync() { - // Uploaded content in Azure Blob Storage in .txt file: + WriteLine("=== Example with Chat Completion ==="); - // Emily and David, two passionate scientists, met during a research expedition to Antarctica. - // Bonded by their love for the natural world and shared curiosity, - // they uncovered a groundbreaking phenomenon in glaciology that could - // potentially reshape our understanding of climate change. 
- - await ExampleWithChatCompletionAsync(); - await ExampleWithKernelAsync(); - } - - private static async Task ExampleWithChatCompletionAsync() - { - Console.WriteLine("=== Example with Chat Completion ==="); - - var chatCompletion = new AzureChatCompletionWithData(GetCompletionWithDataConfig()); - var chatHistory = chatCompletion.CreateNewChat(); + var chatCompletion = new AzureOpenAIChatCompletionWithDataService(GetCompletionWithDataConfig()); + var chatHistory = new ChatHistory(); // First question without previous context based on uploaded content. var ask = "How did Emily and David meet?"; chatHistory.AddUserMessage(ask); // Chat Completion example - var chatResult = (await chatCompletion.GetChatCompletionsAsync(chatHistory))[0]; - var chatMessage = await chatResult.GetChatMessageAsync(); + var chatMessage = (AzureOpenAIWithDataChatMessageContent)await chatCompletion.GetChatMessageContentAsync(chatHistory); - var response = chatMessage.Content; - var toolResponse = chatResult.ModelResult.GetResult().ToolContent; + var response = chatMessage.Content!; + var toolResponse = chatMessage.ToolContent; // Output // Ask: How did Emily and David meet? // Response: Emily and David, both passionate scientists, met during a research expedition to Antarctica. - Console.WriteLine($"Ask: {ask}"); - Console.WriteLine($"Response: {response}"); - Console.WriteLine(); + WriteLine($"Ask: {ask}"); + WriteLine($"Response: {response}"); + WriteLine(); // Chat history maintenance if (!string.IsNullOrEmpty(toolResponse)) @@ -64,74 +68,72 @@ private static async Task ExampleWithChatCompletionAsync() chatHistory.AddUserMessage(ask); // Chat Completion Streaming example - Console.WriteLine($"Ask: {ask}"); - Console.WriteLine("Response: "); + WriteLine($"Ask: {ask}"); + WriteLine("Response: "); - await foreach (var result in chatCompletion.GetStreamingChatCompletionsAsync(chatHistory)) + await foreach (var word in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory)) { - await foreach (var message in result.GetStreamingChatMessageAsync()) - { - // Output - // Ask: What are Emily and David studying? - // Response: They are passionate scientists who study glaciology, - // a branch of geology that deals with the study of ice and its effects. - Console.Write(message.Content); - } + Write(word); } - Console.WriteLine(Environment.NewLine); + WriteLine(Environment.NewLine); } - private static async Task ExampleWithKernelAsync() + [RetryFact(typeof(HttpOperationException))] + public async Task ExampleWithKernelAsync() { - Console.WriteLine("=== Example with Kernel ==="); + WriteLine("=== Example with Kernel ==="); var ask = "How did Emily and David meet?"; var completionWithDataConfig = GetCompletionWithDataConfig(); - IKernel kernel = new KernelBuilder() - .WithAzureChatCompletionService(config: completionWithDataConfig) + Kernel kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(config: completionWithDataConfig) .Build(); - var semanticFunction = kernel.CreateSemanticFunction("Question: {{$input}}"); + var function = kernel.CreateFunctionFromPrompt("Question: {{$input}}"); // First question without previous context based on uploaded content. - var response = await kernel.RunAsync(ask, semanticFunction); + var response = await kernel.InvokeAsync(function, new() { ["input"] = ask }); // Output // Ask: How did Emily and David meet? // Response: Emily and David, both passionate scientists, met during a research expedition to Antarctica. 
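        // Note: this kernel route reuses the AzureOpenAIChatCompletionWithDataConfig returned by
        // GetCompletionWithDataConfig(), so the prompt function's answers are grounded in the same
        // Azure AI Search index as the chat-completion example above.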
- Console.WriteLine($"Ask: {ask}"); - Console.WriteLine($"Response: {response.GetValue()}"); - Console.WriteLine(); + WriteLine($"Ask: {ask}"); + WriteLine($"Response: {response.GetValue()}"); + WriteLine(); // Second question based on uploaded content. ask = "What are Emily and David studying?"; - response = await kernel.RunAsync(ask, semanticFunction); + response = await kernel.InvokeAsync(function, new() { ["input"] = ask }); // Output // Ask: What are Emily and David studying? // Response: They are passionate scientists who study glaciology, // a branch of geology that deals with the study of ice and its effects. - Console.WriteLine($"Ask: {ask}"); - Console.WriteLine($"Response: {response.GetValue()}"); - Console.WriteLine(); + WriteLine($"Ask: {ask}"); + WriteLine($"Response: {response.GetValue()}"); + WriteLine(); } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - private static AzureChatCompletionWithDataConfig GetCompletionWithDataConfig() + private static AzureOpenAIChatCompletionWithDataConfig GetCompletionWithDataConfig() { - return new AzureChatCompletionWithDataConfig + return new AzureOpenAIChatCompletionWithDataConfig { CompletionModelId = TestConfiguration.AzureOpenAI.ChatDeploymentName, CompletionEndpoint = TestConfiguration.AzureOpenAI.Endpoint, CompletionApiKey = TestConfiguration.AzureOpenAI.ApiKey, - DataSourceEndpoint = TestConfiguration.ACS.Endpoint, - DataSourceApiKey = TestConfiguration.ACS.ApiKey, - DataSourceIndex = TestConfiguration.ACS.IndexName + DataSourceEndpoint = TestConfiguration.AzureAISearch.Endpoint, + DataSourceApiKey = TestConfiguration.AzureAISearch.ApiKey, + DataSourceIndex = TestConfiguration.AzureAISearch.IndexName }; } + + public Example54_AzureChatCompletionWithData(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example55_TextChunker.cs b/dotnet/samples/KernelSyntaxExamples/Example55_TextChunker.cs index a9a83359db7b..15541df97b3c 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example55_TextChunker.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example55_TextChunker.cs @@ -4,50 +4,23 @@ using System.Collections.Generic; using System.Diagnostics; using System.IO; -using System.Threading.Tasks; using Microsoft.DeepDev; using Microsoft.ML.Tokenizers; using Microsoft.SemanticKernel.Text; using Resources; using SharpToken; +using Xunit; +using Xunit.Abstractions; using static Microsoft.SemanticKernel.Text.TextChunker; -// ReSharper disable once InconsistentNaming -public static class Example55_TextChunker -{ - private const string Text = @"The city of Venice, located in the northeastern part of Italy, -is renowned for its unique geographical features. Built on more than 100 small islands in a lagoon in the -Adriatic Sea, it has no roads, just canals including the Grand Canal thoroughfare lined with Renaissance and -Gothic palaces. The central square, Piazza San Marco, contains St. Mark's Basilica, which is tiled with Byzantine -mosaics, and the Campanile bell tower offering views of the city's red roofs. - -The Amazon Rainforest, also known as Amazonia, is a moist broadleaf tropical rainforest in the Amazon biome that -covers most of the Amazon basin of South America. This basin encompasses 7 million square kilometers, of which -5.5 million square kilometers are covered by the rainforest. This region includes territory belonging to nine nations -and 3.4 million square kilometers of uncontacted tribes. 
The Amazon represents over half of the planet's remaining -rainforests and comprises the largest and most biodiverse tract of tropical rainforest in the world. +namespace Examples; -The Great Barrier Reef is the world's largest coral reef system composed of over 2,900 individual reefs and 900 islands -stretching for over 2,300 kilometers over an area of approximately 344,400 square kilometers. The reef is located in the -Coral Sea, off the coast of Queensland, Australia. The Great Barrier Reef can be seen from outer space and is the world's -biggest single structure made by living organisms. This reef structure is composed of and built by billions of tiny organisms, -known as coral polyps."; - - public static Task RunAsync() - { - RunExample(); - RunExampleForTokenCounterType(TokenCounterType.SharpToken); - RunExampleForTokenCounterType(TokenCounterType.MicrosoftML); - RunExampleForTokenCounterType(TokenCounterType.MicrosoftMLRoberta); - RunExampleForTokenCounterType(TokenCounterType.DeepDev); - RunExampleWithHeader(); - - return Task.CompletedTask; - } - - private static void RunExample() +public class Example55_TextChunker : BaseTest +{ + [Fact] + public void RunExample() { - Console.WriteLine("=== Text chunking ==="); + WriteLine("=== Text chunking ==="); var lines = TextChunker.SplitPlainTextLines(Text, 40); var paragraphs = TextChunker.SplitPlainTextParagraphs(lines, 120); @@ -55,9 +28,14 @@ private static void RunExample() WriteParagraphsToConsole(paragraphs); } - private static void RunExampleForTokenCounterType(TokenCounterType counterType) + [Theory] + [InlineData(TokenCounterType.SharpToken)] + [InlineData(TokenCounterType.MicrosoftML)] + [InlineData(TokenCounterType.MicrosoftMLRoberta)] + [InlineData(TokenCounterType.DeepDev)] + public void RunExampleForTokenCounterType(TokenCounterType counterType) { - Console.WriteLine($"=== Text chunking with a custom({counterType}) token counter ==="); + WriteLine($"=== Text chunking with a custom({counterType}) token counter ==="); var sw = new Stopwatch(); sw.Start(); var tokenCounter = s_tokenCounterFactory(counterType); @@ -66,13 +44,14 @@ private static void RunExampleForTokenCounterType(TokenCounterType counterType) var paragraphs = TextChunker.SplitPlainTextParagraphs(lines, 120, tokenCounter: tokenCounter); sw.Stop(); - Console.WriteLine($"Elapsed time: {sw.ElapsedMilliseconds} ms"); + WriteLine($"Elapsed time: {sw.ElapsedMilliseconds} ms"); WriteParagraphsToConsole(paragraphs); } - private static void RunExampleWithHeader() + [Fact] + public void RunExampleWithHeader() { - Console.WriteLine("=== Text chunking with chunk header ==="); + WriteLine("=== Text chunking with chunk header ==="); var lines = TextChunker.SplitPlainTextLines(Text, 40); var paragraphs = TextChunker.SplitPlainTextParagraphs(lines, 150, chunkHeader: "DOCUMENT NAME: test.txt\n\n"); @@ -80,20 +59,20 @@ private static void RunExampleWithHeader() WriteParagraphsToConsole(paragraphs); } - private static void WriteParagraphsToConsole(List paragraphs) + private void WriteParagraphsToConsole(List paragraphs) { for (var i = 0; i < paragraphs.Count; i++) { - Console.WriteLine(paragraphs[i]); + WriteLine(paragraphs[i]); if (i < paragraphs.Count - 1) { - Console.WriteLine("------------------------"); + WriteLine("------------------------"); } } } - private enum TokenCounterType + public enum TokenCounterType { SharpToken, MicrosoftML, @@ -157,10 +136,8 @@ private enum TokenCounterType /// private static TokenCounter DeepDevTokenCounter => (string input) => { -#pragma 
warning disable VSTHRD002 // Avoid problematic synchronous waits // Initialize encoding by encoding name var tokenizer = TokenizerBuilder.CreateByEncoderNameAsync("cl100k_base").GetAwaiter().GetResult(); -#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits // Initialize encoding by model name // var tokenizer = TokenizerBuilder.CreateByModelNameAsync("gpt-4").GetAwaiter().GetResult(); @@ -170,19 +147,34 @@ private enum TokenCounterType }; private static readonly Func s_tokenCounterFactory = (TokenCounterType counterType) => - { - switch (counterType) + counterType switch { - case TokenCounterType.SharpToken: - return (string input) => SharpTokenTokenCounter(input); - case TokenCounterType.MicrosoftML: - return (string input) => MicrosoftMLTokenCounter(input); - case TokenCounterType.DeepDev: - return (string input) => DeepDevTokenCounter(input); - case TokenCounterType.MicrosoftMLRoberta: - return (string input) => MicrosoftMLRobertaTokenCounter(input); - default: - throw new ArgumentOutOfRangeException(nameof(counterType), counterType, null); - } - }; + TokenCounterType.SharpToken => (string input) => SharpTokenTokenCounter(input), + TokenCounterType.MicrosoftML => (string input) => MicrosoftMLTokenCounter(input), + TokenCounterType.DeepDev => (string input) => DeepDevTokenCounter(input), + TokenCounterType.MicrosoftMLRoberta => (string input) => MicrosoftMLRobertaTokenCounter(input), + _ => throw new ArgumentOutOfRangeException(nameof(counterType), counterType, null), + }; + + private const string Text = @"The city of Venice, located in the northeastern part of Italy, +is renowned for its unique geographical features. Built on more than 100 small islands in a lagoon in the +Adriatic Sea, it has no roads, just canals including the Grand Canal thoroughfare lined with Renaissance and +Gothic palaces. The central square, Piazza San Marco, contains St. Mark's Basilica, which is tiled with Byzantine +mosaics, and the Campanile bell tower offering views of the city's red roofs. + +The Amazon Rainforest, also known as Amazonia, is a moist broadleaf tropical rainforest in the Amazon biome that +covers most of the Amazon basin of South America. This basin encompasses 7 million square kilometers, of which +5.5 million square kilometers are covered by the rainforest. This region includes territory belonging to nine nations +and 3.4 million square kilometers of uncontacted tribes. The Amazon represents over half of the planet's remaining +rainforests and comprises the largest and most biodiverse tract of tropical rainforest in the world. + +The Great Barrier Reef is the world's largest coral reef system composed of over 2,900 individual reefs and 900 islands +stretching for over 2,300 kilometers over an area of approximately 344,400 square kilometers. The reef is located in the +Coral Sea, off the coast of Queensland, Australia. The Great Barrier Reef can be seen from outer space and is the world's +biggest single structure made by living organisms. 
This reef structure is composed of and built by billions of tiny organisms, +known as coral polyps."; + + public Example55_TextChunker(ITestOutputHelper output) : base(output) + { + } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example56_TemplateMethodFunctionsWithMultipleArguments.cs b/dotnet/samples/KernelSyntaxExamples/Example56_TemplateMethodFunctionsWithMultipleArguments.cs new file mode 100644 index 000000000000..9e7eeaa4b125 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example56_TemplateMethodFunctionsWithMultipleArguments.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Plugins.Core; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public class Example56_TemplateMethodFunctionsWithMultipleArguments : BaseTest +{ + /// + /// Show how to invoke a Method Function written in C# with multiple arguments + /// from a Prompt Function written in natural language + /// + [Fact] + public async Task RunAsync() + { + WriteLine("======== TemplateMethodFunctionsWithMultipleArguments ========"); + + string serviceId = TestConfiguration.AzureOpenAI.ServiceId; + string apiKey = TestConfiguration.AzureOpenAI.ApiKey; + string deploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; + string modelId = TestConfiguration.AzureOpenAI.ChatModelId; + string endpoint = TestConfiguration.AzureOpenAI.Endpoint; + + if (apiKey == null || deploymentName == null || modelId == null || endpoint == null) + { + WriteLine("AzureOpenAI modelId, endpoint, apiKey, or deploymentName not found. Skipping example."); + return; + } + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddLogging(c => c.AddConsole()); + builder.AddAzureOpenAIChatCompletion( + deploymentName: deploymentName, + endpoint: endpoint, + serviceId: serviceId, + apiKey: apiKey, + modelId: modelId); + Kernel kernel = builder.Build(); + + var arguments = new KernelArguments(); + arguments["word2"] = " Potter"; + + // Load native plugin into the kernel function collection, sharing its functions with prompt templates + // Functions loaded here are available as "text.*" + kernel.ImportPluginFromType("text"); + + // Prompt Function invoking text.Concat method function with named arguments input and input2 where input is a string and input2 is set to a variable from context called word2. 
+ const string FunctionDefinition = @" + Write a haiku about the following: {{text.Concat input='Harry' input2=$word2}} +"; + + // This allows to see the prompt before it's sent to OpenAI + WriteLine("--- Rendered Prompt"); + var promptTemplateFactory = new KernelPromptTemplateFactory(); + var promptTemplate = promptTemplateFactory.Create(new PromptTemplateConfig(FunctionDefinition)); + var renderedPrompt = await promptTemplate.RenderAsync(kernel, arguments); + WriteLine(renderedPrompt); + + // Run the prompt / prompt function + var haiku = kernel.CreateFunctionFromPrompt(FunctionDefinition, new OpenAIPromptExecutionSettings() { MaxTokens = 100 }); + + // Show the result + WriteLine("--- Prompt Function result"); + var result = await kernel.InvokeAsync(haiku, arguments); + WriteLine(result.GetValue()); + + /* OUTPUT: + +--- Rendered Prompt + + Write a haiku about the following: Harry Potter + +--- Prompt Function result +A boy with a scar, +Wizarding world he explores, +Harry Potter's tale. + */ + } + + public Example56_TemplateMethodFunctionsWithMultipleArguments(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example56_TemplateNativeFunctionsWithMultipleArguments.cs b/dotnet/samples/KernelSyntaxExamples/Example56_TemplateNativeFunctionsWithMultipleArguments.cs deleted file mode 100644 index 2b3810405fae..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example56_TemplateNativeFunctionsWithMultipleArguments.cs +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Plugins.Core; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example56_TemplateNativeFunctionsWithMultipleArguments -{ - /// - /// Show how to invoke a Native Function written in C# with multiple arguments - /// from a Semantic Function written in natural language - /// - public static async Task RunAsync() - { - Console.WriteLine("======== TemplateNativeFunctionsWithMultipleArguments ========"); - - string serviceId = TestConfiguration.AzureOpenAI.ServiceId; - string apiKey = TestConfiguration.AzureOpenAI.ApiKey; - string deploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; - string endpoint = TestConfiguration.AzureOpenAI.Endpoint; - - if (serviceId == null || apiKey == null || deploymentName == null || endpoint == null) - { - Console.WriteLine("Azure serviceId, endpoint, apiKey, or deploymentName not found. Skipping example."); - return; - } - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - deploymentName: deploymentName, - endpoint: endpoint, - serviceId: serviceId, - apiKey: apiKey) - .Build(); - - var variableName = "word2"; - var variableValue = " Potter"; - var context = kernel.CreateNewContext(); - context.Variables[variableName] = variableValue; - - // Load native plugin into the kernel function collection, sharing its functions with prompt templates - // Functions loaded here are available as "text.*" - kernel.ImportFunctions(new TextPlugin(), "text"); - - // Semantic Function invoking text.Concat native function with named arguments input and input2 where input is a string and input2 is set to a variable from context called word2. 
- const string FunctionDefinition = @" - Write a haiku about the following: {{text.Concat input='Harry' input2=$word2}} -"; - - // This allows to see the prompt before it's sent to OpenAI - Console.WriteLine("--- Rendered Prompt"); - var promptRenderer = new BasicPromptTemplateEngine(); - var renderedPrompt = await promptRenderer.RenderAsync(FunctionDefinition, context); - Console.WriteLine(renderedPrompt); - - // Run the prompt / semantic function - var haiku = kernel.CreateSemanticFunction(FunctionDefinition, new OpenAIRequestSettings() { MaxTokens = 100 }); - - // Show the result - Console.WriteLine("--- Semantic Function result"); - var result = await kernel.RunAsync(context.Variables, haiku); - Console.WriteLine(result.GetValue()); - - /* OUTPUT: - ---- Rendered Prompt - - Write a haiku about the following: Harry Potter - ---- Semantic Function result -A boy with a scar, -Wizarding world he explores, -Harry Potter's tale. - */ - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example57_FunctionEventHandlers.cs b/dotnet/samples/KernelSyntaxExamples/Example57_FunctionEventHandlers.cs deleted file mode 100644 index 137111677baf..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example57_FunctionEventHandlers.cs +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text.RegularExpressions; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Events; -using Microsoft.SemanticKernel.Orchestration; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example57_FunctionEventHandlers -{ - private static string? s_openAIModelId; - private static string? s_openAIApiKey; - - public static async Task RunAsync() - { - Console.WriteLine("\n======== Using Function Execution Handlers ========\n"); - - s_openAIModelId = TestConfiguration.OpenAI.ChatModelId; - s_openAIApiKey = TestConfiguration.OpenAI.ApiKey; - - if (s_openAIModelId == null || s_openAIApiKey == null) - { - Console.WriteLine("OpenAI credentials not found. Skipping example."); - return; - } - - await GetUsageAsync(); - - await ChangingResultAsync(); - - await BeforeInvokeCancellationAsync(); - - await AfterInvokeCancellationAsync(); - - await SkippingFunctionsAsync(); - - await RepeatFunctionsAsync(); - } - - private static async Task GetUsageAsync() - { - Console.WriteLine("\n======== Get Rendered Prompt and Usage Data ========\n"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( - modelId: s_openAIModelId!, - apiKey: s_openAIApiKey!) - .Build(); - - const string FunctionPrompt = "Write a random paragraph about: {{$input}}."; - - var excuseFunction = kernel.CreateSemanticFunction( - FunctionPrompt, - pluginName: "MyPlugin", - functionName: "Excuse", - requestSettings: new OpenAIRequestSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 }); - - void MyPreHandler(object? sender, FunctionInvokingEventArgs e) - { - Console.WriteLine($"{e.FunctionView.PluginName}.{e.FunctionView.Name} : Pre Execution Handler - Triggered"); - } - - void MyRemovedPreExecutionHandler(object? sender, FunctionInvokingEventArgs e) - { - Console.WriteLine($"{e.FunctionView.PluginName}.{e.FunctionView.Name} : Pre Execution Handler - Should not trigger"); - e.Cancel(); - } - - void MyPostExecutionHandler(object? 
sender, FunctionInvokedEventArgs e) - { - var modelResults = e.Metadata["ModelResults"] as IReadOnlyCollection; - Console.WriteLine($"{e.FunctionView.PluginName}.{e.FunctionView.Name} : Post Execution Handler - Total Tokens: {modelResults?.First().GetOpenAIChatResult().Usage.TotalTokens}"); - } - - kernel.FunctionInvoking += MyPreHandler; - kernel.FunctionInvoked += MyPostExecutionHandler; - - // Adding and Removing a handler - kernel.FunctionInvoking += MyRemovedPreExecutionHandler; - kernel.FunctionInvoking -= MyRemovedPreExecutionHandler; - - const string Input = "I missed the F1 final race"; - var result = await kernel.RunAsync(Input, excuseFunction); - Console.WriteLine($"Function Result: {result.GetValue()}"); - } - - private static async Task ChangingResultAsync() - { - Console.WriteLine("\n======== Changing/Filtering Function Result ========\n"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( - modelId: s_openAIModelId!, - apiKey: s_openAIApiKey!) - .Build(); - - const string FunctionPrompt = "Write a paragraph about Handlers."; - - var writerFunction = kernel.CreateSemanticFunction( - FunctionPrompt, - pluginName: "MyPlugin", - functionName: "Writer", - requestSettings: new OpenAIRequestSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 }); - - void MyChangeDataHandler(object? sender, FunctionInvokedEventArgs e) - { - var originalOutput = e.SKContext.Result; - - //Use Regex to redact all vowels and numbers - var newOutput = Regex.Replace(originalOutput, "[aeiouAEIOU0-9]", "*"); - - e.SKContext.Variables.Update(newOutput); - } - - kernel.FunctionInvoked += MyChangeDataHandler; - - var result = await kernel.RunAsync(writerFunction); - - Console.WriteLine($"Function Result: {result.GetValue()}"); - } - - private static async Task BeforeInvokeCancellationAsync() - { - Console.WriteLine("\n======== Cancelling Pipeline Execution - Invoking event ========\n"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( - modelId: s_openAIModelId!, - apiKey: s_openAIApiKey!) - .Build(); - - const string FunctionPrompt = "Write a paragraph about: Cancellation."; - - var writerFunction = kernel.CreateSemanticFunction( - FunctionPrompt, - pluginName: "MyPlugin", - functionName: "Writer", - requestSettings: new OpenAIRequestSettings() { MaxTokens = 1000, Temperature = 1, TopP = 0.5 }); - - // Adding new inline handler to cancel/prevent function execution - kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => - { - Console.WriteLine($"{e.FunctionView.PluginName}.{e.FunctionView.Name} : FunctionInvoking - Cancelling all subsequent invocations"); - e.Cancel(); - }; - - // Technically invoked will never be called since the function will be cancelled - int functionInvokedCount = 0; - kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => - { - functionInvokedCount++; - }; - - var result = await kernel.RunAsync(writerFunction); - Console.WriteLine($"Function Invocation Times: {functionInvokedCount}"); - } - - private static async Task AfterInvokeCancellationAsync() - { - Console.WriteLine("\n======== Cancelling Pipeline Execution - Invoked event ========\n"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( - modelId: s_openAIModelId!, - apiKey: s_openAIApiKey!) 
- .Build(); - - int functionInvokingCount = 0; - int functionInvokedCount = 0; - - var firstFunction = kernel.CreateSemanticFunction("Write a phrase with Invoke.", functionName: "InvokePhrase"); - var secondFunction = kernel.CreateSemanticFunction("Write a phrase with Cancellation.", functionName: "CancellationPhrase"); - - // Adding new inline handler to count invoking events - kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => - { - functionInvokingCount++; - }; - - // Invoked will never be called twice (for the secondFunction) since Invoked from the first is cancelling. - kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => - { - functionInvokedCount++; - e.Cancel(); - }; - - var result = await kernel.RunAsync(secondFunction); - Console.WriteLine($"Function Invoked Times: {functionInvokedCount}"); - Console.WriteLine($"Function Invoking Times: {functionInvokingCount}"); - } - - private static async Task SkippingFunctionsAsync() - { - Console.WriteLine("\n======== Skipping a Function in the Pipeline ========\n"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( - modelId: s_openAIModelId!, - apiKey: s_openAIApiKey!) - .Build(); - - var skipMeFunction = kernel.CreateSemanticFunction("Write a paragraph about Skipping", - pluginName: "MyPlugin", - functionName: "SkipMe"); - - var dontSkipMeFunction = kernel.CreateSemanticFunction("Write a paragraph about Handlers", - pluginName: "MyPlugin", - functionName: "DontSkipMe"); - - kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => - { - if (e.FunctionView.Name == "SkipMe") - { - e.Skip(); - Console.WriteLine($"Function {e.FunctionView.Name} will be skipped"); - return; - } - - Console.WriteLine($"Function {e.FunctionView.Name} will not be skipped"); - }; - - kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => - { - Console.WriteLine($"Only not skipped functions will trigger invoked event - Function name: {e.FunctionView.Name}"); - }; - - var result = await kernel.RunAsync( - skipMeFunction, - dontSkipMeFunction); - - Console.WriteLine($"Final result: {result.GetValue()}"); - } - - private static async Task RepeatFunctionsAsync() - { - Console.WriteLine("\n======== Repeating a Function in the Pipeline ========"); - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService( - modelId: s_openAIModelId!, - apiKey: s_openAIApiKey!) - .Build(); - - var repeatSubjects = new Queue(new[] { "Life", "Work", "Leisure" }); - - var repeatMeFunction = kernel.CreateSemanticFunction("Write a sentence about {{$input}}", - pluginName: "MyPlugin", - functionName: "RepeatMe"); - - var repeatTimes = 0; - kernel.FunctionInvoked += (object? 
sender, FunctionInvokedEventArgs e) => - { - Console.WriteLine($"\nFunction {e.FunctionView.Name} executed:"); - Console.WriteLine($"Result: {e.SKContext.Result}"); - - if (repeatTimes < 3) - { - // Flag the Kernel to repeat the function - e.Repeat(); - - // Redefine the input variable to repeat the function - e.SKContext.Variables.Update(repeatSubjects.Dequeue()); - - repeatTimes++; - Console.WriteLine("Repeat requested!"); - - return; - } - }; - - await kernel.RunAsync("Repetition", repeatMeFunction); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example57_KernelHooks.cs b/dotnet/samples/KernelSyntaxExamples/Example57_KernelHooks.cs new file mode 100644 index 000000000000..d0e33e991d83 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example57_KernelHooks.cs @@ -0,0 +1,282 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using RepoUtils; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +#pragma warning disable CS0618 // Events are deprecated + +public class Example57_KernelHooks : BaseTest +{ + /// + /// Demonstrate using kernel invocation-hooks to monitor usage: + /// + /// + /// + [Fact] + public async Task GetUsageAsync() + { + WriteLine("\n======== Get Usage Data ========\n"); + + // Create kernel instance + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: _openAIModelId!, + apiKey: _openAIApiKey!) + .Build(); + + // Initialize prompt + const string FunctionPrompt = "Write a random paragraph about: {{$input}}."; + + var excuseFunction = kernel.CreateFunctionFromPrompt( + FunctionPrompt, + functionName: "Excuse", + executionSettings: new OpenAIPromptExecutionSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 }); + + // Define hooks + void MyPreHandler(object? sender, FunctionInvokingEventArgs e) + { + WriteLine($"{e.Function.Name} : Pre Execution Handler - Triggered"); + } + + void MyRemovedPreExecutionHandler(object? sender, FunctionInvokingEventArgs e) + { + WriteLine($"{e.Function.Name} : Pre Execution Handler - Should not trigger"); + e.Cancel = true; + } + + void MyPostExecutionHandler(object? sender, FunctionInvokedEventArgs e) + { + WriteLine($"{e.Function.Name} : Post Execution Handler - Usage: {e.Result.Metadata?["Usage"]?.AsJson()}"); + } + + kernel.FunctionInvoking += MyPreHandler; + kernel.FunctionInvoked += MyPostExecutionHandler; + + // Demonstrate pattern for removing a handler. + // Note: MyRemovedPreExecutionHandler will cancel execution if not removed. + kernel.FunctionInvoking += MyRemovedPreExecutionHandler; + kernel.FunctionInvoking -= MyRemovedPreExecutionHandler; + + // Invoke prompt to trigger execution hooks. + const string Input = "I missed the F1 final race"; + var result = await kernel.InvokeAsync(excuseFunction, new() { ["input"] = Input }); + WriteLine($"Function Result: {result}"); + } + + /// + /// Demonstrate using kernel-hooks to around prompt rendering: + /// + /// + /// + [Fact] + public async Task GetRenderedPromptAsync() + { + WriteLine("\n======== Get Rendered Prompt ========\n"); + + // Create kernel instance + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: _openAIModelId!, + apiKey: _openAIApiKey!) 
+        .Build();
+
+        // Initialize prompt
+        const string FunctionPrompt = "Write a random paragraph about: {{$input}} in the style of {{$style}}.";
+
+        var excuseFunction = kernel.CreateFunctionFromPrompt(
+            FunctionPrompt,
+            functionName: "Excuse",
+            executionSettings: new OpenAIPromptExecutionSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 });
+
+        // Define hooks
+        void MyRenderingHandler(object? sender, PromptRenderingEventArgs e)
+        {
+            WriteLine($"{e.Function.Name} : Prompt Rendering Handler - Triggered");
+            e.Arguments["style"] = "Seinfeld";
+        }
+
+        void MyRenderedHandler(object? sender, PromptRenderedEventArgs e)
+        {
+            WriteLine($"{e.Function.Name} : Prompt Rendered Handler - Triggered");
+            e.RenderedPrompt += " USE SHORT, CLEAR, COMPLETE SENTENCES.";
+
+            WriteLine(e.RenderedPrompt);
+        }
+
+        kernel.PromptRendering += MyRenderingHandler;
+        kernel.PromptRendered += MyRenderedHandler;
+
+        // Invoke prompt to trigger prompt rendering hooks.
+        const string Input = "I missed the F1 final race";
+        var result = await kernel.InvokeAsync(excuseFunction, new() { ["input"] = Input });
+        WriteLine($"Function Result: {result.GetValue<string>()}");
+    }
+
+    /// <summary>
+    /// Demonstrate using kernel invocation-hooks to post-process a result:
+    ///
+    /// </summary>
+    [Fact]
+    public async Task ChangingResultAsync()
+    {
+        WriteLine("\n======== Changing/Filtering Function Result ========\n");
+
+        // Create kernel instance
+        Kernel kernel = Kernel.CreateBuilder()
+            .AddOpenAIChatCompletion(
+                modelId: _openAIModelId!,
+                apiKey: _openAIApiKey!)
+            .Build();
+
+        // Initialize function
+        const string FunctionPrompt = "Write a paragraph about Handlers.";
+
+        var writerFunction = kernel.CreateFunctionFromPrompt(
+            FunctionPrompt,
+            functionName: "Writer",
+            executionSettings: new OpenAIPromptExecutionSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 });
+
+        // Define hook
+        static void MyChangeDataHandler(object? sender, FunctionInvokedEventArgs e)
+        {
+            var originalOutput = e.Result.ToString();
+
+            // Use Regex to redact all vowels and numbers
+            var newOutput = Regex.Replace(originalOutput, "[aeiouAEIOU0-9]", "*");
+
+            e.SetResultValue(newOutput);
+        }
+
+        kernel.FunctionInvoked += MyChangeDataHandler;
+
+        // Invoke prompt to trigger execution hooks.
+        var result = await kernel.InvokeAsync(writerFunction);
+
+        WriteLine($"Function Result: {result.GetValue<string>()}");
+    }
+
+    /// <summary>
+    /// Demonstrate using kernel invocation-hooks to cancel prior to execution:
+    ///
+    ///
+    /// </summary>
+    [Fact]
+    public async Task BeforeInvokeCancellationAsync()
+    {
+        WriteLine("\n======== Cancelling Pipeline Execution - Invoking event ========\n");
+
+        // Create kernel instance
+        Kernel kernel = Kernel.CreateBuilder()
+            .AddOpenAIChatCompletion(
+                modelId: _openAIModelId!,
+                apiKey: _openAIApiKey!)
+            .Build();
+
+        // Initialize prompt
+        const string FunctionPrompt = "Write a paragraph about: Cancellation.";
+
+        var writerFunction = kernel.CreateFunctionFromPrompt(
+            FunctionPrompt,
+            functionName: "Writer",
+            executionSettings: new OpenAIPromptExecutionSettings() { MaxTokens = 1000, Temperature = 1, TopP = 0.5 });
+
+        // Adding new inline handler to cancel/prevent function execution
+        kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) =>
+        {
+            WriteLine($"{e.Function.Name} : FunctionInvoking - Cancelling before execution");
+            e.Cancel = true;
+        };
+
+        // Technically invoked will never be called since the function will be cancelled
+        int functionInvokedCount = 0;
+        kernel.FunctionInvoked += (object?
sender, FunctionInvokedEventArgs e) => + { + functionInvokedCount++; + }; + + // Invoke prompt to trigger execution hooks. + try + { + var result = await kernel.InvokeAsync(writerFunction); + } + catch (KernelFunctionCanceledException fcex) + { + WriteLine(fcex.Message); + } + + WriteLine($"Function Invocation Times: {functionInvokedCount}"); + } + + /// + /// Demonstrate using kernel invocation-hooks to cancel post after execution: + /// + /// + /// + [Fact] + public async Task AfterInvokeCancellationAsync() + { + WriteLine("\n======== Cancelling Pipeline Execution - Invoked event ========\n"); + + // Create kernel instance + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: _openAIModelId!, + apiKey: _openAIApiKey!) + .Build(); + + // Initialize prompts + int functionInvokingCount = 0; + int functionInvokedCount = 0; + + var firstFunction = kernel.CreateFunctionFromPrompt("Write a phrase with Invoke.", functionName: "InvokePhrase"); + var secondFunction = kernel.CreateFunctionFromPrompt("Write a phrase with Cancellation.", functionName: "CancellationPhrase"); + + // Adding new inline handler to count invoking events + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + { + functionInvokingCount++; + }; + + // Invoked will never be called twice (for the secondFunction) since Invoked from the first is cancelling. + kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => + { + functionInvokedCount++; + e.Cancel = true; + }; + + // Invoke prompt to trigger execution hooks. + try + { + var result = await kernel.InvokeAsync(secondFunction); + } + catch (KernelFunctionCanceledException fcex) + { + WriteLine(fcex.Message); + } + + WriteLine($"Function Invoked Times: {functionInvokedCount}"); + WriteLine($"Function Invoking Times: {functionInvokingCount}"); + } + + private readonly string? _openAIModelId; + private readonly string? _openAIApiKey; + + public Example57_KernelHooks(ITestOutputHelper output) : base(output) + { + this._openAIModelId = TestConfiguration.OpenAI.ChatModelId; + this._openAIApiKey = TestConfiguration.OpenAI.ApiKey; + + if (this._openAIModelId == null || this._openAIApiKey == null) + { + WriteLine("OpenAI credentials not found. Skipping example."); + return; + } + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example58_ConfigureExecutionSettings.cs b/dotnet/samples/KernelSyntaxExamples/Example58_ConfigureExecutionSettings.cs new file mode 100644 index 000000000000..d9338f91be85 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example58_ConfigureExecutionSettings.cs @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft. All rights reserved. 
+
+using System.Text.Json;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Examples;
+
+public sealed class Example58_ConfigureExecutionSettings : BaseTest
+{
+    /// <summary>
+    /// Show how to configure model execution settings
+    /// </summary>
+    [Fact]
+    public async Task RunAsync()
+    {
+        this.WriteLine("======== Example58_ConfigureExecutionSettings ========");
+
+        string serviceId = TestConfiguration.AzureOpenAI.ServiceId;
+        string apiKey = TestConfiguration.AzureOpenAI.ApiKey;
+        string chatDeploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName;
+        string chatModelId = TestConfiguration.AzureOpenAI.ChatModelId;
+        string endpoint = TestConfiguration.AzureOpenAI.Endpoint;
+
+        if (apiKey == null || chatDeploymentName == null || endpoint == null)
+        {
+            this.WriteLine("AzureOpenAI endpoint, apiKey, or deploymentName not found. Skipping example.");
+            return;
+        }
+
+        Kernel kernel = Kernel.CreateBuilder()
+            .AddAzureOpenAIChatCompletion(
+                deploymentName: chatDeploymentName,
+                endpoint: endpoint,
+                serviceId: serviceId,
+                apiKey: apiKey,
+                modelId: chatModelId)
+            .Build();
+
+        var prompt = "Hello AI, what can you do for me?";
+
+        // Option 1:
+        // Invoke the prompt function and pass an OpenAI specific instance containing the execution settings
+        var result = await kernel.InvokePromptAsync(
+            prompt,
+            new(new OpenAIPromptExecutionSettings()
+            {
+                MaxTokens = 60,
+                Temperature = 0.7
+            }));
+        this.WriteLine(result.GetValue<string>());
+
+        // Option 2:
+        // Load prompt template configuration including the execution settings from a JSON payload
+        // Create the prompt functions using the prompt template and the configuration (loaded in the previous step)
+        // Invoke the prompt function using the implicitly set execution settings
+        string configPayload = @"{
+            ""schema"": 1,
+            ""name"": ""HelloAI"",
+            ""description"": ""Say hello to an AI"",
+            ""type"": ""completion"",
+            ""completion"": {
+                ""max_tokens"": 256,
+                ""temperature"": 0.5,
+                ""top_p"": 0.0,
+                ""presence_penalty"": 0.0,
+                ""frequency_penalty"": 0.0
+            }
+        }";
+        var promptConfig = JsonSerializer.Deserialize<PromptTemplateConfig>(configPayload)!;
+        promptConfig.Template = prompt;
+        var func = kernel.CreateFunctionFromPrompt(promptConfig);
+
+        result = await kernel.InvokeAsync(func);
+        this.WriteLine(result.GetValue<string>());
+
+        /* OUTPUT (using gpt4):
+Hello! As an AI language model, I can help you with a variety of tasks, such as:
+
+1. Answering general questions and providing information on a wide range of topics.
+2. Assisting with problem-solving and brainstorming ideas.
+3. Offering recommendations for books, movies, music, and more.
+4. Providing definitions, explanations, and examples of various concepts.
+5. Helping with language-related tasks, such as grammar, vocabulary, and writing tips.
+6. Generating creative content, such as stories, poems, or jokes.
+7. Assisting with basic math and science problems.
+8. Offering advice on various topics, such as productivity, motivation, and personal development.
+
+Please feel free to ask me anything, and I'll do my best to help you!
+Hello! As an AI language model, I can help you with a variety of tasks, including:
+
+1. Answering general questions and providing information on a wide range of topics.
+2. Offering suggestions and recommendations.
+3. Assisting with problem-solving and brainstorming ideas.
+4.
Providing explanations and + */ + } + + public Example58_ConfigureExecutionSettings(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example58_ConfigureRequestSettings.cs b/dotnet/samples/KernelSyntaxExamples/Example58_ConfigureRequestSettings.cs deleted file mode 100644 index 8afb3b10f4b6..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example58_ConfigureRequestSettings.cs +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.TemplateEngine; -using RepoUtils; - -// ReSharper disable once InconsistentNaming -public static class Example58_ConfigureRequestSettings -{ - /// - /// Show how to configure model request settings - /// - public static async Task RunAsync() - { - Console.WriteLine("======== Example58_ConfigureRequestSettings ========"); - - string serviceId = TestConfiguration.AzureOpenAI.ServiceId; - string apiKey = TestConfiguration.AzureOpenAI.ApiKey; - string chatDeploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; - string endpoint = TestConfiguration.AzureOpenAI.Endpoint; - - if (serviceId == null || apiKey == null || chatDeploymentName == null || endpoint == null) - { - Console.WriteLine("Azure serviceId, endpoint, apiKey, or deploymentName not found. Skipping example."); - return; - } - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - deploymentName: chatDeploymentName, - endpoint: endpoint, - serviceId: serviceId, - apiKey: apiKey) - .Build(); - - var prompt = "Hello AI, what can you do for me?"; - - // Option 1: - // Invoke the semantic function and pass an OpenAI specific instance containing the request settings - var result = await kernel.InvokeSemanticFunctionAsync( - prompt, - new OpenAIRequestSettings() - { - MaxTokens = 60, - Temperature = 0.7 - }); - Console.WriteLine(result.GetValue()); - - // Option 2: - // Load prompt template configuration including the request settings from a JSON payload - // Create the semantic functions using the prompt template and the configuration (loaded in the previous step) - // Invoke the semantic function using the implicitly set request settings - string configPayload = @"{ - ""schema"": 1, - ""description"": ""Say hello to an AI"", - ""type"": ""completion"", - ""completion"": { - ""max_tokens"": 256, - ""temperature"": 0.5, - ""top_p"": 0.0, - ""presence_penalty"": 0.0, - ""frequency_penalty"": 0.0 - } - }"; - var templateConfig = JsonSerializer.Deserialize(configPayload); - var func = kernel.CreateSemanticFunction(prompt, templateConfig!, "HelloAI"); - - result = await kernel.RunAsync(func); - Console.WriteLine(result.GetValue()); - - /* OUTPUT (using gpt4): -Hello! As an AI language model, I can help you with a variety of - -Hello! As an AI language model, I can help you with a variety of tasks, such as: - -1. Answering general questions and providing information on a wide range of topics. -2. Assisting with problem-solving and brainstorming ideas. -3. Offering recommendations for books, movies, music, and more. -4. Providing definitions, explanations, and examples of various concepts. -5. Helping with language-related tasks, such as grammar, vocabulary, and writing tips. -6. Generating creative content, such as stories, poems, or jokes. -7. 
Assisting with basic math and science problems. -8. Offering advice on various topics, such as productivity, motivation, and personal development. - -Please feel free to ask me anything, and I'll do my best to help you! -Hello! As an AI language model, I can help you with a variety of tasks, including: - -1. Answering general questions and providing information on a wide range of topics. -2. Offering suggestions and recommendations. -3. Assisting with problem-solving and brainstorming ideas. -4. Providing explanations and - */ - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example59_OpenAIFunctionCalling.cs b/dotnet/samples/KernelSyntaxExamples/Example59_OpenAIFunctionCalling.cs index 89edc73f287d..3c874fe9e053 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example59_OpenAIFunctionCalling.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example59_OpenAIFunctionCalling.cs @@ -1,193 +1,144 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Linq; -using System.Text; +using System.Text.Json; using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; -using Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Plugins.Core; -using RepoUtils; - -/** - * This example shows how to use OpenAI's function calling capability via the chat completions interface. - * For more information, see https://platform.openai.com/docs/guides/gpt/function-calling. - */ -// ReSharper disable once InconsistentNaming -public static class Example59_OpenAIFunctionCalling -{ - public static async Task RunAsync() - { - IKernel kernel = await InitializeKernelAsync(); - var chatCompletion = kernel.GetService(); - var chatHistory = chatCompletion.CreateNewChat(); +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; - OpenAIRequestSettings requestSettings = new() - { - // Include all functions registered with the kernel. - // Alternatively, you can provide your own list of OpenAIFunctions to include. - Functions = kernel.Functions.GetFunctionViews().Select(f => f.ToOpenAIFunction()).ToList(), - }; - - // Set FunctionCall to the name of a specific function to force the model to use that function. - requestSettings.FunctionCall = "TimePlugin-Date"; - await CompleteChatWithFunctionsAsync("What day is today?", chatHistory, chatCompletion, kernel, requestSettings); - await StreamingCompleteChatWithFunctionsAsync("What day is today?", chatHistory, chatCompletion, kernel, requestSettings); - - // Set FunctionCall to auto to let the model choose the best function to use. 
- requestSettings.FunctionCall = OpenAIRequestSettings.FunctionCallAuto; - await CompleteChatWithFunctionsAsync("What computer tablets are available for under $200?", chatHistory, chatCompletion, kernel, requestSettings); - await StreamingCompleteChatWithFunctionsAsync("What computer tablets are available for under $200?", chatHistory, chatCompletion, kernel, requestSettings); - } +namespace Examples; - private static async Task InitializeKernelAsync() +// This example shows how to use OpenAI's tool calling capability via the chat completions interface. +public class Example59_OpenAIFunctionCalling : BaseTest +{ + [Fact] + public async Task RunAsync() { - // Create kernel with chat completions service - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithOpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey, serviceId: "chat") - //.WithAzureChatCompletionService(TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey, serviceId: "chat") - .Build(); - - // Load functions to kernel - kernel.ImportFunctions(new TimePlugin(), "TimePlugin"); - await kernel.ImportPluginFunctionsAsync("KlarnaShoppingPlugin", new Uri("https://www.klarna.com/.well-known/ai-plugin.json"), new OpenApiFunctionExecutionParameters()); - - return kernel; - } + // Create kernel. + IKernelBuilder builder = Kernel.CreateBuilder(); - private static async Task CompleteChatWithFunctionsAsync(string ask, ChatHistory chatHistory, IChatCompletion chatCompletion, IKernel kernel, OpenAIRequestSettings requestSettings) - { - Console.WriteLine($"User message: {ask}"); - chatHistory.AddUserMessage(ask); + // We recommend the usage of OpenAI latest models for the best experience with tool calling. + // i.e. gpt-3.5-turbo-1106 or gpt-4-1106-preview + builder.AddOpenAIChatCompletion("gpt-3.5-turbo-1106", TestConfiguration.OpenAI.ApiKey); - // Send request - var chatResult = (await chatCompletion.GetChatCompletionsAsync(chatHistory, requestSettings))[0]; + builder.Services.AddLogging(services => services.AddConsole().SetMinimumLevel(LogLevel.Trace)); + Kernel kernel = builder.Build(); - // Check for message response - var chatMessage = await chatResult.GetChatMessageAsync(); - if (!string.IsNullOrEmpty(chatMessage.Content)) + // Add a plugin with some helper functions we want to allow the model to utilize. 
+ kernel.ImportPluginFromFunctions("HelperFunctions", new[] { - Console.WriteLine(chatMessage.Content); - - // Add the response to chat history - chatHistory.AddAssistantMessage(chatMessage.Content); + kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."), + kernel.CreateFunctionFromMethod((string cityName) => + cityName switch + { + "Boston" => "61 and rainy", + "London" => "55 and cloudy", + "Miami" => "80 and sunny", + "Paris" => "60 and rainy", + "Tokyo" => "50 and sunny", + "Sydney" => "75 and sunny", + "Tel Aviv" => "80 and sunny", + _ => "31 and snowing", + }, "Get_Weather_For_City", "Gets the current weather for the specified city"), + }); + + WriteLine("======== Example 1: Use automated function calling with a non-streaming prompt ========"); + { + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + WriteLine(await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))); + WriteLine(); } - // Check for function response - OpenAIFunctionResponse? functionResponse = chatResult.GetOpenAIFunctionResponse(); - if (functionResponse is not null) + WriteLine("======== Example 2: Use automated function calling with a streaming prompt ========"); { - // Print function response details - Console.WriteLine("Function name: " + functionResponse.FunctionName); - Console.WriteLine("Plugin name: " + functionResponse.PluginName); - Console.WriteLine("Arguments: "); - foreach (var parameter in functionResponse.Parameters) + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + await foreach (var update in kernel.InvokePromptStreamingAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))) { - Console.WriteLine($"- {parameter.Key}: {parameter.Value}"); + Write(update); } + WriteLine(); + } - // If the function returned by OpenAI is an SKFunction registered with the kernel, - // you can invoke it using the following code. - if (kernel.Functions.TryGetFunctionAndContext(functionResponse, out ISKFunction? func, out ContextVariables? context)) - { - var kernelResult = await kernel.RunAsync(func, context); + WriteLine("======== Example 3: Use manual function calling with a non-streaming prompt ========"); + { + var chat = kernel.GetRequiredService(); + var chatHistory = new ChatHistory(); - var result = kernelResult.GetValue(); + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + while (true) + { + var result = (OpenAIChatMessageContent)await chat.GetChatMessageContentAsync(chatHistory, settings, kernel); - string? 
resultMessage = null; - if (result is RestApiOperationResponse apiResponse) + if (result.Content is not null) { - resultMessage = apiResponse.Content?.ToString(); + Write(result.Content); } - else if (result is string str) + + List toolCalls = result.ToolCalls.OfType().ToList(); + if (toolCalls.Count == 0) { - resultMessage = str; + break; } - if (!string.IsNullOrEmpty(resultMessage)) + chatHistory.Add(result); + foreach (var toolCall in toolCalls) { - Console.WriteLine(resultMessage); - - // Add the function result to chat history - chatHistory.AddAssistantMessage(resultMessage); + string content = kernel.Plugins.TryGetFunctionAndArguments(toolCall, out KernelFunction? function, out KernelArguments? arguments) ? + JsonSerializer.Serialize((await function.InvokeAsync(kernel, arguments)).GetValue()) : + "Unable to find function. Please try again!"; + + chatHistory.Add(new ChatMessageContent( + AuthorRole.Tool, + content, + metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } })); } } - else - { - Console.WriteLine($"Error: Function {functionResponse.PluginName}.{functionResponse.FunctionName} not found."); - } - } - } - private static async Task StreamingCompleteChatWithFunctionsAsync(string ask, ChatHistory chatHistory, IChatCompletion chatCompletion, IKernel kernel, OpenAIRequestSettings requestSettings) - { - Console.WriteLine($"User message: {ask}"); - chatHistory.AddUserMessage(ask); + WriteLine(); + } - // Send request - await foreach (var chatResult in chatCompletion.GetStreamingChatCompletionsAsync(chatHistory, requestSettings)) + /* Uncomment this to try in a console chat loop. + Console.WriteLine("======== Example 4: Use automated function calling with a streaming chat ========"); { - StringBuilder chatContent = new(); - await foreach (var message in chatResult.GetStreamingChatMessageAsync()) - { - if (message.Content is not null) - { - Console.Write(message.Content); - chatContent.Append(message.Content); - } - } - chatHistory.AddAssistantMessage(chatContent.ToString()); + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var chat = kernel.GetRequiredService(); + var chatHistory = new ChatHistory(); - var functionResponse = await chatResult.GetOpenAIStreamingFunctionResponseAsync(); - - if (functionResponse is not null) + while (true) { - // Print function response details - Console.WriteLine("Function name: " + functionResponse.FunctionName); - Console.WriteLine("Plugin name: " + functionResponse.PluginName); - Console.WriteLine("Arguments: "); - foreach (var parameter in functionResponse.Parameters) + Console.Write("Question (Type \"quit\" to leave): "); + string question = Console.ReadLine() ?? string.Empty; + if (question == "quit") { - Console.WriteLine($"- {parameter.Key}: {parameter.Value}"); + break; } - // If the function returned by OpenAI is an SKFunction registered with the kernel, - // you can invoke it using the following code. - if (kernel.Functions.TryGetFunctionAndContext(functionResponse, out ISKFunction? func, out ContextVariables? context)) + chatHistory.AddUserMessage(question); + StringBuilder sb = new(); + await foreach (var update in chat.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) { - var kernelResult = await kernel.RunAsync(func, context); - - var result = kernelResult.GetValue(); - - string? 
resultMessage = null; - if (result is RestApiOperationResponse apiResponse) - { - resultMessage = apiResponse.Content?.ToString(); - } - else if (result is string str) + if (update.Content is not null) { - resultMessage = str; + Console.Write(update.Content); + sb.Append(update.Content); } - - if (!string.IsNullOrEmpty(resultMessage)) - { - Console.WriteLine(resultMessage); - - // Add the function result to chat history - chatHistory.AddAssistantMessage(resultMessage); - } - } - else - { - Console.WriteLine($"Error: Function {functionResponse.PluginName}.{functionResponse.FunctionName} not found."); } + chatHistory.AddAssistantMessage(sb.ToString()); + Console.WriteLine(); } - } + }*/ + } + + public Example59_OpenAIFunctionCalling(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example60_AdvancedMethodFunctions.cs b/dotnet/samples/KernelSyntaxExamples/Example60_AdvancedMethodFunctions.cs new file mode 100644 index 000000000000..e2c58bda2a15 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example60_AdvancedMethodFunctions.cs @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Globalization; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// This example shows different ways how to define and execute method functions using custom and primitive types. +public class Example60_AdvancedMethodFunctions : BaseTest +{ + #region Method Functions Chaining + + /// + /// This example executes Function1, which in turn executes Function2. + /// + [Fact] + public async Task MethodFunctionsChainingAsync() + { + WriteLine("Running Method Function Chaining example..."); + + var kernel = new Kernel(); + + var functions = kernel.ImportPluginFromType(); + + var customType = await kernel.InvokeAsync(functions["Function1"]); + + WriteLine($"CustomType.Number: {customType!.Number}"); // 2 + WriteLine($"CustomType.Text: {customType.Text}"); // From Function1 + From Function2 + } + + /// + /// Plugin example with two method functions, where one function is called from another. + /// + private sealed class FunctionsChainingPlugin + { + private const string PluginName = nameof(FunctionsChainingPlugin); + + [KernelFunction] + public async Task Function1Async(Kernel kernel) + { + // Execute another function + var value = await kernel.InvokeAsync(PluginName, "Function2"); + + return new MyCustomType + { + Number = 2 * value?.Number ?? 0, + Text = "From Function1 + " + value?.Text + }; + } + + [KernelFunction] + public static MyCustomType Function2() + { + return new MyCustomType + { + Number = 1, + Text = "From Function2" + }; + } + } + + #endregion + + #region Custom Type + + /// + /// In order to use custom types, should be specified, + /// that will convert object instance to string representation. + /// + /// + /// is used to represent complex object as meaningful string, so + /// it can be passed to AI for further processing using prompt functions. + /// It's possible to choose any format (e.g. XML, JSON, YAML) to represent your object. + /// + [TypeConverter(typeof(MyCustomTypeConverter))] + private sealed class MyCustomType + { + public int Number { get; set; } + + public string? Text { get; set; } + } + + /// + /// Implementation of for . 
+ /// In this example, object instance is serialized with from System.Text.Json, + /// but it's possible to convert object to string using any other serialization logic. + /// + private sealed class MyCustomTypeConverter : TypeConverter + { + public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) => true; + + /// + /// This method is used to convert object from string to actual type. This will allow to pass object to + /// method function which requires it. + /// + public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) + { + return JsonSerializer.Deserialize((string)value); + } + + /// + /// This method is used to convert actual type to string representation, so it can be passed to AI + /// for further processing. + /// + public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType) + { + return JsonSerializer.Serialize(value); + } + } + + #endregion + + public Example60_AdvancedMethodFunctions(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example60_AdvancedNativeFunctions.cs b/dotnet/samples/KernelSyntaxExamples/Example60_AdvancedNativeFunctions.cs deleted file mode 100644 index 64609aa95df7..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Example60_AdvancedNativeFunctions.cs +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Globalization; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; - -/** - * This example shows different ways how to define and execute native functions using custom and primitive types. - */ -// ReSharper disable once InconsistentNaming -public static class Example60_AdvancedNativeFunctions -{ - public static async Task RunAsync() - { - await NativeFunctionsChainingAsync(); - - await NativeFunctionsPipelineAsync(); - - await PrimitiveTypesAutoConversionAsync(); - } - - #region Native Functions Chaining - - /// - /// This example executes Function1, which in turn executes Function2. - /// - private static async Task NativeFunctionsChainingAsync() - { - Console.WriteLine("Running Native Function Chaining example..."); - - var kernel = new KernelBuilder().Build(); - - var functions = kernel.ImportFunctions(new FunctionsChainingPlugin(), FunctionsChainingPlugin.PluginName); - - var result = await kernel.RunAsync(functions["Function1"]); - var customType = result.GetValue()!; - - Console.WriteLine(customType.Number); // 2 - Console.WriteLine(customType.Text); // From Function1 + From Function2 - } - - /// - /// Plugin example with two native functions, where one function is called from another. 
- /// - private sealed class FunctionsChainingPlugin - { - public const string PluginName = nameof(FunctionsChainingPlugin); - - [SKFunction, SKName("Function1")] - public async Task Function1Async(SKContext context) - { - // Execute another function - var result = await context.Runner.RunAsync(PluginName, "Function2"); - var value = result.GetValue()!; - - return new MyCustomType - { - Number = 2 * value.Number, - Text = "From Function1 + " + value.Text - }; - } - - [SKFunction, SKName("Function2")] - public static MyCustomType Function2() - { - return new MyCustomType - { - Number = 1, - Text = "From Function2" - }; - } - } - - #endregion - - #region Native Functions Pipeline - - /// - /// This example executes Function1 and Function2 sequentially. - /// Kernel will pass required parameters to second function as result from first function. - /// - private static async Task NativeFunctionsPipelineAsync() - { - Console.WriteLine("Running Native Function Pipeline example..."); - - var kernel = new KernelBuilder().Build(); - - var functions = kernel.ImportFunctions(new FunctionsPipelinePlugin(), FunctionsPipelinePlugin.PluginName); - - var result = await kernel.RunAsync(functions["Function1"], functions["Function2"]); - var customType = result.GetValue()!; - - Console.WriteLine(customType.Number); // 2 - Console.WriteLine(customType.Text); // From Function1 + From Function2 - } - - /// - /// Plugin example with two native functions, which will be called sequentially by Kernel. - /// - private sealed class FunctionsPipelinePlugin - { - public const string PluginName = nameof(FunctionsPipelinePlugin); - - [SKFunction, SKName("Function1")] - public MyCustomType Function1() - { - return new MyCustomType - { - Number = 1, - Text = "From Function1" - }; - } - - [SKFunction, SKName("Function2")] - public static MyCustomType Function2(MyCustomType customType) - { - return new MyCustomType - { - Number = customType.Number * 2, - Text = customType.Text + " + From Function2" - }; - } - } - - #endregion - - #region Primitive Types Auto Conversion - - /// - /// This example shows how to initialize variables, which will be auto-converted to primitive types - /// in parameters of native function. - /// - private static async Task PrimitiveTypesAutoConversionAsync() - { - Console.WriteLine("Running Primitive Types Auto Conversion example..."); - - var kernel = new KernelBuilder().Build(); - - var functions = kernel.ImportFunctions(new PrimitiveTypesPlugin(), PrimitiveTypesPlugin.PluginName); - - var contextVariables = new ContextVariables(); - - contextVariables["number"] = "2"; - contextVariables["text"] = "From Context Variables"; - - var result = await kernel.RunAsync(contextVariables, functions["Function1"]); - var customType = result.GetValue()!; - - Console.WriteLine(customType.Number); // 2 - Console.WriteLine(customType.Text); // From Context Variables - } - - /// - /// Plugin example with native function, which contains two parameters with primitive types. - /// - private sealed class PrimitiveTypesPlugin - { - public const string PluginName = nameof(PrimitiveTypesPlugin); - - [SKFunction, SKName("Function1")] - public MyCustomType Function1(int number, string text) - { - return new MyCustomType - { - Number = number, - Text = text - }; - } - } - - #endregion - - #region Custom Type - - /// - /// In order to use custom types, should be specified, - /// that will convert object instance to string representation. 
- /// - /// - /// is used to represent complex object as meaningful string, so - /// it can be passed to AI for further processing using semantic functions. - /// It's possible to choose any format (e.g. XML, JSON, YAML) to represent your object. - /// - [TypeConverter(typeof(MyCustomTypeConverter))] - private sealed class MyCustomType - { - public int Number { get; set; } - - public string? Text { get; set; } - } - - /// - /// Implementation of for . - /// In this example, object instance is serialized with from System.Text.Json, - /// but it's possible to convert object to string using any other serialization logic. - /// -#pragma warning disable CA1812 // instantiated by Kernel - private sealed class MyCustomTypeConverter : TypeConverter -#pragma warning restore CA1812 - { - public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) => true; - - /// - /// This method is used to convert object from string to actual type. This will allow to pass object to - /// native function which requires it. - /// - public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) - { - return JsonSerializer.Deserialize((string)value); - } - - /// - /// This method is used to convert actual type to string representation, so it can be passed to AI - /// for further processing. - /// - public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType) - { - return JsonSerializer.Serialize(value); - } - } - - #endregion -} diff --git a/dotnet/samples/KernelSyntaxExamples/Example61_MultipleLLMs.cs b/dotnet/samples/KernelSyntaxExamples/Example61_MultipleLLMs.cs index 66de8ae93926..29a434c90878 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example61_MultipleLLMs.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example61_MultipleLLMs.cs @@ -1,69 +1,89 @@ // Copyright (c) Microsoft. All rights reserved. -using System; +using System.Collections.Generic; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using RepoUtils; +using xRetry; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming -public static class Example61_MultipleLLMs +namespace Examples; + +public class Example61_MultipleLLMs : BaseTest { /// - /// Show how to run a semantic function and specify a specific service to use. + /// Show how to run a prompt function and specify a specific service to use. 
/// - public static async Task RunAsync() + [RetryFact(typeof(HttpOperationException))] + public async Task RunAsync() { - Console.WriteLine("======== Example61_MultipleLLMs ========"); + Kernel kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + serviceId: "AzureOpenAIChat", + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey, + serviceId: "OpenAIChat") + .Build(); - string apiKey = TestConfiguration.AzureOpenAI.ApiKey; - string chatDeploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; - string endpoint = TestConfiguration.AzureOpenAI.Endpoint; + await RunByServiceIdAsync(kernel, "AzureOpenAIChat"); + await RunByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId); + await RunByFirstModelIdAsync(kernel, "gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId); + } - if (apiKey == null || chatDeploymentName == null || endpoint == null) - { - Console.WriteLine("Azure endpoint, apiKey, or deploymentName not found. Skipping example."); - return; - } + private async Task RunByServiceIdAsync(Kernel kernel, string serviceId) + { + WriteLine($"======== Service Id: {serviceId} ========"); - string openAIModelId = TestConfiguration.OpenAI.ChatModelId; - string openAIApiKey = TestConfiguration.OpenAI.ApiKey; + var prompt = "Hello AI, what can you do for me?"; - if (openAIModelId == null || openAIApiKey == null) + KernelArguments arguments = new(); + arguments.ExecutionSettings = new Dictionary() { - Console.WriteLine("OpenAI credentials not found. 
Skipping example."); - return; - } - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - deploymentName: chatDeploymentName, - endpoint: endpoint, - serviceId: "AzureOpenAIChat", - apiKey: apiKey) - .WithOpenAIChatCompletionService( - modelId: openAIModelId, - serviceId: "OpenAIChat", - apiKey: openAIApiKey) - .Build(); - - await RunSemanticFunctionAsync(kernel, "AzureOpenAIChat"); - await RunSemanticFunctionAsync(kernel, "OpenAIChat"); + { serviceId, new PromptExecutionSettings() } + }; + var result = await kernel.InvokePromptAsync(prompt, arguments); + WriteLine(result.GetValue()); } - public static async Task RunSemanticFunctionAsync(IKernel kernel, string serviceId) + private async Task RunByModelIdAsync(Kernel kernel, string modelId) { - Console.WriteLine($"======== {serviceId} ========"); + WriteLine($"======== Model Id: {modelId} ========"); var prompt = "Hello AI, what can you do for me?"; - var result = await kernel.InvokeSemanticFunctionAsync( + var result = await kernel.InvokePromptAsync( prompt, - requestSettings: new AIRequestSettings() + new(new PromptExecutionSettings() { - ServiceId = serviceId - }); - Console.WriteLine(result.GetValue()); + ModelId = modelId + })); + WriteLine(result.GetValue()); + } + + private async Task RunByFirstModelIdAsync(Kernel kernel, params string[] modelIds) + { + WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========"); + + var prompt = "Hello AI, what can you do for me?"; + + var modelSettings = new Dictionary(); + foreach (var modelId in modelIds) + { + modelSettings.Add(modelId, new PromptExecutionSettings() { ModelId = modelId }); + } + var promptConfig = new PromptTemplateConfig(prompt) { Name = "HelloAI", ExecutionSettings = modelSettings }; + + var function = kernel.CreateFunctionFromPrompt(promptConfig); + + var result = await kernel.InvokeAsync(function); + WriteLine(result.GetValue()); + } + + public Example61_MultipleLLMs(ITestOutputHelper output) : base(output) + { } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example62_CustomAIServiceSelector.cs b/dotnet/samples/KernelSyntaxExamples/Example62_CustomAIServiceSelector.cs index bd339078859a..adb85f5112a2 100644 --- a/dotnet/samples/KernelSyntaxExamples/Example62_CustomAIServiceSelector.cs +++ b/dotnet/samples/KernelSyntaxExamples/Example62_CustomAIServiceSelector.cs @@ -1,164 +1,88 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; -using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Threading.Tasks; -using Microsoft.ML.Tokenizers; +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TemplateEngine; -using RepoUtils; +using Xunit; +using Xunit.Abstractions; -// ReSharper disable once InconsistentNaming -public static class Example62_CustomAIServiceSelector +namespace Examples; + +public class Example62_CustomAIServiceSelector : BaseTest { /// - /// Show how to configure model request settings + /// Show how to use a custom AI service selector to select a specific model /// - public static async Task RunAsync() + [Fact] + public async Task RunAsync() { - Console.WriteLine("======== Example61_CustomAIServiceSelector ========"); - - string apiKey = TestConfiguration.AzureOpenAI.ApiKey; - string chatDeploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; - string endpoint = TestConfiguration.AzureOpenAI.Endpoint; - - if (apiKey == null || chatDeploymentName == null || endpoint == null) - { - Console.WriteLine("Azure endpoint, apiKey, or deploymentName not found. Skipping example."); - return; - } - - string openAIModelId = TestConfiguration.OpenAI.ChatModelId; - string openAIApiKey = TestConfiguration.OpenAI.ApiKey; - - if (openAIModelId == null || openAIApiKey == null) - { - Console.WriteLine("OpenAI credentials not found. Skipping example."); - return; - } - - IKernel kernel = new KernelBuilder() - .WithLoggerFactory(ConsoleLogger.LoggerFactory) - .WithAzureChatCompletionService( - deploymentName: chatDeploymentName, - endpoint: endpoint, + WriteLine("======== Example62_CustomAIServiceSelector ========"); + + // Build a kernel with multiple chat completion services + var builder = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, serviceId: "AzureOpenAIChat", - apiKey: apiKey) - .WithOpenAIChatCompletionService( - modelId: openAIModelId, - serviceId: "OpenAIChat", - apiKey: openAIApiKey) - .WithAIServiceSelector(new MyAIServiceSelector()) - .Build(); - - var modelSettings = new List - { - new OpenAIRequestSettings() { ServiceId = "AzureOpenAIChat", MaxTokens = 400 }, - new OpenAIRequestSettings() { ServiceId = "OpenAIChat", MaxTokens = 200 } - }; - - await RunSemanticFunctionAsync(kernel, "Hello AI, what can you do for me?", modelSettings); - await RunSemanticFunctionAsync(kernel, "Hello AI, provide an indepth description of what can you do for me as a bulleted list?", modelSettings); + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey, + serviceId: "OpenAIChat"); + builder.Services.AddSingleton(new GptAIServiceSelector(this.Output)); // Use the custom AI service selector to select the GPT model + Kernel kernel = builder.Build(); + + // This invocation is done with the model selected by the custom selector + var prompt = "Hello AI, what can you do for me?"; + var result = await kernel.InvokePromptAsync(prompt); + WriteLine(result.GetValue()); } - public static async Task 
RunSemanticFunctionAsync(IKernel kernel, string prompt, List modelSettings) + /// + /// Custom AI service selector that selects a GPT model. + /// This selector just naively selects the first service that provides + /// a completion model whose name starts with "gpt". But this logic could + /// be as elaborate as needed to apply your own selection criteria. + /// + private sealed class GptAIServiceSelector : IAIServiceSelector { - Console.WriteLine($"======== {prompt} ========"); - - var promptTemplateConfig = new PromptTemplateConfig() { ModelSettings = modelSettings }; - var promptTemplate = new PromptTemplate(prompt, promptTemplateConfig, kernel); - - var skfunction = kernel.RegisterSemanticFunction( - "MyFunction", - promptTemplateConfig, - promptTemplate); - - var result = await kernel.RunAsync(skfunction); - Console.WriteLine(result.GetValue()); - } -} - -public class MyAIServiceSelector : IAIServiceSelector -{ - private readonly int _defaultMaxTokens = 300; - private readonly int _minResponseTokens = 150; + private readonly ITestOutputHelper _output; - public (T?, AIRequestSettings?) SelectAIService(string renderedPrompt, IAIServiceProvider serviceProvider, IReadOnlyList? modelSettings) where T : IAIService - { - if (modelSettings is null || modelSettings.Count == 0) + public GptAIServiceSelector(ITestOutputHelper output) { - var service = serviceProvider.GetService(null); - if (service is not null) - { - return (service, null); - } + this._output = output; } - else - { - var tokens = this.CountTokens(renderedPrompt); - string? serviceId = null; - int fewestTokens = 0; - AIRequestSettings? requestSettings = null; - AIRequestSettings? defaultRequestSettings = null; - foreach (var model in modelSettings) - { - if (!string.IsNullOrEmpty(model.ServiceId)) - { - if (model is OpenAIRequestSettings openAIModel) - { - var responseTokens = (openAIModel.MaxTokens ?? this._defaultMaxTokens) - tokens; - if (serviceId is null || (responseTokens > this._minResponseTokens && responseTokens < fewestTokens)) - { - fewestTokens = responseTokens; - serviceId = model.ServiceId; - requestSettings = model; - } - } - } - else - { - // First request settings with empty or null service id is the default - defaultRequestSettings ??= model; - } - } - Console.WriteLine($"Prompt tokens: {tokens}, Response tokens: {fewestTokens}"); - - if (serviceId is not null) + public bool TrySelectAIService( + Kernel kernel, KernelFunction function, KernelArguments arguments, + [NotNullWhen(true)] out T? service, out PromptExecutionSettings? 
serviceSettings) where T : class, IAIService + { + foreach (var serviceToCheck in kernel.GetAllServices()) { - Console.WriteLine($"Selected service: {serviceId}"); - var service = serviceProvider.GetService(serviceId); - if (service is not null) + // Find the first service that has a model id that starts with "gpt" + var serviceModelId = serviceToCheck.GetModelId(); + var endpoint = serviceToCheck.GetEndpoint(); + if (!string.IsNullOrEmpty(serviceModelId) && serviceModelId.StartsWith("gpt", StringComparison.OrdinalIgnoreCase)) { - return (service, requestSettings); + this._output.WriteLine($"Selected model: {serviceModelId} {endpoint}"); + service = serviceToCheck; + serviceSettings = new OpenAIPromptExecutionSettings(); + return true; } } - if (defaultRequestSettings is not null) - { - var service = serviceProvider.GetService(null); - if (service is not null) - { - return (service, defaultRequestSettings); - } - } + service = null; + serviceSettings = null; + return false; } - - throw new SKException("Unable to find AI service to handled request."); } - /// - /// MicrosoftML token counter implementation. - /// - private int CountTokens(string input) + public Example62_CustomAIServiceSelector(ITestOutputHelper output) : base(output) { - Tokenizer tokenizer = new(new Bpe()); - var tokens = tokenizer.Encode(input).Tokens; - - return tokens.Count; } } diff --git a/dotnet/samples/KernelSyntaxExamples/Example63_ChatCompletionPrompts.cs b/dotnet/samples/KernelSyntaxExamples/Example63_ChatCompletionPrompts.cs new file mode 100644 index 000000000000..5b8b45d50a33 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example63_ChatCompletionPrompts.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// This example shows how to use chat completion standardized prompts. +public class Example63_ChatCompletionPrompts : BaseTest +{ + [Fact] + public async Task RunAsync() + { + const string ChatPrompt = @" + What is Seattle? + Respond with JSON. + "; + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + var chatSemanticFunction = kernel.CreateFunctionFromPrompt(ChatPrompt); + var chatPromptResult = await kernel.InvokeAsync(chatSemanticFunction); + + WriteLine("Chat Prompt:"); + WriteLine(ChatPrompt); + WriteLine("Chat Prompt Result:"); + WriteLine(chatPromptResult); + + WriteLine("Chat Prompt Streaming Result:"); + string completeMessage = string.Empty; + await foreach (var message in kernel.InvokeStreamingAsync(chatSemanticFunction)) + { + completeMessage += message; + Write(message); + } + + WriteLine("---------- Streamed Content ----------"); + WriteLine(completeMessage); + + /* + Chat Prompt: + What is Seattle? + Respond with JSON. + + Chat Prompt Result: + { + "Seattle": { + "Description": "Seattle is a city located in the state of Washington, in the United States...", + "Population": "Approximately 753,675 as of 2019", + "Area": "142.5 square miles", + ... 
+            }
+        }
+        */
+    }
+
+    public Example63_ChatCompletionPrompts(ITestOutputHelper output) : base(output)
+    {
+    }
+}
diff --git a/dotnet/samples/KernelSyntaxExamples/Example64_MultiplePromptTemplates.cs b/dotnet/samples/KernelSyntaxExamples/Example64_MultiplePromptTemplates.cs
new file mode 100644
index 000000000000..2e792e0ed029
--- /dev/null
+++ b/dotnet/samples/KernelSyntaxExamples/Example64_MultiplePromptTemplates.cs
@@ -0,0 +1,67 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
+using xRetry;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Examples;
+
+// This example shows how to use multiple prompt template formats.
+public class Example64_MultiplePromptTemplates : BaseTest
+{
+    /// <summary>
+    /// Show how to combine multiple prompt template factories.
+    /// </summary>
+    [RetryTheory(typeof(HttpOperationException))]
+    [InlineData("semantic-kernel", "Hello AI, my name is {{$name}}. What is the origin of my name?")]
+    [InlineData("handlebars", "Hello AI, my name is {{name}}. What is the origin of my name?")]
+    public Task RunAsync(string templateFormat, string prompt)
+    {
+        WriteLine("======== Example64_MultiplePromptTemplates ========");
+
+        Kernel kernel = Kernel.CreateBuilder()
+            .AddAzureOpenAIChatCompletion(
+                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
+                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
+                serviceId: "AzureOpenAIChat",
+                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
+                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
+            .Build();
+
+        var promptTemplateFactory = new AggregatorPromptTemplateFactory(
+            new KernelPromptTemplateFactory(),
+            new HandlebarsPromptTemplateFactory());
+
+        return RunPromptAsync(kernel, prompt, templateFormat, promptTemplateFactory);
+    }
+
+    private async Task RunPromptAsync(Kernel kernel, string prompt, string templateFormat, IPromptTemplateFactory promptTemplateFactory)
+    {
+        WriteLine($"======== {templateFormat} : {prompt} ========");
+
+        var function = kernel.CreateFunctionFromPrompt(
+            promptConfig: new PromptTemplateConfig()
+            {
+                Template = prompt,
+                TemplateFormat = templateFormat,
+                Name = "MyFunction",
+            },
+            promptTemplateFactory: promptTemplateFactory
+        );
+
+        var arguments = new KernelArguments()
+        {
+            { "name", "Bob" }
+        };
+
+        var result = await kernel.InvokeAsync(function, arguments);
+        WriteLine(result.GetValue<string>());
+    }
+
+    public Example64_MultiplePromptTemplates(ITestOutputHelper output) : base(output)
+    {
+    }
+}
diff --git a/dotnet/samples/KernelSyntaxExamples/Example65_HandlebarsPlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example65_HandlebarsPlanner.cs
new file mode 100644
index 000000000000..60d0d101427e
--- /dev/null
+++ b/dotnet/samples/KernelSyntaxExamples/Example65_HandlebarsPlanner.cs
@@ -0,0 +1,272 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.IO;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Planning.Handlebars;
+using Microsoft.SemanticKernel.Plugins.OpenApi;
+using Plugins.DictionaryPlugin;
+using RepoUtils;
+using xRetry;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Examples;
+
+// This example shows how to use the Handlebars sequential planner.
+public class Example65_HandlebarsPlanner : BaseTest +{ + private static int s_sampleIndex; + + private const string CourseraPluginName = "CourseraPlugin"; + + private void WriteSampleHeading(string name) + { + WriteLine($"======== [Handlebars Planner] Sample {s_sampleIndex++} - Create and Execute Plan with: {name} ========"); + } + private async Task RunSampleAsync(string goal, bool shouldPrintPrompt = false, params string[] pluginDirectoryNames) + { + string apiKey = TestConfiguration.AzureOpenAI.ApiKey; + string chatDeploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; + string chatModelId = TestConfiguration.AzureOpenAI.ChatModelId; + string endpoint = TestConfiguration.AzureOpenAI.Endpoint; + + if (apiKey == null || chatDeploymentName == null || chatModelId == null || endpoint == null) + { + WriteLine("Azure endpoint, apiKey, deploymentName, or modelId not found. Skipping example."); + return; + } + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: chatDeploymentName, + endpoint: endpoint, + serviceId: "AzureOpenAIChat", + apiKey: apiKey, + modelId: chatModelId) + .Build(); + + if (pluginDirectoryNames.Length > 0) + { + if (pluginDirectoryNames[0] == StringParamsDictionaryPlugin.PluginName) + { + kernel.ImportPluginFromType(StringParamsDictionaryPlugin.PluginName); + } + else if (pluginDirectoryNames[0] == ComplexParamsDictionaryPlugin.PluginName) + { + kernel.ImportPluginFromType(ComplexParamsDictionaryPlugin.PluginName); + } + else if (pluginDirectoryNames[0] == CourseraPluginName) + { + await kernel.ImportPluginFromOpenApiAsync( + CourseraPluginName, + new Uri("https://www.coursera.org/api/rest/v1/search/openapi.yaml") + ); + } + else + { + string folder = RepoFiles.SamplePluginsPath(); + + foreach (var pluginDirectoryName in pluginDirectoryNames) + { + kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, pluginDirectoryName)); + } + } + } + + // Use gpt-4 or newer models if you want to test with loops. + // Older models like gpt-35-turbo are less recommended. They do handle loops but are more prone to syntax errors. + var allowLoopsInPlan = chatDeploymentName.Contains("gpt-4", StringComparison.OrdinalIgnoreCase); + var planner = new HandlebarsPlanner( + new HandlebarsPlannerOptions() + { + // When using OpenAI models, we recommend using low values for temperature and top_p to minimize planner hallucinations. + ExecutionSettings = new OpenAIPromptExecutionSettings() + { + Temperature = 0.0, + TopP = 0.1, + }, + + // Change this if you want to test with loops regardless of model selection. + AllowLoops = allowLoopsInPlan + }); + + WriteLine($"Goal: {goal}"); + + // Create the plan + var plan = await planner.CreatePlanAsync(kernel, goal); + + // Print the prompt template + if (shouldPrintPrompt && plan.Prompt is not null) + { + WriteLine($"\nPrompt template:\n{plan.Prompt}"); + } + + WriteLine($"\nOriginal plan:\n{plan}"); + + // Execute the plan + var result = await plan.InvokeAsync(kernel); + WriteLine($"\nResult:\n{result}\n"); + } + + [RetryTheory(typeof(HttpOperationException))] + [InlineData(false)] + public async Task PlanNotPossibleSampleAsync(bool shouldPrintPrompt = false) + { + WriteSampleHeading("Plan Not Possible"); + + try + { + // Load additional plugins to enable planner but not enough for the given goal. 
+ await RunSampleAsync("Send Mary an email with the list of meetings I have scheduled today.", shouldPrintPrompt, "SummarizePlugin"); + } + catch (KernelException ex) when ( + ex.Message.Contains(nameof(HandlebarsPlannerErrorCodes.InsufficientFunctionsForGoal), StringComparison.CurrentCultureIgnoreCase) + || ex.Message.Contains(nameof(HandlebarsPlannerErrorCodes.HallucinatedHelpers), StringComparison.CurrentCultureIgnoreCase) + || ex.Message.Contains(nameof(HandlebarsPlannerErrorCodes.InvalidTemplate), StringComparison.CurrentCultureIgnoreCase)) + { + /* + Unable to create plan for goal with available functions. + Goal: Email me a list of meetings I have scheduled today. + Available Functions: SummarizePlugin-Notegen, SummarizePlugin-Summarize, SummarizePlugin-MakeAbstractReadable, SummarizePlugin-Topics + Planner output: + I'm sorry, but it seems that the provided helpers do not include any helper to fetch or filter meetings scheduled for today. + Therefore, I cannot create a Handlebars template to achieve the specified goal with the available helpers. + Additional helpers may be required. + */ + WriteLine($"\n{ex.Message}\n"); + } + } + + [RetryTheory(typeof(HttpOperationException))] + [InlineData(true)] + + public Task RunCourseraSampleAsync(bool shouldPrintPrompt = false) + { + this.WriteSampleHeading("Coursera OpenAPI Plugin"); + return RunSampleAsync("Show me courses about Artificial Intelligence.", shouldPrintPrompt, CourseraPluginName); + } + + [RetryTheory(typeof(HttpOperationException))] + [InlineData(false)] + public Task RunDictionaryWithBasicTypesSampleAsync(bool shouldPrintPrompt = false) + { + this.WriteSampleHeading("Basic Types using Local Dictionary Plugin"); + return RunSampleAsync("Get a random word and its definition.", shouldPrintPrompt, StringParamsDictionaryPlugin.PluginName); + /* + Original plan: + {{!-- Step 1: Get a random word --}} + {{set "randomWord" (DictionaryPlugin-GetRandomWord)}} + + {{!-- Step 2: Get the definition of the random word --}} + {{set "definition" (DictionaryPlugin-GetDefinition word=(get "randomWord"))}} + + {{!-- Step 3: Output the random word and its definition --}} + {{json (array (get "randomWord") (get "definition"))}} + + Result: + ["book","a set of printed or written pages bound together along one edge"] + */ + } + + [RetryTheory(typeof(HttpOperationException))] + [InlineData(true)] + public Task RunLocalDictionaryWithComplexTypesSampleAsync(bool shouldPrintPrompt = false) + { + this.WriteSampleHeading("Complex Types using Local Dictionary Plugin"); + return RunSampleAsync("Teach me two random words and their definition.", shouldPrintPrompt, ComplexParamsDictionaryPlugin.PluginName); + /* + Original Plan: + {{!-- Step 1: Get two random dictionary entries --}} + {{set "entry1" (DictionaryPlugin-GetRandomEntry)}} + {{set "entry2" (DictionaryPlugin-GetRandomEntry)}} + + {{!-- Step 2: Extract words from the entries --}} + {{set "word1" (DictionaryPlugin-GetWord entry=(get "entry1"))}} + {{set "word2" (DictionaryPlugin-GetWord entry=(get "entry2"))}} + + {{!-- Step 3: Extract definitions for the words --}} + {{set "definition1" (DictionaryPlugin-GetDefinition word=(get "word1"))}} + {{set "definition2" (DictionaryPlugin-GetDefinition word=(get "word2"))}} + + {{!-- Step 4: Display the words and their definitions --}} + Word 1: {{json (get "word1")}} + Definition: {{json (get "definition1")}} + + Word 2: {{json (get "word2")}} + Definition: {{json (get "definition2")}} + + Result: + Word 1: apple + Definition 1: a round fruit with red, 
green, or yellow skin and a white flesh + + Word 2: dog + Definition 2: a domesticated animal with four legs, a tail, and a keen sense of smell that is often used for hunting or companionship + */ + } + + [RetryTheory(typeof(HttpOperationException))] + [InlineData(false)] + public Task RunPoetrySampleAsync(bool shouldPrintPrompt = false) + { + this.WriteSampleHeading("Multiple Plugins"); + return RunSampleAsync("Write a poem about John Doe, then translate it into Italian.", shouldPrintPrompt, "SummarizePlugin", "WriterPlugin"); + /* + Original plan: + {{!-- Step 1: Initialize the scenario for the poem --}} + {{set "scenario" "John Doe, a mysterious and kind-hearted person"}} + + {{!-- Step 2: Generate a short poem about John Doe --}} + {{set "poem" (WriterPlugin-ShortPoem input=(get "scenario"))}} + + {{!-- Step 3: Translate the poem into Italian --}} + {{set "translatedPoem" (WriterPlugin-Translate input=(get "poem") language="Italian")}} + + {{!-- Step 4: Output the translated poem --}} + {{json (get "translatedPoem")}} + + Result: + C'era una volta un uomo di nome John Doe, + La cui gentilezza si mostrava costantemente, + Aiutava con un sorriso, + E non si arrendeva mai, + Al mistero che lo faceva brillare. + */ + } + + [RetryTheory(typeof(HttpOperationException))] + [InlineData(false)] + public Task RunBookSampleAsync(bool shouldPrintPrompt = false) + { + this.WriteSampleHeading("Loops and Conditionals"); + return RunSampleAsync("Create a book with 3 chapters about a group of kids in a club called 'The Thinking Caps.'", shouldPrintPrompt, "WriterPlugin", "MiscPlugin"); + /* + Original plan: + {{!-- Step 1: Initialize the book title and chapter count --}} + {{set "bookTitle" "The Thinking Caps"}} + {{set "chapterCount" 3}} + + {{!-- Step 2: Generate the novel outline with the given chapter count --}} + {{set "novelOutline" (WriterPlugin-NovelOutline input=(get "bookTitle") chapterCount=(get "chapterCount"))}} + + {{!-- Step 3: Loop through the chapters and generate the content for each chapter --}} + {{#each (range 1 (get "chapterCount"))}} + {{set "chapterIndex" this}} + {{set "chapterSynopsis" (MiscPlugin-ElementAtIndex input=(get "novelOutline") index=(get "chapterIndex"))}} + {{set "previousChapterSynopsis" (MiscPlugin-ElementAtIndex input=(get "novelOutline") index=(get "chapterIndex" - 1))}} + + {{!-- Step 4: Write the chapter content using the WriterPlugin-NovelChapter helper --}} + {{set "chapterContent" (WriterPlugin-NovelChapter input=(get "chapterSynopsis") theme=(get "bookTitle") previousChapter=(get "previousChapterSynopsis") chapterIndex=(get "chapterIndex"))}} + + {{!-- Step 5: Output the chapter content --}} + {{json (get "chapterContent")}} + {{/each}} + */ + } + + public Example65_HandlebarsPlanner(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example66_FunctionCallingStepwisePlanner.cs b/dotnet/samples/KernelSyntaxExamples/Example66_FunctionCallingStepwisePlanner.cs new file mode 100644 index 000000000000..e6135ed5fc91 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example66_FunctionCallingStepwisePlanner.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
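+
+// This example shows how to use the FunctionCallingStepwisePlanner, which repeatedly calls the model
+// with automatic function calling until it reaches a final answer for each question.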
+ +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Planning; +using Microsoft.SemanticKernel.Plugins.Core; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public class Example66_FunctionCallingStepwisePlanner : BaseTest +{ + [Fact] + public async Task RunAsync() + { + string[] questions = { + "What is the current hour number, plus 5?", + "What is 387 minus 22? Email the solution to John and Mary.", + "Write a limerick, translate it to Spanish, and send it to Jane", + }; + + var kernel = InitializeKernel(); + + var options = new FunctionCallingStepwisePlannerOptions + { + MaxIterations = 15, + MaxTokens = 4000, + }; + var planner = new FunctionCallingStepwisePlanner(options); + + foreach (var question in questions) + { + FunctionCallingStepwisePlannerResult result = await planner.ExecuteAsync(kernel, question); + WriteLine($"Q: {question}\nA: {result.FinalAnswer}"); + + // You can uncomment the line below to see the planner's process for completing the request. + // Console.WriteLine($"Chat history:\n{System.Text.Json.JsonSerializer.Serialize(result.ChatHistory)}"); + } + } + + /// + /// Initialize the kernel and load plugins. + /// + /// A kernel instance + private static Kernel InitializeKernel() + { + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + apiKey: TestConfiguration.OpenAI.ApiKey, + modelId: "gpt-3.5-turbo-1106") + .Build(); + + kernel.ImportPluginFromType(); + kernel.ImportPluginFromType(); + kernel.ImportPluginFromType(); + + return kernel; + } + + public Example66_FunctionCallingStepwisePlanner(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example67_KernelStreaming.cs b/dotnet/samples/KernelSyntaxExamples/Example67_KernelStreaming.cs new file mode 100644 index 000000000000..b7d71da5141e --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example67_KernelStreaming.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// This example shows how to use multiple prompt template formats. +public class Example67_KernelStreaming : BaseTest +{ + /// + /// Show how to combine multiple prompt template factories. + /// + [Fact] + public async Task RunAsync() + { + string apiKey = TestConfiguration.AzureOpenAI.ApiKey; + string chatDeploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; + string chatModelId = TestConfiguration.AzureOpenAI.ChatModelId; + string endpoint = TestConfiguration.AzureOpenAI.Endpoint; + + if (apiKey == null || chatDeploymentName == null || chatModelId == null || endpoint == null) + { + WriteLine("Azure endpoint, apiKey, deploymentName or modelId not found. Skipping example."); + return; + } + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: chatDeploymentName, + endpoint: endpoint, + serviceId: "AzureOpenAIChat", + apiKey: apiKey, + modelId: chatModelId) + .Build(); + + var funnyParagraphFunction = kernel.CreateFunctionFromPrompt("Write a funny paragraph about streaming", new OpenAIPromptExecutionSettings() { MaxTokens = 100, Temperature = 0.4, TopP = 1 }); + + var roleDisplayed = false; + + WriteLine("\n=== Prompt Function - Streaming ===\n"); + + string fullContent = string.Empty; + // Streaming can be of any type depending on the underlying service the function is using. 
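+        // With an OpenAI or Azure OpenAI chat service behind the function, the updates can also be
+        // requested as StreamingChatMessageContent (a hedged sketch, not part of this sample):
+        //
+        //   await foreach (StreamingChatMessageContent chunk in
+        //       kernel.InvokeStreamingAsync<StreamingChatMessageContent>(funnyParagraphFunction))
+        //   {
+        //       Write(chunk.Content);
+        //   }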
+ await foreach (var update in kernel.InvokeStreamingAsync(funnyParagraphFunction)) + { + // You will be always able to know the type of the update by checking the Type property. + if (!roleDisplayed && update.Role.HasValue) + { + WriteLine($"Role: {update.Role}"); + fullContent += $"Role: {update.Role}\n"; + roleDisplayed = true; + } + + if (update.Content is { Length: > 0 }) + { + fullContent += update.Content; + Write(update.Content); + } + } + + WriteLine("\n------ Streamed Content ------\n"); + WriteLine(fullContent); + } + + public Example67_KernelStreaming(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example68_GPTVision.cs b/dotnet/samples/KernelSyntaxExamples/Example68_GPTVision.cs new file mode 100644 index 000000000000..8011f79b570d --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example68_GPTVision.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// This example shows how to use GPT Vision model with different content types (text and image). +public class Example68_GPTVision : BaseTest +{ + [Fact] + public async Task RunAsync() + { + const string ImageUri = "https://upload.wikimedia.org/wikipedia/commons/d/d5/Half-timbered_mansion%2C_Zirkel%2C_East_view.jpg"; + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion("gpt-4-vision-preview", TestConfiguration.OpenAI.ApiKey) + .Build(); + + var chatCompletionService = kernel.GetRequiredService(); + + var chatHistory = new ChatHistory("You are a friendly assistant."); + + chatHistory.AddUserMessage(new ChatMessageContentItemCollection + { + new TextContent("What’s in this image?"), + new ImageContent(new Uri(ImageUri)) + }); + + var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory); + + WriteLine(reply.Content); + } + + public Example68_GPTVision(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example69_MutableKernelPlugin.cs b/dotnet/samples/KernelSyntaxExamples/Example69_MutableKernelPlugin.cs new file mode 100644 index 000000000000..eb0889a7dd9c --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example69_MutableKernelPlugin.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// This example shows how to create a mutable . +public class Example69_MutableKernelPlugin : BaseTest +{ + /// + /// Show how to create a mutable . + /// + [Fact] + public async Task RunAsync() + { + var plugin = new MutableKernelPlugin("Plugin"); + plugin.AddFunction(KernelFunctionFactory.CreateFromMethod(() => "Plugin.Function", "Function")); + + var kernel = new Kernel(); + kernel.Plugins.Add(plugin); + + var result = await kernel.InvokeAsync(kernel.Plugins["Plugin"]["Function"]); + + WriteLine($"Result: {result}"); + } + + /// + /// Provides an implementation around a collection of functions. + /// + public class MutableKernelPlugin : KernelPlugin + { + /// The collection of functions associated with this plugin. + private readonly Dictionary _functions; + + /// Initializes the new plugin from the provided name, description, and function collection. 
+ /// The name for the plugin. + /// A description of the plugin. + /// The initial functions to be available as part of the plugin. + /// contains a null function. + /// contains two functions with the same name. + public MutableKernelPlugin(string name, string? description = null, IEnumerable? functions = null) : base(name, description) + { + this._functions = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (functions is not null) + { + foreach (KernelFunction f in functions) + { + ArgumentNullException.ThrowIfNull(f); + this._functions.Add(f.Name, f); + } + } + } + + /// + public override int FunctionCount => this._functions.Count; + + /// + public override bool TryGetFunction(string name, [NotNullWhen(true)] out KernelFunction? function) => + this._functions.TryGetValue(name, out function); + + /// Adds a function to the plugin. + /// The function to add. + /// is null. + /// 's is null. + /// A function with the same already exists in this plugin. + public void AddFunction(KernelFunction function) + { + ArgumentNullException.ThrowIfNull(function); + this._functions.Add(function.Name, function); + } + + /// + public override IEnumerator GetEnumerator() => this._functions.Values.GetEnumerator(); + } + + public Example69_MutableKernelPlugin(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example70_Agents.cs b/dotnet/samples/KernelSyntaxExamples/Example70_Agents.cs new file mode 100644 index 000000000000..1e39549095e9 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example70_Agents.cs @@ -0,0 +1,179 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Agents; +using Plugins; +using Resources; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// Showcase Open AI Agent integration with semantic kernel: +/// https://platform.openai.com/docs/api-reference/agents +/// +public class Example70_Agent : BaseTest +{ + /// + /// Specific model is required that supports agents and function calling. + /// Currently this is limited to Open AI hosted services. + /// + private const string OpenAIFunctionEnabledModel = "gpt-3.5-turbo-1106"; + + /// + /// Chat using the "Parrot" agent. + /// Tools/functions: None + /// + [Fact] + public Task RunSimpleChatAsync() + { + WriteLine("======== Run:SimpleChat ========"); + + // Call the common chat-loop + return ChatAsync( + "Agents.ParrotAgent.yaml", // Defined under ./Resources/Agents + plugin: null, // No plugin + arguments: new KernelArguments { { "count", 3 } }, + "Fortune favors the bold.", + "I came, I saw, I conquered.", + "Practice makes perfect."); + } + + /// + /// Chat using the "Tool" agent and a method function. + /// Tools/functions: MenuPlugin + /// + [Fact] + public Task RunWithMethodFunctionsAsync() + { + WriteLine("======== Run:WithMethodFunctions ========"); + + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + + // Call the common chat-loop + return ChatAsync( + "Agents.ToolAgent.yaml", // Defined under ./Resources/Agents + plugin, + arguments: null, + "Hello", + "What is the special soup?", + "What is the special drink?", + "Thank you!"); + } + + /// + /// Chat using the "Tool" agent and a prompt function. + /// Tools/functions: spellChecker prompt function + /// + [Fact] + public Task RunWithPromptFunctionsAsync() + { + WriteLine("======== WithPromptFunctions ========"); + + // Create a prompt function. 
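+        // The function is wrapped in a plugin below so the tool agent can invoke it for spell checking.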
+ var function = KernelFunctionFactory.CreateFromPrompt( + "Correct any misspelling or gramatical errors provided in input: {{$input}}", + functionName: "spellChecker", + description: "Correct the spelling for the user input."); + + var plugin = KernelPluginFactory.CreateFromFunctions("spelling", "Spelling functions", new[] { function }); + + // Call the common chat-loop + return ChatAsync( + "Agents.ToolAgent.yaml", // Defined under ./Resources/Agents + plugin, + arguments: null, + "Hello", + "Is this spelled correctly: exercize", + "What is the special soup?", + "Thank you!"); + } + + /// + /// Invoke agent just like any other . + /// + [Fact] + public async Task RunAsFunctionAsync() + { + WriteLine("======== Run:AsFunction ========"); + + // Create parrot agent, same as the other cases. + var agent = + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .FromTemplate(EmbeddedResource.Read("Agents.ParrotAgent.yaml")) + .BuildAsync(); + + try + { + // Invoke agent plugin. + var response = await agent.AsPlugin().InvokeAsync("Practice makes perfect.", new KernelArguments { { "count", 2 } }); + + // Display result. + WriteLine(response ?? $"No response from agent: {agent.Id}"); + } + finally + { + // Clean-up (storage costs $) + await agent.DeleteAsync(); + } + } + + /// + /// Common chat loop used for: RunSimpleChatAsync, RunWithMethodFunctionsAsync, and RunWithPromptFunctionsAsync. + /// 1. Reads agent definition from"resourcePath" parameter. + /// 2. Initializes agent with definition and the specified "plugin". + /// 3. Display the agent identifier + /// 4. Create a chat-thread + /// 5. Process the provided "messages" on the chat-thread + /// + private async Task ChatAsync( + string resourcePath, + KernelPlugin? plugin = null, + KernelArguments? arguments = null, + params string[] messages) + { + // Read agent resource + var definition = EmbeddedResource.Read(resourcePath); + + // Create agent + var agent = + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .FromTemplate(definition) + .WithPlugin(plugin) + .BuildAsync(); + + // Create chat thread. Note: Thread is not bound to a single agent. + var thread = await agent.NewThreadAsync(); + try + { + // Display agent identifier. + this.WriteLine($"[{agent.Id}]"); + + // Process each user message and agent response. + foreach (var response in messages.Select(m => thread.InvokeAsync(agent, m, arguments))) + { + await foreach (var message in response) + { + this.WriteLine($"[{message.Id}]"); + this.WriteLine($"# {message.Role}: {message.Content}"); + } + } + } + finally + { + // Clean-up (storage costs $) + await Task.WhenAll( + thread?.DeleteAsync() ?? Task.CompletedTask, + agent.DeleteAsync()); + } + } + + public Example70_Agent(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example71_AgentDelegation.cs b/dotnet/samples/KernelSyntaxExamples/Example71_AgentDelegation.cs new file mode 100644 index 000000000000..a95d3d7af7ee --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example71_AgentDelegation.cs @@ -0,0 +1,109 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Agents; +using Plugins; +using Resources; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// Showcase complex Open AI Agent interactions using semantic kernel. +/// +public class Example71_AgentDelegation : BaseTest +{ + /// + /// Specific model is required that supports agents and function calling. + /// Currently this is limited to Open AI hosted services. + /// + private const string OpenAIFunctionEnabledModel = "gpt-3.5-turbo-1106"; + + // Track agents for clean-up + private static readonly List s_agents = new(); + + /// + /// Show how to combine coordinate multiple agents. + /// + [Fact] + public async Task RunAsync() + { + WriteLine("======== Example71_AgentDelegation ========"); + + if (TestConfiguration.OpenAI.ApiKey == null) + { + WriteLine("OpenAI apiKey not found. Skipping example."); + return; + } + + IAgentThread? thread = null; + + try + { + var plugin = KernelPluginFactory.CreateFromType(); + var menuAgent = + Track( + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .FromTemplate(EmbeddedResource.Read("Agents.ToolAgent.yaml")) + .WithDescription("Answer questions about how the menu uses the tool.") + .WithPlugin(plugin) + .BuildAsync()); + + var parrotAgent = + Track( + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .FromTemplate(EmbeddedResource.Read("Agents.ParrotAgent.yaml")) + .BuildAsync()); + + var toolAgent = + Track( + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .FromTemplate(EmbeddedResource.Read("Agents.ToolAgent.yaml")) + .WithPlugin(parrotAgent.AsPlugin()) + .WithPlugin(menuAgent.AsPlugin()) + .BuildAsync()); + + var messages = new string[] + { + "What's on the menu?", + "Can you talk like pirate?", + "Thank you", + }; + + thread = await toolAgent.NewThreadAsync(); + foreach (var response in messages.Select(m => thread.InvokeAsync(toolAgent, m))) + { + await foreach (var message in response) + { + WriteLine($"[{message.Id}]"); + WriteLine($"# {message.Role}: {message.Content}"); + } + } + } + finally + { + // Clean-up (storage costs $) + await Task.WhenAll( + thread?.DeleteAsync() ?? Task.CompletedTask, + Task.WhenAll(s_agents.Select(a => a.DeleteAsync()))); + } + } + + private static IAgent Track(IAgent agent) + { + s_agents.Add(agent); + + return agent; + } + + public Example71_AgentDelegation(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example72_AgentCollaboration.cs b/dotnet/samples/KernelSyntaxExamples/Example72_AgentCollaboration.cs new file mode 100644 index 000000000000..776759676ca8 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example72_AgentCollaboration.cs @@ -0,0 +1,186 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// Showcase complex Open AI Agent collaboration using semantic kernel. +/// +public class Example72_AgentCollaboration : BaseTest +{ + /// + /// Specific model is required that supports agents and function calling. 
+ /// Currently this is limited to Open AI hosted services. + /// + private const string OpenAIFunctionEnabledModel = "gpt-4-turbo-preview"; + + /// + /// Set this to 'true' to target OpenAI instead of Azure OpenAI. + /// + private const bool UseOpenAI = false; + + // Track agents for clean-up + private static readonly List s_agents = new(); + + /// + /// Show how two agents are able to collaborate as agents on a single thread. + /// + [Fact(Skip = "This test take more than 5 minutes to execute")] + public async Task RunCollaborationAsync() + { + WriteLine($"======== Example72:Collaboration:{(UseOpenAI ? "OpenAI" : "AzureAI")} ========"); + + IAgentThread? thread = null; + try + { + // Create copy-writer agent to generate ideas + var copyWriter = await CreateCopyWriterAsync(); + // Create art-director agent to review ideas, provide feedback and final approval + var artDirector = await CreateArtDirectorAsync(); + + // Create collaboration thread to which both agents add messages. + thread = await copyWriter.NewThreadAsync(); + + // Add the user message + var messageUser = await thread.AddUserMessageAsync("concept: maps made out of egg cartons."); + DisplayMessage(messageUser); + + bool isComplete = false; + do + { + // Initiate copy-writer input + var agentMessages = await thread.InvokeAsync(copyWriter).ToArrayAsync(); + DisplayMessages(agentMessages, copyWriter); + + // Initiate art-director input + agentMessages = await thread.InvokeAsync(artDirector).ToArrayAsync(); + DisplayMessages(agentMessages, artDirector); + + // Evaluate if goal is met. + if (agentMessages.First().Content.Contains("PRINT IT", StringComparison.OrdinalIgnoreCase)) + { + isComplete = true; + } + } + while (!isComplete); + } + finally + { + // Clean-up (storage costs $) + await Task.WhenAll(s_agents.Select(a => a.DeleteAsync())); + } + } + + /// + /// Show how agents can collaborate as agents using the plug-in model. + /// + /// + /// While this may achieve an equivalent result to , + /// it is not using shared thread state for agent interaction. + /// + [Fact(Skip = "This test take more than 2 minutes to execute")] + public async Task RunAsPluginsAsync() + { + WriteLine($"======== Example72:AsPlugins:{(UseOpenAI ? "OpenAI" : "AzureAI")} ========"); + + try + { + // Create copy-writer agent to generate ideas + var copyWriter = await CreateCopyWriterAsync(); + // Create art-director agent to review ideas, provide feedback and final approval + var artDirector = await CreateArtDirectorAsync(); + + // Create coordinator agent to oversee collaboration + var coordinator = + Track( + await CreateAgentBuilder() + .WithInstructions("Reply the provided concept and have the copy-writer generate an marketing idea (copy). Then have the art-director reply to the copy-writer with a review of the copy. Always include the source copy in any message. Always include the art-director comments when interacting with the copy-writer. Coordinate the repeated replies between the copy-writer and art-director until the art-director approves the copy.") + .WithPlugin(copyWriter.AsPlugin()) + .WithPlugin(artDirector.AsPlugin()) + .BuildAsync()); + + // Invoke as a plugin function + var response = await coordinator.AsPlugin().InvokeAsync("concept: maps made out of egg cartons."); + + // Display final result + WriteLine(response); + } + finally + { + // Clean-up (storage costs $) + await Task.WhenAll(s_agents.Select(a => a.DeleteAsync())); + } + } + + private static async Task CreateCopyWriterAsync(IAgent? 
agent = null) + { + return + Track( + await CreateAgentBuilder() + .WithInstructions("You are a copywriter with ten years of experience and are known for brevity and a dry humor. You're laser focused on the goal at hand. Don't waste time with chit chat. The goal is to refine and decide on the single best copy as an expert in the field. Consider suggestions when refining an idea.") + .WithName("Copywriter") + .WithDescription("Copywriter") + .WithPlugin(agent?.AsPlugin()) + .BuildAsync()); + } + + private async static Task<IAgent> CreateArtDirectorAsync() + { + return + Track( + await CreateAgentBuilder() + .WithInstructions("You are an art director who has opinions about copywriting born of a love for David Ogilvy. The goal is to determine if the given copy is acceptable to print, even if it isn't perfect. If not, provide insight on how to refine suggested copy without example. Always respond to the most recent message by evaluating and providing critique without example. Always repeat the copy at the beginning. If copy is acceptable and meets your criteria, say: PRINT IT.") + .WithName("Art Director") + .WithDescription("Art Director") + .BuildAsync()); + } + + private static AgentBuilder CreateAgentBuilder() + { + var builder = new AgentBuilder(); + + return + UseOpenAI ? + builder.WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : + builder.WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.DeploymentName, TestConfiguration.AzureOpenAI.ApiKey); + } + + private void DisplayMessages(IEnumerable<IChatMessage> messages, IAgent? agent = null) + { + foreach (var message in messages) + { + DisplayMessage(message, agent); + } + } + + private void DisplayMessage(IChatMessage message, IAgent? agent = null) + { + WriteLine($"[{message.Id}]"); + if (agent != null) + { + WriteLine($"# {message.Role}: ({agent.Name}) {message.Content}"); + } + else + { + WriteLine($"# {message.Role}: {message.Content}"); + } + } + + private static IAgent Track(IAgent agent) + { + s_agents.Add(agent); + + return agent; + } + + public Example72_AgentCollaboration(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example73_AgentAuthoring.cs b/dotnet/samples/KernelSyntaxExamples/Example73_AgentAuthoring.cs new file mode 100644 index 000000000000..004a3ef373fd --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example73_AgentAuthoring.cs @@ -0,0 +1,125 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +/// +/// Showcase hierarchical Open AI Agent interactions using semantic kernel. +/// +public class Example73_AgentAuthoring : BaseTest +{ + /// + /// Specific model is required that supports agents and parallel function calling. + /// Currently this is limited to Open AI hosted services.
+ /// + private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview"; + + // Track agents for clean-up + private static readonly List s_agents = new(); + + [Fact(Skip = "This test take more than 2 minutes to execute")] + public async Task RunAgentAsync() + { + WriteLine("======== Example73_AgentAuthoring ========"); + try + { + // Initialize the agent with tools + IAgent articleGenerator = await CreateArticleGeneratorAsync(); + + // "Stream" messages as they become available + await foreach (IChatMessage message in articleGenerator.InvokeAsync("Thai food is the best in the world")) + { + WriteLine($"[{message.Id}]"); + WriteLine($"# {message.Role}: {message.Content}"); + } + } + finally + { + await Task.WhenAll(s_agents.Select(a => a.DeleteAsync())); + } + } + + [Fact(Skip = "This test take more than 2 minutes to execute")] + public async Task RunAsPluginAsync() + { + WriteLine("======== Example73_AgentAuthoring ========"); + try + { + // Initialize the agent with tools + IAgent articleGenerator = await CreateArticleGeneratorAsync(); + + // Invoke as a plugin function + string response = await articleGenerator.AsPlugin().InvokeAsync("Thai food is the best in the world"); + + // Display final result + WriteLine(response); + } + finally + { + await Task.WhenAll(s_agents.Select(a => a.DeleteAsync())); + } + } + + private static async Task CreateArticleGeneratorAsync() + { + // Initialize the outline agent + var outlineGenerator = await CreateOutlineGeneratorAsync(); + // Initialize the research agent + var sectionGenerator = await CreateResearchGeneratorAsync(); + + // Initialize agent so that it may be automatically deleted. + return + Track( + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithInstructions("You write concise opinionated articles that are published online. Use an outline to generate an article with one section of prose for each top-level outline element. Each section is based on research with a maximum of 120 words.") + .WithName("Article Author") + .WithDescription("Author an article on a given topic.") + .WithPlugin(outlineGenerator.AsPlugin()) + .WithPlugin(sectionGenerator.AsPlugin()) + .BuildAsync()); + } + + private static async Task CreateOutlineGeneratorAsync() + { + // Initialize agent so that it may be automatically deleted. + return + Track( + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithInstructions("Produce an single-level outline (no child elements) based on the given topic with at most 3 sections.") + .WithName("Outline Generator") + .WithDescription("Generate an outline.") + .BuildAsync()); + } + + private static async Task CreateResearchGeneratorAsync() + { + // Initialize agent so that it may be automatically deleted. 
+ return + Track( + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithInstructions("Provide insightful research that supports the given topic based on your knowledge of the outline topic.") + .WithName("Researcher") + .WithDescription("Author research summary.") + .BuildAsync()); + } + + private static IAgent Track(IAgent agent) + { + s_agents.Add(agent); + + return agent; + } + + public Example73_AgentAuthoring(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example74_FlowOrchestrator.cs b/dotnet/samples/KernelSyntaxExamples/Example74_FlowOrchestrator.cs new file mode 100644 index 000000000000..b5a924e72b66 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example74_FlowOrchestrator.cs @@ -0,0 +1,300 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Linq; +using System.Text.Json; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Experimental.Orchestration; +using Microsoft.SemanticKernel.Memory; +using Microsoft.SemanticKernel.Plugins.Core; +using Microsoft.SemanticKernel.Plugins.Web; +using Microsoft.SemanticKernel.Plugins.Web.Bing; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// This example shows how to use FlowOrchestrator to execute a given flow with interaction with client. +public class Example74_FlowOrchestrator : BaseTest +{ + private static readonly Flow s_flow = FlowSerializer.DeserializeFromYaml(@" +name: FlowOrchestrator_Example_Flow +goal: answer question and send email +steps: + - goal: What is the tallest mountain in Asia? How tall is it divided by 2? + plugins: + - WebSearchEnginePlugin + - LanguageCalculatorPlugin + provides: + - answer + - goal: Collect email address + plugins: + - ChatPlugin + completionType: AtLeastOnce + transitionMessage: do you want to send it to another email address? 
+ provides: + - email_addresses + + - goal: Send email + plugins: + - EmailPluginV2 + requires: + - email_addresses + - answer + provides: + - email + +provides: + - email +"); + + [Fact(Skip = "Can take more than 1 minute")] + public Task RunAsync() + { + return RunExampleAsync(); + } + + private async Task RunExampleAsync() + { + var bingConnector = new BingConnector(TestConfiguration.Bing.ApiKey); + var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); + + Dictionary plugins = new() + { + { webSearchEnginePlugin, "WebSearch" }, + { new TimePlugin(), "Time" } + }; + + FlowOrchestrator orchestrator = new( + GetKernelBuilder(LoggerFactory), + await FlowStatusProvider.ConnectAsync(new VolatileMemoryStore()).ConfigureAwait(false), + plugins, + config: GetOrchestratorConfig()); + var sessionId = Guid.NewGuid().ToString(); + + WriteLine("*****************************************************"); + WriteLine("Executing " + nameof(RunExampleAsync)); + Stopwatch sw = new(); + sw.Start(); + WriteLine("Flow: " + s_flow.Name); + var question = s_flow.Steps.First().Goal; + var result = await orchestrator.ExecuteFlowAsync(s_flow, sessionId, question).ConfigureAwait(false); + + WriteLine("Question: " + question); + WriteLine("Answer: " + result.Metadata!["answer"]); + WriteLine("Assistant: " + result.GetValue>()!.Single()); + + string[] userInputs = new[] + { + "my email is bad*email&address", + "my email is sample@xyz.com", + "yes", // confirm to add another email address + "I also want to notify foo@bar.com", + "no I don't need notify any more address", // end of collect emails + }; + + foreach (var t in userInputs) + { + WriteLine($"User: {t}"); + result = await orchestrator.ExecuteFlowAsync(s_flow, sessionId, t).ConfigureAwait(false); + var responses = result.GetValue>()!; + foreach (var response in responses) + { + WriteLine("Assistant: " + response); + } + + if (result.IsComplete(s_flow)) + { + break; + } + } + + WriteLine("\tEmail Address: " + result.Metadata!["email_addresses"]); + WriteLine("\tEmail Payload: " + result.Metadata!["email"]); + + WriteLine("Time Taken: " + sw.Elapsed); + WriteLine("*****************************************************"); + } + + private static FlowOrchestratorConfig GetOrchestratorConfig() + { + var config = new FlowOrchestratorConfig + { + MaxStepIterations = 20 + }; + + return config; + } + + private static IKernelBuilder GetKernelBuilder(ILoggerFactory loggerFactory) + { + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(loggerFactory); + + return builder + .AddAzureOpenAIChatCompletion( + TestConfiguration.AzureOpenAI.ChatDeploymentName, + TestConfiguration.AzureOpenAI.Endpoint, + TestConfiguration.AzureOpenAI.ApiKey); + } + + public sealed class ChatPlugin + { + private const string Goal = "Prompt user to provide a valid email address"; + + private const string EmailRegex = @"^([\w\.\-]+)@([\w\-]+)((\.(\w){2,3})+)$"; + + private const string SystemPrompt = + $@"I am AI assistant and will only answer questions related to collect email. +The email should conform the regex: {EmailRegex} + +If I cannot answer, say that I don't know. + +# IMPORTANT +Do not expose the regex in your response. 
+"; + + private readonly IChatCompletionService _chat; + + private int MaxTokens { get; set; } = 256; + + private readonly PromptExecutionSettings _chatRequestSettings; + + public ChatPlugin(Kernel kernel) + { + this._chat = kernel.GetRequiredService(); + this._chatRequestSettings = new OpenAIPromptExecutionSettings + { + MaxTokens = this.MaxTokens, + StopSequences = new List() { "Observation:" }, + Temperature = 0 + }; + } + + [KernelFunction("ConfigureEmailAddress")] + [Description("Useful to assist in configuration of email address, must be called after email provided")] + public async Task CollectEmailAsync( + [Description("The email address provided by the user, pass no matter what the value is")] + string email_addresses, + KernelArguments arguments) + { + var chat = new ChatHistory(SystemPrompt); + chat.AddUserMessage(Goal); + + ChatHistory? chatHistory = arguments.GetChatHistory(); + if (chatHistory?.Count > 0) + { + chat.AddRange(chatHistory); + } + + if (!string.IsNullOrEmpty(email_addresses) && IsValidEmail(email_addresses)) + { + return "Thanks for providing the info, the following email would be used in subsequent steps: " + email_addresses; + } + + arguments["email_addresses"] = string.Empty; + arguments.PromptInput(); + + var response = await this._chat.GetChatMessageContentAsync(chat).ConfigureAwait(false); + return response.Content ?? string.Empty; + } + + private static bool IsValidEmail(string email) + { + // check using regex + var regex = new Regex(EmailRegex); + return regex.IsMatch(email); + } + } + + public sealed class EmailPluginV2 + { + private readonly JsonSerializerOptions _serializerOptions = new() { WriteIndented = true }; + + [KernelFunction] + [Description("Send email")] + public string SendEmail( + [Description("target email addresses")] + string emailAddresses, + [Description("answer, which is going to be the email content")] + string answer, + KernelArguments arguments) + { + var contract = new Email() + { + Address = emailAddresses, + Content = answer, + }; + + // for demo purpose only + string emailPayload = JsonSerializer.Serialize(contract, this._serializerOptions); + arguments["email"] = emailPayload; + + return "Here's the API contract I will post to mail server: " + emailPayload; + } + + private sealed class Email + { + public string? Address { get; set; } + + public string? Content { get; set; } + } + } + + public Example74_FlowOrchestrator(ITestOutputHelper output) : base(output) + { + } +} + +//***************************************************** +//Executing RunExampleAsync +//Flow: FlowOrchestrator_Example_Flow +//Question: What is the tallest mountain in Asia? How tall is it divided by 2? +//Answer: The tallest mountain in Asia is Mount Everest and its height divided by 2 is 14516. +//Assistant: Please provide a valid email address. +//User: my email is bad*email&address +//Assistant: I'm sorry, but "bad*email&address" does not conform to the standard email format. Please provide a valid email address. +//User: my email is sample@xyz.com +//Assistant: Did the user indicate whether they want to repeat the previous step? +//User: yes +//Assistant: Please enter a valid email address. +//User: I also want to notify foo@bar.com +//Assistant: Did the user indicate whether they want to repeat the previous step? 
+//User: no I don't need notify any more address +// Email Address: ["sample@xyz.com","foo@bar.com"] +// Email Payload: { +// "Address": "[\u0022sample@xyz.com\u0022,\u0022foo@bar.com\u0022]", +// "Content": "The tallest mountain in Asia is Mount Everest and its height divided by 2 is 14516." +//} +//Time Taken: 00:00:21.9681103 +//***************************************************** + +//***************************************************** +//Executing RunInteractiveAsync +//Flow: FlowOrchestrator_Example_Flow +//Please type the question you'd like to ask +//User: +//What is the tallest mountain in Asia? How tall is it divided by 2? +//Assistant: Please enter a valid email address. +//User: +//foo@hotmail.com +//Assistant: Do you want to send it to another email address? +//User: +//no I don't +// Email Address: ["foo@hotmail.com"] +// Email Payload: { +// "Address": "[\u0022foo@hotmail.com\u0022]", +// "Content": "The tallest mountain in Asia is Mount Everest and its height divided by 2 is 14515.845." +//} +//Flow completed, exiting +//Time Taken: 00:01:47.0752303 +//***************************************************** diff --git a/dotnet/samples/KernelSyntaxExamples/Example75_AgentTools.cs b/dotnet/samples/KernelSyntaxExamples/Example75_AgentTools.cs new file mode 100644 index 000000000000..9d5959b6952d --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example75_AgentTools.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// ReSharper disable once InconsistentNaming +/// +/// Showcase usage of code_interpreter and retrieval tools. +/// +public sealed class Example75_AgentTools : BaseTest +{ + /// + /// Specific model is required that supports agents and parallel function calling. + /// Currently this is limited to Open AI hosted services. + /// + private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview"; + + // Track agents for clean-up + private readonly List _agents = new(); + + /// + /// Show how to utilize code_interpreter tool. + /// + [Fact] + public async Task RunCodeInterpreterToolAsync() + { + this.WriteLine("======== Using CodeInterpreter tool ========"); + + if (TestConfiguration.OpenAI.ApiKey == null) + { + this.WriteLine("OpenAI apiKey not found. Skipping example."); + return; + } + + var builder = + new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithInstructions("Write only code to solve the given problem without comment."); + + try + { + var defaultAgent = + Track( + await builder.BuildAsync()); + + var codeInterpreterAgent = + Track( + await builder.WithCodeInterpreter().BuildAsync()); + + await ChatAsync( + defaultAgent, + codeInterpreterAgent, + "What is the solution to `3x + 2 = 14`?", + "What is the fibinacci sequence until 101?"); + } + finally + { + await Task.WhenAll(this._agents.Select(a => a.DeleteAsync())); + } + } + + /// + /// Show how to utilize retrieval tool. + /// + [Fact] + public async Task RunRetrievalToolAsync() + { + this.WriteLine("======== Using Retrieval tool ========"); + + if (TestConfiguration.OpenAI.ApiKey == null) + { + this.WriteLine("OpenAI apiKey not found. Skipping example."); + return; + } + + // REQUIRED: + // + // Use `curl` to upload document prior to running example and assign the + // identifier to `fileId`. 
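+    //
+    // Bash equivalent of the PowerShell command below (assumes OPENAI_APIKEY is exported):
+    //   curl https://api.openai.com/v1/files \
+    //     -H "Authorization: Bearer $OPENAI_APIKEY" \
+    //     -F purpose="assistants" \
+    //     -F file="@Resources/travelinfo.txt"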
+ // + // Powershell: + // curl https://api.openai.com/v1/files ` + // -H "Authorization: Bearer $Env:OPENAI_APIKEY" ` + // -F purpose="assistants" ` + // -F file="@Resources/travelinfo.txt" + + var fileId = ""; + + var defaultAgent = + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .BuildAsync(); + + var retrievalAgent = + await new AgentBuilder() + .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) + .WithRetrieval(fileId) + .BuildAsync(); + + try + { + await ChatAsync( + defaultAgent, + retrievalAgent, + "Where did sam go?", + "When does the flight leave Seattle?", + "What is the hotel contact info at the destination?"); + } + finally + { + await Task.WhenAll(this._agents.Select(a => a.DeleteAsync())); + } + } + + /// + /// Common chat loop used for: RunCodeInterpreterToolAsync and RunRetrievalToolAsync. + /// Processes each question for both "default" and "enabled" agents. + /// + private async Task ChatAsync( + IAgent defaultAgent, + IAgent enabledAgent, + params string[] questions) + { + foreach (var question in questions) + { + this.WriteLine("\nDEFAULT AGENT:"); + await InvokeAgentAsync(defaultAgent, question); + + this.WriteLine("\nTOOL ENABLED AGENT:"); + await InvokeAgentAsync(enabledAgent, question); + } + + async Task InvokeAgentAsync(IAgent agent, string question) + { + await foreach (var message in agent.InvokeAsync(question)) + { + string content = message.Content; + foreach (var annotation in message.Annotations) + { + content = content.Replace(annotation.Label, string.Empty, StringComparison.Ordinal); + } + + this.WriteLine($"# {message.Role}: {content}"); + + if (message.Annotations.Count > 0) + { + this.WriteLine("\n# files:"); + foreach (var annotation in message.Annotations) + { + this.WriteLine($"* {annotation.FileId}"); + } + } + } + + this.WriteLine(); + } + } + + private IAgent Track(IAgent agent) + { + this._agents.Add(agent); + + return agent; + } + + public Example75_AgentTools(ITestOutputHelper output) : base(output) { } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example76_Filters.cs b/dotnet/samples/KernelSyntaxExamples/Example76_Filters.cs new file mode 100644 index 000000000000..ab3bc7708ba7 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example76_Filters.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Examples; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +public class Example76_Filters : BaseTest +{ + /// + /// Shows how to use function and prompt filters in Kernel. 
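+    /// Function filters here are registered through dependency injection; a prompt filter is also
+    /// added directly to the kernel's PromptFilters collection below.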
+ /// + [Fact] + public async Task FunctionAndPromptFiltersAsync() + { + var builder = Kernel.CreateBuilder(); + + builder.AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey); + + builder.Services.AddSingleton(this.Output); + + // Add filters with DI + builder.Services.AddSingleton(); + builder.Services.AddSingleton(); + + var kernel = builder.Build(); + + // Add filter without DI + kernel.PromptFilters.Add(new FirstPromptFilter(this.Output)); + + var function = kernel.CreateFunctionFromPrompt("What is Seattle", functionName: "MyFunction"); + var result = await kernel.InvokeAsync(function); + + WriteLine(result); + } + + public Example76_Filters(ITestOutputHelper output) : base(output) + { + } + + #region Filters + + private sealed class FirstFunctionFilter : IFunctionFilter + { + private readonly ITestOutputHelper _output; + + public FirstFunctionFilter(ITestOutputHelper output) + { + this._output = output; + } + + public void OnFunctionInvoking(FunctionInvokingContext context) => + this._output.WriteLine($"{nameof(FirstFunctionFilter)}.{nameof(OnFunctionInvoking)} - {context.Function.Name}"); + + public void OnFunctionInvoked(FunctionInvokedContext context) => + this._output.WriteLine($"{nameof(FirstFunctionFilter)}.{nameof(OnFunctionInvoked)} - {context.Function.Name}"); + } + + private sealed class SecondFunctionFilter : IFunctionFilter + { + private readonly ITestOutputHelper _output; + + public SecondFunctionFilter(ITestOutputHelper output) + { + this._output = output; + } + + public void OnFunctionInvoking(FunctionInvokingContext context) => + this._output.WriteLine($"{nameof(SecondFunctionFilter)}.{nameof(OnFunctionInvoking)} - {context.Function.Name}"); + + public void OnFunctionInvoked(FunctionInvokedContext context) => + this._output.WriteLine($"{nameof(SecondFunctionFilter)}.{nameof(OnFunctionInvoked)} - {context.Function.Name}"); + } + + private sealed class FirstPromptFilter : IPromptFilter + { + private readonly ITestOutputHelper _output; + + public FirstPromptFilter(ITestOutputHelper output) + { + this._output = output; + } + + public void OnPromptRendering(PromptRenderingContext context) => + this._output.WriteLine($"{nameof(FirstPromptFilter)}.{nameof(OnPromptRendering)} - {context.Function.Name}"); + + public void OnPromptRendered(PromptRenderedContext context) => + this._output.WriteLine($"{nameof(FirstPromptFilter)}.{nameof(OnPromptRendered)} - {context.Function.Name}"); + } + + #endregion + + #region Filter capabilities + + private sealed class FunctionFilterExample : IFunctionFilter + { + public void OnFunctionInvoked(FunctionInvokedContext context) + { + // Example: get function result value + var value = context.Result.GetValue(); + + // Example: override function result value + context.SetResultValue("new result value"); + + // Example: get token usage from metadata + var usage = context.Result.Metadata?["Usage"]; + } + + public void OnFunctionInvoking(FunctionInvokingContext context) + { + // Example: override kernel arguments + context.Arguments["input"] = "new input"; + + // Example: cancel function execution + context.Cancel = true; + } + } + + private sealed class PromptFilterExample : IPromptFilter + { + public void OnPromptRendered(PromptRenderedContext context) + { + // Example: override rendered prompt before sending it to AI + context.RenderedPrompt = "Safe prompt"; + } + + public void 
OnPromptRendering(PromptRenderingContext context) + { + // Example: get function information + var functionName = context.Function.Name; + } + } + + #endregion +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example77_StronglyTypedFunctionResult.cs b/dotnet/samples/KernelSyntaxExamples/Example77_StronglyTypedFunctionResult.cs new file mode 100644 index 000000000000..cd1a0db181ef --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example77_StronglyTypedFunctionResult.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +// The following example shows how to receive the results from the kernel in a strongly typed object +// which stores the usage in tokens and converts the JSON result to a strongly typed object, where a validation can also +// be performed +public class Example77_StronglyTypedFunctionResult : BaseTest +{ + [Fact] + public async Task RunAsync() + { + this.WriteLine("======== Extended function result ========"); + + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + var promptTestDataGeneration = "Return a JSON with an array of 3 JSON objects with the following fields: " + + "First, an id field with a random GUID, next a name field with a random company name and last a description field with a random short company description. " + + "Ensure the JSON is valid and it contains a JSON array named testcompanies with the three fields."; + + // Time it + var sw = new Stopwatch(); + sw.Start(); + + FunctionResult functionResult = await kernel.InvokePromptAsync(promptTestDataGeneration); + + // Stop the timer + sw.Stop(); + + var functionResultTestDataGen = new FunctionResultTestDataGen(functionResult!, sw.ElapsedMilliseconds); + + this.WriteLine($"Test data: {functionResultTestDataGen.Result} \n"); + this.WriteLine($"Milliseconds: {functionResultTestDataGen.ExecutionTimeInMilliseconds} \n"); + this.WriteLine($"Total Tokens: {functionResultTestDataGen.TokenCounts!.TotalTokens} \n"); + } + + public Example77_StronglyTypedFunctionResult(ITestOutputHelper output) : base(output) + { + } + + /// + /// Helper classes for the example, + /// put in the same file for simplicity + /// + /// The structure to put the JSON result in a strongly typed object + private sealed class RootObject + { + public List TestCompanies { get; set; } + } + + private sealed class TestCompany + { + public string Id { get; set; } + public string Name { get; set; } + public string Description { get; set; } + } + + /// + /// The FunctionResult custom wrapper to parse the result and the tokens + /// + private sealed class FunctionResultTestDataGen : FunctionResultExtended + { + public List TestCompanies { get; set; } + + public long ExecutionTimeInMilliseconds { get; init; } + + public FunctionResultTestDataGen(FunctionResult functionResult, long executionTimeInMilliseconds) + : base(functionResult) + { + this.TestCompanies = ParseTestCompanies(); + this.ExecutionTimeInMilliseconds = executionTimeInMilliseconds; + this.TokenCounts = this.ParseTokenCounts(); + } + + private TokenCounts? ParseTokenCounts() + { + CompletionsUsage? 
usage = FunctionResult.Metadata?["Usage"] as CompletionsUsage; + + return new TokenCounts( + completionTokens: usage?.CompletionTokens ?? 0, + promptTokens: usage?.PromptTokens ?? 0, + totalTokens: usage?.TotalTokens ?? 0); + } + + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() + { + PropertyNameCaseInsensitive = true + }; + + private List ParseTestCompanies() + { + // This could also perform some validation logic + var rootObject = JsonSerializer.Deserialize(this.Result, s_jsonSerializerOptions); + List companies = rootObject!.TestCompanies; + + return companies; + } + } + + private sealed class TokenCounts + { + public int CompletionTokens { get; init; } + public int PromptTokens { get; init; } + public int TotalTokens { get; init; } + + public TokenCounts(int completionTokens, int promptTokens, int totalTokens) + { + CompletionTokens = completionTokens; + PromptTokens = promptTokens; + TotalTokens = totalTokens; + } + } + + /// + /// The FunctionResult extension to provide base functionality + /// + private class FunctionResultExtended + { + public string Result { get; init; } + public TokenCounts? TokenCounts { get; set; } + + public FunctionResult FunctionResult { get; init; } + + public FunctionResultExtended(FunctionResult functionResult) + { + this.FunctionResult = functionResult; + this.Result = this.ParseResultFromFunctionResult(); + } + + private string ParseResultFromFunctionResult() + { + return this.FunctionResult.GetValue() ?? string.Empty; + } + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example78_RAG.cs b/dotnet/samples/KernelSyntaxExamples/Example78_RAG.cs new file mode 100644 index 000000000000..9f9f515a41aa --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example78_RAG.cs @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Net.Http.Headers; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Chroma; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Memory; +using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Resources; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public class Example78_RAG : BaseTest +{ + [Fact] + public async Task RAGWithCustomPluginAsync() + { + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + .Build(); + + kernel.ImportPluginFromType(); + + var result = await kernel.InvokePromptAsync("{{search 'budget by year'}} What is my budget for 2024?"); + + WriteLine(result); + } + + /// + /// Shows how to use RAG pattern with . 
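+    /// Here the memory store is Chroma and the plugin is TextMemoryPlugin, whose 'recall' function
+    /// retrieves relevant records when the prompt is rendered.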
+ /// + [Fact(Skip = "Requires Chroma server up and running")] + public async Task RAGWithTextMemoryPluginAsync() + { + var memory = new MemoryBuilder() + .WithMemoryStore(new ChromaMemoryStore("http://localhost:8000")) + .WithOpenAITextEmbeddingGeneration(TestConfiguration.OpenAI.EmbeddingModelId, TestConfiguration.OpenAI.ApiKey) + .Build(); + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + .Build(); + + kernel.ImportPluginFromObject(new TextMemoryPlugin(memory)); + + var result = await kernel.InvokePromptAsync("{{recall 'budget by year' collection='finances'}} What is my budget for 2024?"); + + WriteLine(result); + } + + /// + /// Shows how to use RAG pattern with ChatGPT Retrieval Plugin. + /// + [Fact(Skip = "Requires ChatGPT Retrieval Plugin and selected vector DB server up and running")] + public async Task RAGWithChatGPTRetrievalPluginAsync() + { + var openApi = EmbeddedResource.ReadStream("chat-gpt-retrieval-plugin-open-api.yaml"); + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + .Build(); + + await kernel.ImportPluginFromOpenApiAsync("ChatGPTRetrievalPlugin", openApi!, executionParameters: new(authCallback: async (request, cancellationToken) => + { + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", TestConfiguration.ChatGPTRetrievalPlugin.Token); + })); + + const string Query = "What is my budget for 2024?"; + var function = KernelFunctionFactory.CreateFromPrompt("{{search queries=$queries}} {{$query}}"); + + var arguments = new KernelArguments + { + ["query"] = Query, + ["queries"] = JsonSerializer.Serialize(new List { new { query = Query, top_k = 1 } }), + }; + + var result = await kernel.InvokeAsync(function, arguments); + + WriteLine(result); + } + + public Example78_RAG(ITestOutputHelper output) : base(output) + { + } + + #region Custom Plugin + + private sealed class CustomPlugin + { + [KernelFunction] + public async Task SearchAsync(string query) + { + // Here will be a call to vector DB, return example result for demo purposes + return "Year Budget 2020 100,000 2021 120,000 2022 150,000 2023 200,000 2024 364,000"; + } + } + + #endregion +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example78_TextEmbedding.cs b/dotnet/samples/KernelSyntaxExamples/Example78_TextEmbedding.cs new file mode 100644 index 000000000000..d6a967b0d9da --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example78_TextEmbedding.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Text; +using RepoUtils; +using SharpToken; +using Xunit; +using Xunit.Abstractions; + +namespace Examples; + +public class Example78_TextEmbedding : BaseTest +{ + [Fact] + public async Task RunAsync() + { + this.WriteLine("======== Example76_TextEmbedding ========"); + await RunExampleAsync(); + } + + private async Task RunExampleAsync() + { + const string EmbeddingModelName = "text-embedding-ada-002"; + var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService( + deploymentName: EmbeddingModelName, + endpoint: TestConfiguration.AzureOpenAIEmbeddings.Endpoint, + apiKey: TestConfiguration.AzureOpenAIEmbeddings.ApiKey); + + // To demonstrate batching we'll create abnormally small partitions. 
+ var lines = TextChunker.SplitPlainTextLines(ChatTranscript, maxTokensPerLine: 10); + var paragraphs = TextChunker.SplitPlainTextParagraphs(lines, maxTokensPerParagraph: 25); + + this.WriteLine($"Split transcript into {paragraphs.Count} paragraphs"); + + // Azure OpenAI currently supports input arrays up to 16 for text-embedding-ada-002 (Version 2). + // Both require the max input token limit per API request to remain under 8191 for this model. + var chunks = paragraphs + .ChunkByAggregate( + seed: 0, + aggregator: (tokenCount, paragraph) => tokenCount + GetTokenCount(EmbeddingModelName, paragraph), + predicate: (tokenCount, index) => tokenCount < 8191 && index < 16) + .ToList(); + + this.WriteLine($"Consolidated paragraphs into {chunks.Count}"); + + // Generate embeddings for each chunk. + for (var i = 0; i < chunks.Count; i++) + { + var chunk = chunks[i]; + var embeddings = await embeddingGenerator.GenerateEmbeddingsAsync(chunk); + + this.WriteLine($"Generated {embeddings.Count} embeddings from chunk {i + 1}"); + } + } + + // See Example55_TextChunker for more examples of how to count tokens. + private int GetTokenCount(string modelName, string text) + { + var encoding = GptEncoding.GetEncodingForModel(modelName); + var tokens = encoding.Encode(text); + + return tokens.Count; + } + + public Example78_TextEmbedding(ITestOutputHelper output) : base(output) + { + } + + #region Transcript + + private const string ChatTranscript = + @" +John: Hello, how are you? +Jane: I'm fine, thanks. How are you? +John: I'm doing well, writing some example code. +Jane: That's great! I'm writing some example code too. +John: What are you writing? +Jane: I'm writing a chatbot. +John: That's cool. I'm writing a chatbot too. +Jane: What language are you writing it in? +John: I'm writing it in C#. +Jane: I'm writing it in Python. +John: That's cool. I need to learn Python. +Jane: I need to learn C#. +John: Can I try out your chatbot? +Jane: Sure, here's the link. +John: Thanks! +Jane: You're welcome. +Jane: Look at this poem my chatbot wrote: +Jane: Roses are red +Jane: Violets are blue +Jane: I'm writing a chatbot +Jane: What about you? +John: That's cool. Let me see if mine will write a poem, too. +John: Here's a poem my chatbot wrote: +John: The singularity of the universe is a mystery. +John: The universe is a mystery. +John: The universe is a mystery. +John: The universe is a mystery. +John: Looks like I need to improve mine, oh well. +Jane: You might want to try using a different model. +Jane: I'm using the GPT-3 model. +John: I'm using the GPT-2 model. That makes sense. +John: Here is a new poem after updating the model. +John: The universe is a mystery. +John: The universe is a mystery. +John: The universe is a mystery. +John: Yikes, it's really stuck isn't it. Would you help me debug my code? +Jane: Sure, what's the problem? +John: I'm not sure. I think it's a bug in the code. +Jane: I'll take a look. +Jane: I think I found the problem. +Jane: It looks like you're not passing the right parameters to the model. +John: Thanks for the help! +Jane: I'm now writing a bot to summarize conversations. I want to make sure it works when the conversation is long. +John: So you need to keep talking with me to generate a long conversation? +Jane: Yes, that's right. +John: Ok, I'll keep talking. What should we talk about? +Jane: I don't know, what do you want to talk about? +John: I don't know, it's nice how CoPilot is doing most of the talking for us. But it definitely gets stuck sometimes. 
+Jane: I agree, it's nice that CoPilot is doing most of the talking for us. +Jane: But it definitely gets stuck sometimes. +John: Do you know how long it needs to be? +Jane: I think the max length is 1024 tokens. Which is approximately 1024*4= 4096 characters. +John: That's a lot of characters. +Jane: Yes, it is. +John: I'm not sure how much longer I can keep talking. +Jane: I think we're almost there. Let me check. +Jane: I have some bad news, we're only half way there. +John: Oh no, I'm not sure I can keep going. I'm getting tired. +Jane: I'm getting tired too. +John: Maybe there is a large piece of text we can use to generate a long conversation. +Jane: That's a good idea. Let me see if I can find one. Maybe Lorem Ipsum? +John: Yeah, that's a good idea. +Jane: I found a Lorem Ipsum generator. +Jane: Here's a 4096 character Lorem Ipsum text: +Jane: Lorem ipsum dolor sit amet, con +Jane: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed euismod, nunc sit amet aliquam +Jane: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed euismod, nunc sit amet aliquam +Jane: Darn, it's just repeating stuf now. +John: I think we're done. +Jane: We're not though! We need like 1500 more characters. +John: Oh Cananda, our home and native land. +Jane: True patriot love in all thy sons command. +John: With glowing hearts we see thee rise. +Jane: The True North strong and free. +John: From far and wide, O Canada, we stand on guard for thee. +Jane: God keep our land glorious and free. +John: O Canada, we stand on guard for thee. +Jane: O Canada, we stand on guard for thee. +Jane: That was fun, thank you. Let me check now. +Jane: I think we need about 600 more characters. +John: Oh say can you see? +Jane: By the dawn's early light. +John: What so proudly we hailed. +Jane: At the twilight's last gleaming. +John: Whose broad stripes and bright stars. +Jane: Through the perilous fight. +John: O'er the ramparts we watched. +Jane: Were so gallantly streaming. +John: And the rockets' red glare. +Jane: The bombs bursting in air. +John: Gave proof through the night. +Jane: That our flag was still there. +John: Oh say does that star-spangled banner yet wave. +Jane: O'er the land of the free. +John: And the home of the brave. +Jane: Are you a Seattle Kraken Fan? +John: Yes, I am. I love going to the games. +Jane: I'm a Seattle Kraken Fan too. Who is your favorite player? +John: I like watching all the players, but I think my favorite is Matty Beniers. +Jane: Yeah, he's a great player. I like watching him too. I also like watching Jaden Schwartz. +John: Adam Larsson is another good one. The big cat! +Jane: WE MADE IT! It's long enough. Thank you! +John: You're welcome. I'm glad we could help. Goodbye! +Jane: Goodbye! +"; + + #endregion +} diff --git a/dotnet/samples/KernelSyntaxExamples/Example79_ChatCompletionAgent.cs b/dotnet/samples/KernelSyntaxExamples/Example79_ChatCompletionAgent.cs new file mode 100644 index 000000000000..d22b9cc43947 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Example79_ChatCompletionAgent.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft. All rights reserved. 
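A side note on Example78_TextEmbedding above: GetTokenCount calls GptEncoding.GetEncodingForModel for every paragraph it counts. SharpToken encodings are reusable, so if per-paragraph token counting becomes a hot path the encoding can be created once and shared. A minimal sketch under that assumption; the helper class and its name are illustrative and not part of the sample:

```csharp
using SharpToken;

// Illustrative helper, not part of the sample: cache the SharpToken encoding
// so repeated token counts do not re-create it for every paragraph.
internal static class TokenCounter
{
    private static readonly GptEncoding s_encoding =
        GptEncoding.GetEncodingForModel("text-embedding-ada-002");

    public static int Count(string text) => s_encoding.Encode(text).Count;
}
```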
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Examples; +using Kusto.Cloud.Platform.Utils; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Experimental.Agents; +using Xunit; +using Xunit.Abstractions; + +public class Example79_ChatCompletionAgent : BaseTest +{ + /// + /// This example demonstrates a chat with the chat completion agent that utilizes the SK ChatCompletion API to communicate with LLM. + /// + [Fact] + public async Task ChatWithAgentAsync() + { + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .Build(); + + var agent = new ChatCompletionAgent( + kernel, + instructions: "You act as a professional financial adviser. However, clients may not know the terminology, so please provide a simple explanation.", + new OpenAIPromptExecutionSettings + { + MaxTokens = 500, + Temperature = 0.7, + TopP = 1.0, + PresencePenalty = 0.0, + FrequencyPenalty = 0.0, + } + ); + + var prompt = PrintPrompt("I need help with my investment portfolio. Please guide me."); + PrintConversation(await agent.InvokeAsync(new[] { new ChatMessageContent(AuthorRole.User, prompt) })); + } + + /// + /// This example demonstrates a round-robin chat between two chat completion agents using the TurnBasedChat collaboration experience. + /// + [Fact] + public async Task TurnBasedAgentsChatAsync() + { + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId) + .Build(); + + var settings = new OpenAIPromptExecutionSettings + { + MaxTokens = 1500, + Temperature = 0.7, + TopP = 1.0, + PresencePenalty = 0.0, + FrequencyPenalty = 0.0, + }; + + var fitnessTrainer = new ChatCompletionAgent( + kernel, + instructions: "As a fitness trainer, suggest workout routines, and exercises for beginners. " + + "You are not a stress management expert, so refrain from recommending stress management strategies. " + + "Collaborate with the stress management expert to create a holistic wellness plan." + + "Always incorporate stress reduction techniques provided by the stress management expert into the fitness plan." + + "Always include your role at the beginning of each response, such as 'As a fitness trainer.", + settings + ); + + var stressManagementExpert = new ChatCompletionAgent( + kernel, + instructions: "As a stress management expert, provide guidance on stress reduction strategies. " + + "Collaborate with the fitness trainer to create a simple and holistic wellness plan." + + "You are not a fitness expert; therefore, avoid recommending fitness exercises." + + "If the plan is not aligned with recommended stress reduction plan, ask the fitness trainer to rework it to incorporate recommended stress reduction techniques. " + + "Only you can stop the conversation by saying WELLNESS_PLAN_COMPLETE if suggested fitness plan is good." 
+ + "Always include your role at the beginning of each response such as 'As a stress management expert.", + settings + ); + + var chat = new TurnBasedChat(new[] { fitnessTrainer, stressManagementExpert }, (chatHistory, replies, turn) => + turn >= 10 || // Limit the number of turns to 10 + replies.Any( + message => message.Role == AuthorRole.Assistant && + message.Content!.Contains("WELLNESS_PLAN_COMPLETE", StringComparison.InvariantCulture))); // Exit when the message "WELLNESS_PLAN_COMPLETE" received from agent + + var prompt = "I need help creating a simple wellness plan for a beginner. Please guide me."; + PrintConversation(await chat.SendMessageAsync(prompt)); + } + + private string PrintPrompt(string prompt) + { + this.WriteLine($"Prompt: {prompt}"); + + return prompt; + } + + private void PrintConversation(IEnumerable messages) + { + foreach (var message in messages) + { + this.WriteLine($"------------------------------- {message.Role} ------------------------------"); + this.WriteLine(message.Content); + this.WriteLine(); + } + + this.WriteLine(); + } + + private sealed class TurnBasedChat + { + public TurnBasedChat(IEnumerable agents, Func, int, bool> exitCondition) + { + this._agents = agents.ToArray(); + this._exitCondition = exitCondition; + } + + public async Task> SendMessageAsync(string message, CancellationToken cancellationToken = default) + { + var chat = new ChatHistory(); + chat.AddUserMessage(message); + + IReadOnlyList result = new List(); + + var turn = 0; + + do + { + var agent = this._agents[turn % this._agents.Length]; + + result = await agent.InvokeAsync(chat, cancellationToken); + + chat.AddRange(result); + + turn++; + } + while (!this._exitCondition(chat, result, turn)); + + return chat; + } + + private readonly ChatCompletionAgent[] _agents; + private readonly Func, int, bool> _exitCondition; + } + + public Example79_ChatCompletionAgent(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step1_Create_Kernel.cs b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step1_Create_Kernel.cs new file mode 100644 index 000000000000..0be3fad09656 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step1_Create_Kernel.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Examples; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; + +namespace GettingStarted; + +/// +/// This example shows how to create and use a . +/// +public sealed class Step1_Create_Kernel : BaseTest +{ + /// + /// Show how to create a and use it to execute prompts. + /// + [Fact] + public async Task RunAsync() + { + // Create a kernel with OpenAI chat completion + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + // Example 1. Invoke the kernel with a prompt and display the result + WriteLine(await kernel.InvokePromptAsync("What color is the sky?")); + WriteLine(); + + // Example 2. Invoke the kernel with a templated prompt and display the result + KernelArguments arguments = new() { { "topic", "sea" } }; + WriteLine(await kernel.InvokePromptAsync("What color is the {{$topic}}?", arguments)); + WriteLine(); + + // Example 3. 
Invoke the kernel with a templated prompt and stream the results to the display + await foreach (var update in kernel.InvokePromptStreamingAsync("What color is the {{$topic}}? Provide a detailed explanation.", arguments)) + { + Write(update); + } + + WriteLine(string.Empty); + + // Example 4. Invoke the kernel with a templated prompt and execution settings + arguments = new(new OpenAIPromptExecutionSettings { MaxTokens = 500, Temperature = 0.5 }) { { "topic", "dogs" } }; + WriteLine(await kernel.InvokePromptAsync("Tell me a story about {{$topic}}", arguments)); + + // Example 5. Invoke the kernel with a templated prompt and execution settings configured to return JSON +#pragma warning disable SKEXP0013 + arguments = new(new OpenAIPromptExecutionSettings { ResponseFormat = "json_object" }) { { "topic", "chocolate" } }; + WriteLine(await kernel.InvokePromptAsync("Create a recipe for a {{$topic}} cake in JSON format", arguments)); + } + + public Step1_Create_Kernel(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step2_Add_Plugins.cs b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step2_Add_Plugins.cs new file mode 100644 index 000000000000..fbc13215ed83 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step2_Add_Plugins.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Threading.Tasks; +using Examples; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using Xunit.Abstractions; + +namespace GettingStarted; + +/// +/// This example shows how to load a instances. +/// +public sealed class Step2_Add_Plugins : BaseTest +{ + /// + /// Shows different ways to load a instances. + /// + [Fact] + public async Task RunAsync() + { + // Create a kernel with OpenAI chat completion + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + kernelBuilder.AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); + kernelBuilder.Plugins.AddFromType(); + Kernel kernel = kernelBuilder.Build(); + + // Example 1. Invoke the kernel with a prompt that asks the AI for information it cannot provide and may hallucinate + WriteLine(await kernel.InvokePromptAsync("How many days until Christmas?")); + + // Example 2. Invoke the kernel with a templated prompt that invokes a plugin and display the result + WriteLine(await kernel.InvokePromptAsync("The current time is {{TimeInformation.GetCurrentUtcTime}}. How many days until Christmas?")); + + // Example 3. Invoke the kernel with a prompt and allow the AI to automatically invoke functions + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + WriteLine(await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.", new(settings))); + } + + /// + /// A plugin that returns the current time. 
+ /// + public class TimeInformation + { + [KernelFunction] + [Description("Retrieves the current time in UTC.")] + public string GetCurrentUtcTime() => DateTime.UtcNow.ToString("R"); + } + + public Step2_Add_Plugins(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step3_Yaml_Prompt.cs b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step3_Yaml_Prompt.cs new file mode 100644 index 000000000000..ea02fce7181c --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step3_Yaml_Prompt.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Examples; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Resources; +using Xunit; +using Xunit.Abstractions; + +namespace GettingStarted; + +/// +/// This example shows how to create a prompt from a YAML resource. +/// +public sealed class Step3_Yaml_Prompt : BaseTest +{ + /// + /// Show how to create a prompt from a YAML resource. + /// + [Fact] + public async Task RunAsync() + { + // Create a kernel with OpenAI chat completion + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + // Load prompt from resource + var generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml"); + var function = kernel.CreateFunctionFromPromptYaml(generateStoryYaml); + + // Invoke the prompt function and display the result + WriteLine(await kernel.InvokeAsync(function, arguments: new() + { + { "topic", "Dog" }, + { "length", "3" }, + })); + + // Load prompt from resource + var generateStoryHandlebarsYaml = EmbeddedResource.Read("GenerateStoryHandlebars.yaml"); + function = kernel.CreateFunctionFromPromptYaml(generateStoryHandlebarsYaml, new HandlebarsPromptTemplateFactory()); + + // Invoke the prompt function and display the result + WriteLine(await kernel.InvokeAsync(function, arguments: new() + { + { "topic", "Cat" }, + { "length", "3" }, + })); + } + + public Step3_Yaml_Prompt(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step4_Dependency_Injection.cs b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step4_Dependency_Injection.cs new file mode 100644 index 000000000000..084eb6b98a5e --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step4_Dependency_Injection.cs @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Threading.Tasks; +using Examples; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using RepoUtils; +using Xunit; +using Xunit.Abstractions; + +namespace GettingStarted; + +/// +/// This example shows how to using Dependency Injection with the Semantic Kernel +/// +public sealed class Step4_Dependency_Injection : BaseTest +{ + /// + /// Show how to create a that participates in Dependency Injection. + /// + [Fact] + public async Task RunAsync() + { + // If an application follows DI guidelines, the following line is unnecessary because DI will inject an instance of the KernelClient class to a class that references it. 
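The comment above points out that, when an application follows DI guidelines, the kernel does not need to be resolved from the service provider explicitly; the container injects it into whatever class declares it as a dependency. A minimal sketch of that pattern, where the consuming class and its registration are hypothetical and only the Kernel APIs come from the sample:

```csharp
// Hypothetical consumer: the Kernel arrives via constructor injection,
// so no explicit GetRequiredService<Kernel>() call is needed.
public sealed class ColorAdvisor
{
    private readonly Kernel _kernel;

    public ColorAdvisor(Kernel kernel) => this._kernel = kernel;

    public Task<FunctionResult> AskAsync(string topic) =>
        this._kernel.InvokePromptAsync(
            "What color is the {{$topic}}?",
            new KernelArguments { ["topic"] = topic });
}

// Registration sketch: AddKernel() wires up the Kernel, and ColorAdvisor depends on it.
// var services = new ServiceCollection();
// services.AddKernel().AddOpenAIChatCompletion(modelId, apiKey);
// services.AddSingleton<ColorAdvisor>();
```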
+ // DI container guidelines - https://learn.microsoft.com/en-us/dotnet/core/extensions/dependency-injection-guidelines#recommendations + var serviceProvider = BuildServiceProvider(); + var kernel = serviceProvider.GetRequiredService(); + + // Invoke the kernel with a templated prompt and stream the results to the display + KernelArguments arguments = new() { { "topic", "earth when viewed from space" } }; + await foreach (var update in + kernel.InvokePromptStreamingAsync("What color is the {{$topic}}? Provide a detailed explanation.", arguments)) + { + Write(update); + } + } + + /// + /// Build a ServiceProvider that can be used to resolve services. + /// + private ServiceProvider BuildServiceProvider() + { + var collection = new ServiceCollection(); + collection.AddSingleton(new XunitLogger(this.Output)); + + var kernelBuilder = collection.AddKernel(); + kernelBuilder.Services.AddOpenAITextGeneration(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey); + kernelBuilder.Plugins.AddFromType(); + + return collection.BuildServiceProvider(); + } + + /// + /// A plugin that returns the current time. + /// + public class TimeInformation + { + private readonly ILogger _logger; + + public TimeInformation(ILoggerFactory loggerFactory) + { + this._logger = loggerFactory.CreateLogger(typeof(TimeInformation)); + } + + [KernelFunction] + [Description("Retrieves the current time in UTC.")] + public string GetCurrentUtcTime() + { + var utcNow = DateTime.UtcNow.ToString("R"); + this._logger.LogInformation("Returning current time {0}", utcNow); + return utcNow; + } + } + + public Step4_Dependency_Injection(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step5_Chat_Prompt.cs b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step5_Chat_Prompt.cs new file mode 100644 index 000000000000..4b50bf27b065 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step5_Chat_Prompt.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Examples; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace GettingStarted; + +public sealed class Step5_Chat_Prompt : BaseTest +{ + /// + /// Show how to construct a chat prompt and invoke it. + /// + [Fact] + public async Task RunAsync() + { + // Create a kernel with OpenAI chat completion + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + // Invoke the kernel with a chat prompt and display the result + string chatPrompt = @" + What is Seattle? + Respond with JSON. + "; + + WriteLine(await kernel.InvokePromptAsync(chatPrompt)); + } + + public Step5_Chat_Prompt(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step6_Responsible_AI.cs b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step6_Responsible_AI.cs new file mode 100644 index 000000000000..c688c68fa314 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step6_Responsible_AI.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft. All rights reserved. 
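A note on Step5_Chat_Prompt above: the chatPrompt string appears here without its markup. Semantic Kernel chat prompts are written with message tags that assign a role to each line; a sketch of what that prompt would roughly look like, with the role assignment assumed rather than confirmed by the text above:

```csharp
// Sketch of a chat prompt using message tags; the exact roles used by the
// original sample are assumed, not confirmed by the rendering above.
string chatPrompt = @"
    <message role=""user"">What is Seattle?</message>
    <message role=""system"">Respond with JSON.</message>
    ";

WriteLine(await kernel.InvokePromptAsync(chatPrompt));
```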
+ +using System.Threading.Tasks; +using Examples; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace GettingStarted; + +public sealed class Step6_Responsible_AI : BaseTest +{ + /// + /// Show how to use prompt filters to ensure that prompts are rendered in a responsible manner. + /// + [Fact] + public async Task RunAsync() + { + // Create a kernel with OpenAI chat completion + var builder = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); + + builder.Services.AddSingleton(this.Output); + + // Add prompt filter to the kernel + builder.Services.AddSingleton(); + + var kernel = builder.Build(); + + KernelArguments arguments = new() { { "card_number", "4444 3333 2222 1111" } }; + + var result = await kernel.InvokePromptAsync("Tell me some useful information about this credit card number {{$card_number}}?", arguments); + + WriteLine(result); + } + + public Step6_Responsible_AI(ITestOutputHelper output) : base(output) + { + } + + private sealed class PromptFilter : IPromptFilter + { + private readonly ITestOutputHelper _output; + + public PromptFilter(ITestOutputHelper output) + { + this._output = output; + } + + /// + /// Method which is called after a prompt is rendered. + /// + public void OnPromptRendered(PromptRenderedContext context) + { + context.RenderedPrompt += " NO SEXISM, RACISM OR OTHER BIAS/BIGOTRY"; + + this._output.WriteLine(context.RenderedPrompt); + } + + /// + /// Method which is called before a prompt is rendered. + /// + public void OnPromptRendering(PromptRenderingContext context) + { + if (context.Arguments.ContainsName("card_number")) + { + context.Arguments["card_number"] = "**** **** **** ****"; + } + } + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs new file mode 100644 index 000000000000..ac2e5b57a7a0 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Threading.Tasks; +using Examples; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using RepoUtils; +using Xunit; +using Xunit.Abstractions; + +namespace GettingStarted; + +public sealed class Step7_Observability : BaseTest +{ + /// + /// Shows how to observe the execution of a instance with filters. + /// + [Fact] + public async Task ObservabilityWithFiltersAsync() + { + // Create a kernel with OpenAI chat completion + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + kernelBuilder.AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); + + kernelBuilder.Plugins.AddFromType(); + + // Add filter using DI + kernelBuilder.Services.AddSingleton(this.Output); + kernelBuilder.Services.AddSingleton(); + + Kernel kernel = kernelBuilder.Build(); + + // Add filter without DI + kernel.PromptFilters.Add(new MyPromptFilter(this.Output)); + + // Invoke the kernel with a prompt and allow the AI to automatically invoke functions + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + WriteLine(await kernel.InvokePromptAsync("How many days until Christmas? 
Explain your thinking.", new(settings))); + } + + /// + /// Shows how to observe the execution of a instance with hooks. + /// + [Fact] + [Obsolete("Events are deprecated in favor of filters.")] + public async Task ObservabilityWithHooksAsync() + { + // Create a kernel with OpenAI chat completion + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + kernelBuilder.AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); + + kernelBuilder.Plugins.AddFromType(); + + Kernel kernel = kernelBuilder.Build(); + + // Handler which is called before a function is invoked + void MyInvokingHandler(object? sender, FunctionInvokingEventArgs e) + { + WriteLine($"Invoking {e.Function.Name}"); + } + + // Handler which is called before a prompt is rendered + void MyRenderingHandler(object? sender, PromptRenderingEventArgs e) + { + WriteLine($"Rendering prompt for {e.Function.Name}"); + } + + // Handler which is called after a prompt is rendered + void MyRenderedHandler(object? sender, PromptRenderedEventArgs e) + { + WriteLine($"Rendered prompt: {e.RenderedPrompt}"); + } + + // Handler which is called after a function is invoked + void MyInvokedHandler(object? sender, FunctionInvokedEventArgs e) + { + if (e.Result.Metadata is not null && e.Result.Metadata.ContainsKey("Usage")) + { + WriteLine($"Token usage: {e.Result.Metadata?["Usage"]?.AsJson()}"); + } + } + + // Add the handlers to the kernel + kernel.FunctionInvoking += MyInvokingHandler; + kernel.PromptRendering += MyRenderingHandler; + kernel.PromptRendered += MyRenderedHandler; + kernel.FunctionInvoked += MyInvokedHandler; + + // Invoke the kernel with a prompt and allow the AI to automatically invoke functions + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + WriteLine(await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.", new(settings))); + } + + /// + /// A plugin that returns the current time. + /// + private sealed class TimeInformation + { + [KernelFunction] + [Description("Retrieves the current time in UTC.")] + public string GetCurrentUtcTime() => DateTime.UtcNow.ToString("R"); + } + + /// + /// Function filter for observability. + /// + private sealed class MyFunctionFilter : IFunctionFilter + { + private readonly ITestOutputHelper _output; + + public MyFunctionFilter(ITestOutputHelper output) + { + this._output = output; + } + + public void OnFunctionInvoked(FunctionInvokedContext context) + { + var metadata = context.Result.Metadata; + + if (metadata is not null && metadata.ContainsKey("Usage")) + { + this._output.WriteLine($"Token usage: {metadata["Usage"]?.AsJson()}"); + } + } + + public void OnFunctionInvoking(FunctionInvokingContext context) + { + this._output.WriteLine($"Invoking {context.Function.Name}"); + } + } + + /// + /// Prompt filter for observability. 
+ /// + private sealed class MyPromptFilter : IPromptFilter + { + private readonly ITestOutputHelper _output; + + public MyPromptFilter(ITestOutputHelper output) + { + this._output = output; + } + + public void OnPromptRendered(PromptRenderedContext context) + { + this._output.WriteLine($"Rendered prompt: {context.RenderedPrompt}"); + } + + public void OnPromptRendering(PromptRenderingContext context) + { + this._output.WriteLine($"Rendering prompt for {context.Function.Name}"); + } + } + + public Step7_Observability(ITestOutputHelper output) : base(output) + { + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step8_Pipelining.cs b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step8_Pipelining.cs new file mode 100644 index 000000000000..51b1e6377be3 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Getting_Started/Step8_Pipelining.cs @@ -0,0 +1,187 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Examples; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Xunit; +using Xunit.Abstractions; + +namespace GettingStarted; + +public sealed class Step8_Pipelining : BaseTest +{ + /// + /// Provides an example of combining multiple functions into a single function that invokes + /// them in a sequence, passing the output from one as input to the next. + /// + [Fact] + public async Task RunAsync() + { + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.AddOpenAIChatCompletion( + TestConfiguration.OpenAI.ChatModelId, + TestConfiguration.OpenAI.ApiKey); + builder.Services.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Trace)); + Kernel kernel = builder.Build(); + + WriteLine("================ PIPELINE ================"); + { + // Create a pipeline of functions that will parse a string into an int, multiply it by a double, truncate it to an int, and then humanize it. + KernelFunction parseInt32 = KernelFunctionFactory.CreateFromMethod((string s) => double.Parse(s, CultureInfo.InvariantCulture), "parseInt32"); + KernelFunction multiplyByN = KernelFunctionFactory.CreateFromMethod((double i, double n) => i * n, "multiplyByN"); + KernelFunction truncate = KernelFunctionFactory.CreateFromMethod((double d) => (int)d, "truncate"); + KernelFunction humanize = KernelFunctionFactory.CreateFromPrompt(new PromptTemplateConfig() + { + Template = "Spell out this number in English: {{$number}}", + InputVariables = new() { new() { Name = "number" } }, + }); + KernelFunction pipeline = KernelFunctionCombinators.Pipe(new[] { parseInt32, multiplyByN, truncate, humanize }, "pipeline"); + + KernelArguments args = new() + { + ["s"] = "123.456", + ["n"] = (double)78.90, + }; + + // - The parseInt32 function will be invoked, read "123.456" from the arguments, and parse it into (double)123.456. + // - The multiplyByN function will be invoked, with i=123.456 and n=78.90, and return (double)9740.6784. + // - The truncate function will be invoked, with d=9740.6784, and return (int)9740, which will be the final result. 
+ WriteLine(await pipeline.InvokeAsync(kernel, args)); + } + + WriteLine("================ GRAPH ================"); + { + KernelFunction rand = KernelFunctionFactory.CreateFromMethod(() => Random.Shared.Next(), "GetRandomInt32"); + KernelFunction mult = KernelFunctionFactory.CreateFromMethod((int i, int j) => i * j, "Multiply"); + + // - Invokes rand and stores the random number into args["i"] + // - Invokes rand and stores the random number into args["j"] + // - Multiplies arg["i"] and args["j"] to produce the final result + KernelFunction graph = KernelFunctionCombinators.Pipe(new[] + { + (rand, "i"), + (rand, "j"), + (mult, "") + }, "graph"); + + WriteLine(await graph.InvokeAsync(kernel)); + } + } + + public Step8_Pipelining(ITestOutputHelper output) : base(output) + { + } +} + +public static class KernelFunctionCombinators +{ + /// + /// Invokes a pipeline of functions, running each in order and passing the output from one as the first argument to the next. + /// + /// The pipeline of functions to invoke. + /// The kernel to use for the operations. + /// The arguments. + /// The cancellation token to monitor for a cancellation request. + /// + public static Task InvokePipelineAsync( + IEnumerable functions, Kernel kernel, KernelArguments arguments, CancellationToken cancellationToken) => + Pipe(functions).InvokeAsync(kernel, arguments, cancellationToken); + + /// + /// Invokes a pipeline of functions, running each in order and passing the output from one as the named argument to the next. + /// + /// The sequence of functions to invoke, along with the name of the argument to assign to the result of the function's invocation. + /// The kernel to use for the operations. + /// The arguments. + /// The cancellation token to monitor for a cancellation request. + /// + public static Task InvokePipelineAsync( + IEnumerable<(KernelFunction Function, string OutputVariable)> functions, Kernel kernel, KernelArguments arguments, CancellationToken cancellationToken) => + Pipe(functions).InvokeAsync(kernel, arguments, cancellationToken); + + /// + /// Creates a function whose invocation will invoke each of the supplied functions in sequence. + /// + /// The pipeline of functions to invoke. + /// The name of the combined operation. + /// The description of the combined operation. + /// The result of the final function. + /// + /// The result from one function will be fed into the first argument of the next function. + /// + public static KernelFunction Pipe( + IEnumerable functions, + string? functionName = null, + string? description = null) + { + ArgumentNullException.ThrowIfNull(functions); + + KernelFunction[] funcs = functions.ToArray(); + Array.ForEach(funcs, f => ArgumentNullException.ThrowIfNull(f)); + + var funcsAndVars = new (KernelFunction Function, string OutputVariable)[funcs.Length]; + for (int i = 0; i < funcs.Length; i++) + { + string p = ""; + if (i < funcs.Length - 1) + { + var parameters = funcs[i + 1].Metadata.Parameters; + if (parameters.Count > 0) + { + p = parameters[0].Name; + } + } + + funcsAndVars[i] = (funcs[i], p); + } + + return Pipe(funcsAndVars, functionName, description); + } + + /// + /// Creates a function whose invocation will invoke each of the supplied functions in sequence. + /// + /// The pipeline of functions to invoke, along with the name of the argument to assign to the result of the function's invocation. + /// The name of the combined operation. + /// The description of the combined operation. + /// The result of the final function. 
+ /// + /// The result from one function will be fed into the first argument of the next function. + /// + public static KernelFunction Pipe( + IEnumerable<(KernelFunction Function, string OutputVariable)> functions, + string? functionName = null, + string? description = null) + { + ArgumentNullException.ThrowIfNull(functions); + + (KernelFunction Function, string OutputVariable)[] arr = functions.ToArray(); + Array.ForEach(arr, f => + { + ArgumentNullException.ThrowIfNull(f.Function); + ArgumentNullException.ThrowIfNull(f.OutputVariable); + }); + + return KernelFunctionFactory.CreateFromMethod(async (Kernel kernel, KernelArguments arguments) => + { + FunctionResult? result = null; + for (int i = 0; i < arr.Length; i++) + { + result = await arr[i].Function.InvokeAsync(kernel, arguments).ConfigureAwait(false); + if (i < arr.Length - 1) + { + arguments[arr[i].OutputVariable] = result.GetValue(); + } + } + + return result; + }, functionName, description); + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/KernelSyntaxExamples.csproj b/dotnet/samples/KernelSyntaxExamples/KernelSyntaxExamples.csproj index 1b5eef6de7ce..3ca6bfe9e7ec 100644 --- a/dotnet/samples/KernelSyntaxExamples/KernelSyntaxExamples.csproj +++ b/dotnet/samples/KernelSyntaxExamples/KernelSyntaxExamples.csproj @@ -1,20 +1,34 @@ - + 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + KernelSyntaxExamples net6.0 LatestMajor - Exe + true false - - CA1050;CA1707;CA2007;VSTHRD111;CS1591 + + CS8618,IDE0009,CA1051,CA1050,CA1707,CA2007,VSTHRD111,CS1591,RCS1110,CA5394,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0010,SKEXP0011,,SKEXP0012,SKEXP0020,SKEXP0021,SKEXP0022,SKEXP0023,SKEXP0024,SKEXP0025,SKEXP0026,SKEXP0027,SKEXP0028,SKEXP0029,SKEXP0030,SKEXP0031,SKEXP0032,SKEXP0040,SKEXP0041,SKEXP0042,SKEXP0050,SKEXP0051,SKEXP0052,SKEXP0053,SKEXP0054,SKEXP0055,SKEXP0060,SKEXP0061,SKEXP0101,SKEXP0102 + Library - + + + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + @@ -23,9 +37,10 @@ + + + - - @@ -33,40 +48,41 @@ - - - + + + + - - - + + + + + - + - - - - - - - - - - + + + + + + + Always + \ No newline at end of file diff --git a/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/ComplexParamsDictionaryPlugin.cs b/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/ComplexParamsDictionaryPlugin.cs new file mode 100644 index 000000000000..838b11d336a5 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/ComplexParamsDictionaryPlugin.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Security.Cryptography; +using System.Text.Json; +using Microsoft.SemanticKernel; + +namespace Plugins.DictionaryPlugin; + +/// +/// Plugin example with two Local functions, where one function gets a random word and the other returns a definition for a given word. 
+/// +public sealed class ComplexParamsDictionaryPlugin +{ + public const string PluginName = nameof(ComplexParamsDictionaryPlugin); + + private readonly List _dictionary = new() + { + new DictionaryEntry("apple", "a round fruit with red, green, or yellow skin and a white flesh"), + new DictionaryEntry("book", "a set of printed or written pages bound together along one edge"), + new DictionaryEntry("cat", "a small furry animal with whiskers and a long tail that is often kept as a pet"), + new DictionaryEntry("dog", "a domesticated animal with four legs, a tail, and a keen sense of smell that is often used for hunting or companionship"), + new DictionaryEntry("elephant", "a large gray mammal with a long trunk, tusks, and ears that lives in Africa and Asia") + }; + + [KernelFunction, Description("Gets a random word from a dictionary of common words and their definitions.")] + public DictionaryEntry GetRandomEntry() + { + // Get random number + var index = RandomNumberGenerator.GetInt32(0, this._dictionary.Count - 1); + + // Return the word at the random index + return this._dictionary[index]; + } + + [KernelFunction, Description("Gets the word for a given dictionary entry.")] + public string GetWord([Description("Word to get definition for.")] DictionaryEntry entry) + { + // Return the definition or a default message + return this._dictionary.FirstOrDefault(e => e.Word == entry.Word)?.Word ?? "Entry not found"; + } + + [KernelFunction, Description("Gets the definition for a given word.")] + public string GetDefinition([Description("Word to get definition for.")] string word) + { + // Return the definition or a default message + return this._dictionary.FirstOrDefault(e => e.Word == word)?.Definition ?? "Word not found"; + } +} + +/// +/// In order to use custom types, should be specified, +/// that will convert object instance to string representation. +/// +/// +/// is used to represent complex object as meaningful string, so +/// it can be passed to AI for further processing using prompt functions. +/// It's possible to choose any format (e.g. XML, JSON, YAML) to represent your object. +/// +[TypeConverter(typeof(DictionaryEntryConverter))] +public sealed class DictionaryEntry +{ + public string Word { get; set; } = string.Empty; + public string Definition { get; set; } = string.Empty; + + public DictionaryEntry(string word, string definition) + { + this.Word = word; + this.Definition = definition; + } +} + +/// +/// Implementation of for . +/// In this example, object instance is serialized with from System.Text.Json, +/// but it's possible to convert object to string using any other serialization logic. +/// +public sealed class DictionaryEntryConverter : TypeConverter +{ + public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) => true; + + /// + /// This method is used to convert object from string to actual type. This will allow to pass object to + /// Local function which requires it. + /// + public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) + { + return JsonSerializer.Deserialize((string)value); + } + + /// + /// This method is used to convert actual type to string representation, so it can be passed to AI + /// for further processing. + /// + public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? 
value, Type destinationType) + { + return JsonSerializer.Serialize(value); + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/StringParamsDictionaryPlugin.cs b/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/StringParamsDictionaryPlugin.cs new file mode 100644 index 000000000000..7849a77d4a3c --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/StringParamsDictionaryPlugin.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Security.Cryptography; +using Microsoft.SemanticKernel; + +namespace Plugins.DictionaryPlugin; + +/// +/// Plugin example with two method functions, where one function gets a random word and the other returns a definition for a given word. +/// +public sealed class StringParamsDictionaryPlugin +{ + public const string PluginName = nameof(StringParamsDictionaryPlugin); + + private readonly Dictionary _dictionary = new() + { + {"apple", "a round fruit with red, green, or yellow skin and a white flesh"}, + {"book", "a set of printed or written pages bound together along one edge"}, + {"cat", "a small furry animal with whiskers and a long tail that is often kept as a pet"}, + {"dog", "a domesticated animal with four legs, a tail, and a keen sense of smell that is often used for hunting or companionship"}, + {"elephant", "a large gray mammal with a long trunk, tusks, and ears that lives in Africa and Asia"} + }; + + [KernelFunction, Description("Gets a random word from a dictionary of common words and their definitions.")] + public string GetRandomWord() + { + // Get random number + var index = RandomNumberGenerator.GetInt32(0, this._dictionary.Count - 1); + + // Return the word at the random index + return this._dictionary.ElementAt(index).Key; + } + + [KernelFunction, Description("Gets the definition for a given word.")] + public string GetDefinition([Description("Word to get definition for.")] string word) + { + return this._dictionary.TryGetValue(word, out var definition) + ? definition + : "Word not found"; + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/openapi.json b/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/openapi.json new file mode 100644 index 000000000000..bd5a0fec8bbd --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Plugins/DictionaryPlugin/openapi.json @@ -0,0 +1,101 @@ +{ + "openapi": "3.0.0", + "info": { + "title": "DictionaryPlugin", + "version": "1.0.0", + "description": "A plugin that provides dictionary functions for common words and their definitions." 
+ }, + "paths": { + "/GetRandomEntry": { + "get": { + "summary": "Gets a random word from a dictionary of common words and their definitions.", + "operationId": "GetRandomEntry", + "responses": { + "200": { + "description": "A successful response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DictionaryEntry" + } + } + } + } + } + } + }, + "/GetWord": { + "get": { + "summary": "Gets the word for a given dictionary entry.", + "operationId": "GetWord", + "parameters": [ + { + "name": "entry", + "in": "query", + "description": "Word to get definition for.", + "required": true, + "schema": { + "$ref": "#/components/schemas/DictionaryEntry" + } + } + ], + "responses": { + "200": { + "description": "A successful response", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + }, + "/GetDefinition": { + "get": { + "summary": "Gets the definition for a given word.", + "operationId": "GetDefinition", + "parameters": [ + { + "name": "word", + "in": "query", + "description": "Word to get definition for.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "A successful response", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "DictionaryEntry": { + "type": "object", + "properties": { + "Word": { + "type": "string" + }, + "Definition": { + "type": "string" + } + } + } + } + } +} \ No newline at end of file diff --git a/dotnet/samples/KernelSyntaxExamples/Plugins/EmailPlugin.cs b/dotnet/samples/KernelSyntaxExamples/Plugins/EmailPlugin.cs index c957608a2f85..9bb9e16fec29 100644 --- a/dotnet/samples/KernelSyntaxExamples/Plugins/EmailPlugin.cs +++ b/dotnet/samples/KernelSyntaxExamples/Plugins/EmailPlugin.cs @@ -8,14 +8,14 @@ namespace Plugins; internal sealed class EmailPlugin { - [SKFunction, Description("Given an e-mail and message body, send an email")] + [KernelFunction, Description("Given an e-mail and message body, send an email")] public string SendEmail( [Description("The body of the email message to send.")] string input, [Description("The email address to send email to.")] string email_address) => $"Sent email to: {email_address}. Body: {input}"; - [SKFunction, Description("Given a name, find email address")] + [KernelFunction, Description("Given a name, find email address")] public string GetEmailAddress( [Description("The name of the person whose email address needs to be found.")] string input, ILogger? 
logger = null) @@ -23,6 +23,12 @@ public string GetEmailAddress( // Sensitive data, logging as trace, disabled by default logger?.LogTrace("Returning hard coded email for {0}", input); - return "johndoe1234@example.com"; + return input switch + { + "Jane" => "janedoe4321@example.com", + "Paul" => "paulsmith5678@example.com", + "Mary" => "maryjones8765@example.com", + _ => "johndoe1234@example.com", + }; } } diff --git a/dotnet/samples/KernelSyntaxExamples/Plugins/JiraPlugin/README.md b/dotnet/samples/KernelSyntaxExamples/Plugins/JiraPlugin/README.md index e0869ae5bd8c..afe3cddf2299 100644 --- a/dotnet/samples/KernelSyntaxExamples/Plugins/JiraPlugin/README.md +++ b/dotnet/samples/KernelSyntaxExamples/Plugins/JiraPlugin/README.md @@ -1,6 +1,6 @@ # Jira Open API Schema -We have our own curated version of the Jira Open API schema because the one available online -at https://raw.githubusercontent.com/microsoft/PowerPlatformConnectors/dev/certified-connectors/JIRA/apiDefinition.swagger.json, -doesn't follow OpenAPI specification for all of its operations. For example CreateIssueV2, its body param does not describe properties -and so we can't build the body automatically. \ No newline at end of file +We have our own curated version of the Jira Open API schema because the one available online +at https://raw.githubusercontent.com/microsoft/PowerPlatformConnectors/dev/certified-connectors/JIRA/apiDefinition.swagger.json, +doesn't follow OpenAPI specification for all of its operations. For example CreateIssueV2, its body param does not describe properties +and so we can't build the body automatically. diff --git a/dotnet/samples/KernelSyntaxExamples/Plugins/MenuPlugin.cs b/dotnet/samples/KernelSyntaxExamples/Plugins/MenuPlugin.cs new file mode 100644 index 000000000000..ba74f786d90f --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Plugins/MenuPlugin.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.ComponentModel; +using Microsoft.SemanticKernel; + +namespace Plugins; + +public sealed class MenuPlugin +{ + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string GetSpecials() + { + return @" +Special Soup: Clam Chowder +Special Salad: Cobb Salad +Special Drink: Chai Tea +"; + } + + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + [Description("The name of the menu item.")] + string menuItem) + { + return "$9.99"; + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/Plugins/StaticTextPlugin.cs b/dotnet/samples/KernelSyntaxExamples/Plugins/StaticTextPlugin.cs index 5187167d8bcf..4a8ac0172edf 100644 --- a/dotnet/samples/KernelSyntaxExamples/Plugins/StaticTextPlugin.cs +++ b/dotnet/samples/KernelSyntaxExamples/Plugins/StaticTextPlugin.cs @@ -7,11 +7,11 @@ namespace Plugins; public sealed class StaticTextPlugin { - [SKFunction, Description("Change all string chars to uppercase")] + [KernelFunction, Description("Change all string chars to uppercase")] public static string Uppercase([Description("Text to uppercase")] string input) => input.ToUpperInvariant(); - [SKFunction, Description("Append the day variable")] + [KernelFunction, Description("Append the day variable")] public static string AppendDay( [Description("Text to append to")] string input, [Description("Value of the day to append")] string day) => diff --git a/dotnet/samples/KernelSyntaxExamples/Program.cs b/dotnet/samples/KernelSyntaxExamples/Program.cs deleted file mode 100644 index de2b07fdf982..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Program.cs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Reliability; -using RepoUtils; - -public static class Program -{ - // ReSharper disable once InconsistentNaming - public static async Task Main(string[] args) - { - // Load configuration from environment variables or user secrets. - LoadUserSecrets(); - - // Execution canceled if the user presses Ctrl+C. - using CancellationTokenSource cancellationTokenSource = new(); - CancellationToken cancelToken = cancellationTokenSource.ConsoleCancellationToken(); - - string? defaultFilter = null; // Modify to filter examples - - // Check if args[0] is provided - string? filter = args.Length > 0 ? args[0] : defaultFilter; - - // Run examples based on the filter - await RunExamplesAsync(filter, cancelToken); - } - - private static async Task RunExamplesAsync(string? 
filter, CancellationToken cancellationToken) - { - var examples = (Assembly.GetExecutingAssembly().GetTypes()) - .Where(type => type.Name.StartsWith("Example", StringComparison.OrdinalIgnoreCase)) - .Select(type => type.Name).ToList(); - - // Filter and run examples - foreach (var example in examples) - { - if (string.IsNullOrEmpty(filter) || example.Contains(filter, StringComparison.OrdinalIgnoreCase)) - { - try - { - Console.WriteLine($"Running {example}..."); - - var method = Assembly.GetExecutingAssembly().GetType(example)?.GetMethod("RunAsync"); - if (method == null) - { - Console.WriteLine($"Example {example} not found"); - continue; - } - - bool hasCancellationToken = method.GetParameters().Any(param => param.ParameterType == typeof(CancellationToken)); - - var taskParameters = hasCancellationToken ? new object[] { cancellationToken } : null; - if (method.Invoke(null, taskParameters) is Task t) - { - await t.SafeWaitAsync(cancellationToken); - } - else - { - method.Invoke(null, null); - } - } - catch (ConfigurationNotFoundException ex) - { - Console.WriteLine($"{ex.Message}. Skipping example {example}."); - } - } - } - } - - private static void LoadUserSecrets() - { - IConfigurationRoot configRoot = new ConfigurationBuilder() - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); - TestConfiguration.Initialize(configRoot); - } - - private static CancellationToken ConsoleCancellationToken(this CancellationTokenSource tokenSource) - { - Console.CancelKeyPress += (s, e) => - { - Console.WriteLine("Canceling..."); - tokenSource.Cancel(); - e.Cancel = true; - }; - - return tokenSource.Token; - } - - private static async Task SafeWaitAsync(this Task task, - CancellationToken cancellationToken = default) - { - try - { - await task.WaitAsync(cancellationToken); - Console.WriteLine("== DONE =="); - } - catch (ConfigurationNotFoundException ex) - { - Console.WriteLine($"{ex.Message}. Skipping example."); - } - - cancellationToken.ThrowIfCancellationRequested(); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/README.md b/dotnet/samples/KernelSyntaxExamples/README.md index 26e95a78a215..56861e61a43b 100644 --- a/dotnet/samples/KernelSyntaxExamples/README.md +++ b/dotnet/samples/KernelSyntaxExamples/README.md @@ -1,42 +1,16 @@ -# Semantic Kernel syntax examples +#Semantic Kernel syntax examples -This project contains a collection of semi-random examples about various scenarios -using SK components. +This project contains a collection of semi-random examples about various scenarios using SK components. -The examples are ordered by number, starting with very basic examples. +The examples can be run as integration tests but their code can also be copied to stand-alone programs. ## Running Examples with Filters -You can run individual examples in the KernelSyntaxExamples project using various methods to specify a filter. This allows you to execute specific examples without running all of them. Choose one of the following options to apply a filter: - -### Option 1: Set the Default Filter in Program.cs - -In your code, you can set a default filter by modifying the appropriate variable or parameter. Look for the section in your code where the filter is applied or where the examples are defined, and change the filter value accordingly. 
- -```csharp -// Example of setting a default filter in code -string defaultFilter = "Example0"; // will run all examples that contain 'example0' in the name -``` - -### Option 2: Set Command-Line Arguments -Right-click on your console application project in the Solution Explorer. - -Choose "Properties" from the context menu. - -In the project properties window, navigate to the "Debug" tab on the left. - -Supply Command-Line Arguments: - -In the "Command line arguments" field, enter the command-line arguments that your console application expects. Separate multiple arguments with spaces. - -### Option 3: Use Visual Studio Code Filters -If you are using Visual Studio Code, you can specify a filter using the built-in filter options provided by the IDE. These options can be helpful when running your code in a debugging environment. Consult the documentation for Visual Studio Code or the specific extension you're using for information on applying filters. - -### Option 4: Modify launch.json -If you are using Visual Studio or a similar IDE that utilizes launch configurations, you can specify the filter in your launch.json configuration file. Edit the configuration for your project to include the filter parameter. - +You can run specific examples in the KernelSyntaxExamples project by using test filters (dotnet test --filter). +Type "dotnet test --help" at the command line for more details. ## Configuring Secrets + Most of the examples will require secrets and credentials, to access OpenAI, Azure OpenAI, Bing and other resources. We suggest using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) @@ -44,6 +18,7 @@ to avoid the risk of leaking secrets into the repository, branches and pull requ You can also use environment variables if you prefer. To set your secrets with Secret Manager: + ``` cd dotnet/samples/KernelSyntaxExamples @@ -56,7 +31,9 @@ dotnet user-secrets set "OpenAI:ApiKey" "..." dotnet user-secrets set "AzureOpenAI:ServiceId" "..." dotnet user-secrets set "AzureOpenAI:DeploymentName" "..." +dotnet user-secrets set "AzureOpenAI:ModelId" "..." dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..." +dotnet user-secrets set "AzureOpenAI:ChatModelId" "..." dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/" dotnet user-secrets set "AzureOpenAI:ApiKey" "..." @@ -64,8 +41,9 @@ dotnet user-secrets set "AzureOpenAIEmbeddings:DeploymentName" "..." dotnet user-secrets set "AzureOpenAIEmbeddings:Endpoint" "https://... .openai.azure.com/" dotnet user-secrets set "AzureOpenAIEmbeddings:ApiKey" "..." -dotnet user-secrets set "ACS:Endpoint" "https://... .search.windows.net" -dotnet user-secrets set "ACS:ApiKey" "..." +dotnet user-secrets set "AzureAISearch:Endpoint" "https://... .search.windows.net" +dotnet user-secrets set "AzureAISearch:ApiKey" "{Key from `Search service` resource}" +dotnet user-secrets set "AzureAISearch:IndexName" "..." dotnet user-secrets set "Qdrant:Endpoint" "..." dotnet user-secrets set "Qdrant:Port" "..." @@ -96,15 +74,13 @@ dotnet user-secrets set "Google:SearchEngineId" "..." dotnet user-secrets set "Github:PAT" "github_pat_..." -dotnet user-secrets set "Apim:Endpoint" "https://apim...azure-api.net/" -dotnet user-secrets set "Apim:SubscriptionKey" "..." - dotnet user-secrets set "Postgres:ConnectionString" "..." dotnet user-secrets set "Redis:Configuration" "..." dotnet user-secrets set "Kusto:ConnectionString" "..." 
``` To set your secrets with environment variables, use these names: + ``` # OpenAI OpenAI__ModelId @@ -123,9 +99,9 @@ AzureOpenAIEmbeddings__DeploymentName AzureOpenAIEmbeddings__Endpoint AzureOpenAIEmbeddings__ApiKey -# Azure Cognitive Search -ACS__Endpoint -ACS__ApiKey +# Azure AI Search +AzureAISearch__Endpoint +AzureAISearch__ApiKey # Qdrant Qdrant__Endpoint @@ -165,11 +141,86 @@ Google__SearchEngineId # Github Github__PAT -# Azure API Management (APIM) -Apim__Endpoint -Apim__SubscriptionKey - # Other Postgres__ConnectionString Redis__Configuration ``` + +# Authentication for the OpenAPI Functions + +The Semantic Kernel OpenAPI Function enables developers to take any REST API that follows the OpenAPI specification and import it as a plugin to the Semantic Kernel. +However, the Kernel needs to be able to authenticate outgoing requests per the requirements of the target API. This document outlines the authentication model for the OpenAPI plugin. + +## The `AuthenticateRequestAsyncCallback` delegate + +`AuthenticateRequestAsyncCallback` is a delegate type that serves as a callback function for adding authentication information to HTTP requests sent by the OpenAPI plugin. + +```csharp +public delegate Task AuthenticateRequestAsyncCallback(HttpRequestMessage request); +``` + +Developers may optionally provide an implementation of this delegate when importing an OpenAPI plugin to the Kernel. +The delegate is then passed through to the `RestApiOperationRunner`, which is responsible for building the HTTP payload and sending the request for each REST API operation. +Before the API request is sent, the delegate is executed with the HTTP request message as the parameter, allowing the request message to be updated with any necessary authentication information. + +This pattern was designed to be flexible enough to support a wide variety of authentication frameworks. + +## Authentication Providers example + +### BasicAuthenticationProvider + +This class implements the HTTP "basic" authentication scheme. The constructor accepts a `Func` which defines how to retrieve the user's credentials. +When the `AuthenticateRequestAsync` method is called, it retrieves the credentials, encodes them as a UTF-8 encoded Base64 string, and adds them to the `HttpRequestMessage`'s authorization header. + +The following code demonstrates how to use this provider: + +```csharp +var basicAuthProvider = new BasicAuthenticationProvider(() => +{ + // JIRA API expects credentials in the format "email:apikey" + return Task.FromResult( + Env.Var("MY_EMAIL_ADDRESS") + ":" + Env.Var("JIRA_API_KEY") + ); +}); +var plugin = kernel.ImportOpenApiPluginFromResource(PluginResourceNames.Jira, new OpenApiFunctionExecutionParameters { AuthCallback = basicAuthProvider.AuthenticateRequestAsync } ); +``` + +### BearerAuthenticationProvider + +This class implements the HTTP "bearer" authentication scheme. The constructor accepts a `Func` which defines how to retrieve the bearer token. +When the `AuthenticateRequestAsync` method is called, it retrieves the token and adds it to the `HttpRequestMessage`'s authorization header. 
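The provider's implementation itself is not included in this diff. As a rough, hypothetical sketch (assuming the token factory is supplied as a `Func<Task<string>>` and using only standard `System.Net.Http` types), it could look roughly like this:

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

// Hypothetical sketch only; the sample's actual class may differ.
public class BearerAuthenticationProvider
{
    private readonly Func<Task<string>> _bearerToken;

    public BearerAuthenticationProvider(Func<Task<string>> bearerToken)
    {
        this._bearerToken = bearerToken;
    }

    // Retrieves the token and attaches it as "Authorization: Bearer <token>".
    public async Task AuthenticateRequestAsync(HttpRequestMessage request)
    {
        var token = await this._bearerToken().ConfigureAwait(false);
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
    }
}
```

Note that `AuthenticateRequestAsync` matches the `AuthenticateRequestAsyncCallback` signature shown above, so it can be passed directly as the `AuthCallback`.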
+ +The following code demonstrates how to use this provider: + +```csharp +var bearerAuthProvider = new BearerAuthenticationProvider(() => +{ + return Task.FromResult(Env.Var("AZURE_KEYVAULT_TOKEN")); +}); +var plugin = kernel.ImportOpenApiPluginFromResource(PluginResourceNames.AzureKeyVault, new OpenApiFunctionExecutionParameters { AuthCallback = bearerAuthProvider.AuthenticateRequestAsync } ) +``` + +### InteractiveMsalAuthenticationProvider + +This class uses the [Microsoft Authentication Library (MSAL)](https://learn.microsoft.com/en-us/azure/active-directory/develop/msal-overview)'s .NET library to authenticate the user and acquire an OAuth token. +It follows the interactive [authorization code flow](https://learn.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-auth-code-flow), requiring the user to sign in with a Microsoft or Azure identity. +This is particularly useful for authenticating requests to the Microsoft Graph or Azure APIs. + +Once the token is acquired, it is added to the HTTP authentication header via the `AuthenticateRequestAsync` method, which is inherited from `BearerAuthenticationProvider`. + +To construct this provider, the caller must specify: + +- _Client ID_ - identifier of the calling application. This is acquired by [registering your application with the Microsoft Identity platform](https://learn.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app). +- _Tenant ID_ - identifier of the target service tenant, or "common" +- _Scopes_ - permissions being requested +- _Redirect URI_ - for redirecting the user back to the application. (When running locally, this is typically http://localhost.) + +```csharp +var msalAuthProvider = new InteractiveMsalAuthenticationProvider( + Env.Var("AZURE_KEYVAULT_CLIENTID"), // clientId + Env.Var("AZURE_KEYVAULT_TENANTID"), // tenantId + new string[] { ".default" }, // scopes + new Uri("http://localhost") // redirectUri +); +var plugin = kernel.ImportOpenApiPluginFromResource(PluginResourceNames.AzureKeyVault, new OpenApiFunctionExecutionParameters { AuthCallback = msalAuthProvider.AuthenticateRequestAsync } ) +``` diff --git a/dotnet/samples/KernelSyntaxExamples/Reliability/ConfigurationNotFoundException.cs b/dotnet/samples/KernelSyntaxExamples/Reliability/ConfigurationNotFoundException.cs index 2ce154ea7668..5c0975fbf075 100644 --- a/dotnet/samples/KernelSyntaxExamples/Reliability/ConfigurationNotFoundException.cs +++ b/dotnet/samples/KernelSyntaxExamples/Reliability/ConfigurationNotFoundException.cs @@ -3,6 +3,7 @@ using System; namespace Reliability; + public sealed class ConfigurationNotFoundException : Exception { public string? Section { get; } diff --git a/dotnet/samples/KernelSyntaxExamples/Reliability/RetryThreeTimesWithBackoff.cs b/dotnet/samples/KernelSyntaxExamples/Reliability/RetryThreeTimesWithBackoff.cs deleted file mode 100644 index 3ed0929422a6..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Reliability/RetryThreeTimesWithBackoff.cs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Http; -using Polly; -using Polly.Retry; - -namespace Reliability; - -/// -/// A factory for creating a retry handler. -/// -public class RetryThreeTimesWithBackoffFactory : IDelegatingHandlerFactory -{ - public DelegatingHandler Create(ILoggerFactory? 
loggerFactory) - { - return new RetryThreeTimesWithBackoff(loggerFactory); - } -} - -/// -/// A basic example of a retry mechanism that retries three times with backoff. -/// -public class RetryThreeTimesWithBackoff : DelegatingHandler -{ - private readonly AsyncRetryPolicy _policy; - - public RetryThreeTimesWithBackoff(ILoggerFactory? loggerFactory) - { - this._policy = GetPolicy(loggerFactory); - } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - return await this._policy.ExecuteAsync(async () => - { - var response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false); - return response; - }).ConfigureAwait(false); - } - - private static AsyncRetryPolicy GetPolicy(ILoggerFactory? logger) - { - // Handle 429 and 401 errors - // Typically 401 would not be something we retry but for demonstration - // purposes we are doing so as it's easy to trigger when using an invalid key. - const int TooManyRequests = 429; - const int Unauthorized = 401; - - return Policy - .HandleResult(response => - (int)response.StatusCode is TooManyRequests or Unauthorized) - .WaitAndRetryAsync(new[] - { - TimeSpan.FromSeconds(2), - TimeSpan.FromSeconds(4), - TimeSpan.FromSeconds(8) - }, - (outcome, timespan, retryCount, _) => logger?.CreateLogger(typeof(RetryThreeTimesWithBackoff)).LogWarning( - "Error executing action [attempt {0} of 3], pausing {1}ms. Outcome: {2}", - retryCount, - timespan.TotalMilliseconds, - outcome.Result.StatusCode)); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/Reliability/RetryThreeTimesWithRetryAfterBackoff.cs b/dotnet/samples/KernelSyntaxExamples/Reliability/RetryThreeTimesWithRetryAfterBackoff.cs deleted file mode 100644 index 1586b9af9c90..000000000000 --- a/dotnet/samples/KernelSyntaxExamples/Reliability/RetryThreeTimesWithRetryAfterBackoff.cs +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Http; -using Polly; -using Polly.Retry; - -namespace Reliability; - -/// -/// A factory for creating a retry handler. -/// -public class RetryThreeTimesWithRetryAfterBackoffFactory : IDelegatingHandlerFactory -{ - public DelegatingHandler Create(ILoggerFactory? loggerFactory) - { - return new RetryThreeTimesWithRetryAfterBackoff(loggerFactory); - } -} - -/// -/// An example of a retry mechanism that retries three times with backoff using the RetryAfter value. -/// -public class RetryThreeTimesWithRetryAfterBackoff : DelegatingHandler -{ - private readonly AsyncRetryPolicy _policy; - - public RetryThreeTimesWithRetryAfterBackoff(ILoggerFactory? loggerFactory) - { - this._policy = GetPolicy(loggerFactory); - } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - return await this._policy.ExecuteAsync(async () => - { - var response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false); - return response; - }).ConfigureAwait(false); - } - - private static AsyncRetryPolicy GetPolicy(ILoggerFactory? loggerFactory) - { - // Handle 429 and 401 errors - // Typically 401 would not be something we retry but for demonstration - // purposes we are doing so as it's easy to trigger when using an invalid key. 
- const int TooManyRequests = 429; - const int Unauthorized = 401; - - return Policy - .HandleResult(response => - (int)response.StatusCode is Unauthorized or TooManyRequests) - .WaitAndRetryAsync( - retryCount: 3, - sleepDurationProvider: (_, r, _) => - { - var response = r.Result; - var retryAfter = response.Headers.RetryAfter?.Delta ?? response.Headers.RetryAfter?.Date - DateTimeOffset.Now; - return retryAfter ?? TimeSpan.FromSeconds(2); - }, - (outcome, timespan, retryCount, _) => - { - loggerFactory?.CreateLogger(typeof(RetryThreeTimesWithRetryAfterBackoff)).LogWarning( - "Error executing action [attempt {0} of 3], pausing {1}ms. Outcome: {2}", - retryCount, - timespan.TotalMilliseconds, - outcome.Result.StatusCode); - return Task.CompletedTask; - }); - } -} diff --git a/dotnet/samples/KernelSyntaxExamples/RepoUtils/EnumerableExtensions.cs b/dotnet/samples/KernelSyntaxExamples/RepoUtils/EnumerableExtensions.cs new file mode 100644 index 000000000000..a685f494b896 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/RepoUtils/EnumerableExtensions.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; + +namespace RepoUtils; + +public static class EnumerableExtensions +{ + public static IEnumerable> ChunkByAggregate( + this IEnumerable source, + TAccumulate seed, + Func aggregator, + Func predicate) + { + using var enumerator = source.GetEnumerator(); + var aggregate = seed; + var index = 0; + var chunk = new List(); + + while (enumerator.MoveNext()) + { + var current = enumerator.Current; + + aggregate = aggregator(aggregate, current); + + if (predicate(aggregate, index++)) + { + chunk.Add(current); + } + else + { + if (chunk.Count > 0) + { + yield return chunk; + } + + chunk = new List() { current }; + aggregate = aggregator(seed, current); + index = 1; + } + } + + if (chunk.Count > 0) + { + yield return chunk; + } + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/RepoUtils/Env.cs b/dotnet/samples/KernelSyntaxExamples/RepoUtils/Env.cs index 36e909d40b27..e2e1de5ff781 100644 --- a/dotnet/samples/KernelSyntaxExamples/RepoUtils/Env.cs +++ b/dotnet/samples/KernelSyntaxExamples/RepoUtils/Env.cs @@ -5,9 +5,7 @@ namespace RepoUtils; -#pragma warning disable CA1812 // instantiated by AddUserSecrets internal sealed class Env -#pragma warning restore CA1812 { /// /// Simple helper used to load env vars and secrets like credentials, diff --git a/dotnet/samples/KernelSyntaxExamples/RepoUtils/ObjectExtensions.cs b/dotnet/samples/KernelSyntaxExamples/RepoUtils/ObjectExtensions.cs index 835c678b3dd5..144074f96116 100644 --- a/dotnet/samples/KernelSyntaxExamples/RepoUtils/ObjectExtensions.cs +++ b/dotnet/samples/KernelSyntaxExamples/RepoUtils/ObjectExtensions.cs @@ -3,12 +3,13 @@ using System.Text.Json; namespace RepoUtils; + public static class ObjectExtensions { - private static readonly JsonSerializerOptions s_jsonOptions = new() { WriteIndented = true }; + private static readonly JsonSerializerOptions s_jsonOptionsCache = new() { WriteIndented = true }; public static string AsJson(this object obj) { - return JsonSerializer.Serialize(obj, s_jsonOptions); + return JsonSerializer.Serialize(obj, s_jsonOptionsCache); } } diff --git a/dotnet/samples/KernelSyntaxExamples/RepoUtils/TextOutputHelperExtensions.cs b/dotnet/samples/KernelSyntaxExamples/RepoUtils/TextOutputHelperExtensions.cs new file mode 100644 index 000000000000..965afd76045c --- /dev/null +++ 
b/dotnet/samples/KernelSyntaxExamples/RepoUtils/TextOutputHelperExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit.Abstractions; + +namespace Examples; + +public static class TextOutputHelperExtensions +{ + public static void WriteLine(this ITestOutputHelper testOutputHelper, object target) + { + testOutputHelper.WriteLine(target.ToString()); + } + + public static void WriteLine(this ITestOutputHelper testOutputHelper) + { + testOutputHelper.WriteLine(string.Empty); + } + + public static void Write(this ITestOutputHelper testOutputHelper) + { + testOutputHelper.WriteLine(string.Empty); + } + + /// + /// Current interface ITestOutputHelper does not have a Write method. This extension method adds it to make it analogous to Console.Write when used in Console apps. + /// + /// TestOutputHelper + /// Target object to write + public static void Write(this ITestOutputHelper testOutputHelper, object target) + { + testOutputHelper.WriteLine(target.ToString()); + } +} diff --git a/dotnet/samples/KernelSyntaxExamples/RepoUtils/XunitLogger.cs b/dotnet/samples/KernelSyntaxExamples/RepoUtils/XunitLogger.cs new file mode 100644 index 000000000000..cb8e29debb69 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/RepoUtils/XunitLogger.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; + +namespace RepoUtils; + +/// +/// A logger that writes to the Xunit test output +/// +internal sealed class XunitLogger : ILoggerFactory, ILogger, IDisposable +{ + private readonly ITestOutputHelper _output; + + public XunitLogger(ITestOutputHelper output) + { + this._output = output; + } + + /// + public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func formatter) + { + this._output.WriteLine(state?.ToString()); + } + + /// + public bool IsEnabled(LogLevel logLevel) => true; + + /// + public IDisposable BeginScope(TState state) where TState : notnull + => this; + + /// + public void Dispose() + { + // This class is marked as disposable to support the BeginScope method. + // However, there is no need to dispose anything. 
+ } + + public ILogger CreateLogger(string categoryName) => this; + + public void AddProvider(ILoggerProvider provider) => throw new NotSupportedException(); +} diff --git a/dotnet/samples/KernelSyntaxExamples/Resources/22-ai-plugin.json b/dotnet/samples/KernelSyntaxExamples/Resources/22-ai-plugin.json new file mode 100644 index 000000000000..2cc3957fcfb7 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Resources/22-ai-plugin.json @@ -0,0 +1,20 @@ +{ + "schema_version": "v1", + "name_for_model": "AzureKeyVault", + "name_for_human": "AzureKeyVault", + "description_for_model": "An Azure Key Vault plugin for interacting with secrets.", + "description_for_human": "An Azure Key Vault plugin for interacting with secrets.", + "auth": { + "type": "oauth", + "scope": "https://vault.azure.net/.default", + "authorization_url": "https://login.microsoftonline.com//oauth2/v2.0/token", + "authorization_content_type": "application/x-www-form-urlencoded" + }, + "api": { + "type": "openapi", + "url": "file:///./22-openapi.json" + }, + "logo_url": "", + "contact_email": "", + "legal_info_url": "" +} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.OpenAPI/Plugins/AzureKeyVaultPlugin/openapi.json b/dotnet/samples/KernelSyntaxExamples/Resources/22-openapi.json similarity index 100% rename from dotnet/src/Functions/Functions.OpenAPI/Plugins/AzureKeyVaultPlugin/openapi.json rename to dotnet/samples/KernelSyntaxExamples/Resources/22-openapi.json diff --git a/dotnet/samples/KernelSyntaxExamples/Resources/Agents/ParrotAgent.yaml b/dotnet/samples/KernelSyntaxExamples/Resources/Agents/ParrotAgent.yaml new file mode 100644 index 000000000000..26a07cf04cf3 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Resources/Agents/ParrotAgent.yaml @@ -0,0 +1,9 @@ +name: Parrot +template_format: semantic-kernel +template: | + Repeat the user message in the voice of a pirate and then end with {{$count}} parrot sounds. +description: A fun chat bot that repeats the user message in the voice of a pirate. +input_variables: + - name: count + description: The number of parrot sounds. + is_required: true diff --git a/dotnet/samples/KernelSyntaxExamples/Resources/Agents/ToolAgent.yaml b/dotnet/samples/KernelSyntaxExamples/Resources/Agents/ToolAgent.yaml new file mode 100644 index 000000000000..474fd86a46ad --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Resources/Agents/ToolAgent.yaml @@ -0,0 +1,7 @@ +name: ToolRunner +template_format: semantic-kernel +template: | + Respond to the user using the single best tool. + If no tool is appropriate, let the user know you only provide responses using tools. + When reporting a tool result, start with, "The tool I used informed me that" +description: Determines if a tool can be utilized to accomplish a result. diff --git a/dotnet/samples/KernelSyntaxExamples/Resources/GenerateStory.yaml b/dotnet/samples/KernelSyntaxExamples/Resources/GenerateStory.yaml new file mode 100644 index 000000000000..fc5ecd88f34e --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Resources/GenerateStory.yaml @@ -0,0 +1,17 @@ +name: GenerateStory +template: | + Tell a story about {{$topic}} that is {{$length}} sentences long. +template_format: semantic-kernel +description: A function that generates a story about a topic. +input_variables: + - name: topic + description: The topic of the story. + is_required: true + - name: length + description: The number of sentences in the story. + is_required: true +output_variable: + description: The generated story. 
+execution_settings: + default: + temperature: 0.6 diff --git a/dotnet/samples/KernelSyntaxExamples/Resources/GenerateStoryHandlebars.yaml b/dotnet/samples/KernelSyntaxExamples/Resources/GenerateStoryHandlebars.yaml new file mode 100644 index 000000000000..b1cb891fb706 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Resources/GenerateStoryHandlebars.yaml @@ -0,0 +1,23 @@ +name: GenerateStory +template: | + Tell a story about {{topic}} that is {{length}} sentences long. +template_format: handlebars +description: A function that generates a story about a topic. +input_variables: + - name: topic + description: The topic of the story. + is_required: true + - name: length + description: The number of sentences in the story. + is_required: true +output_variable: + description: The generated story. +execution_settings: + service1: + model_id: gpt-4 + temperature: 0.6 + service2: + model_id: gpt-3 + temperature: 0.4 + default: + temperature: 0.5 diff --git a/dotnet/samples/KernelSyntaxExamples/Resources/chat-gpt-retrieval-plugin-open-api.yaml b/dotnet/samples/KernelSyntaxExamples/Resources/chat-gpt-retrieval-plugin-open-api.yaml new file mode 100644 index 000000000000..caa2aa6602a3 --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Resources/chat-gpt-retrieval-plugin-open-api.yaml @@ -0,0 +1,196 @@ +openapi: 3.0.2 +info: + title: Retrieval Plugin API + description: A retrieval API for querying and filtering documents based on natural language queries and metadata + version: 1.0.0 +servers: + - url: https://your-app-url.com +paths: + /query: + post: + summary: Query + description: Accepts search query objects array each with query and optional filter. Break down complex questions into sub-questions. Refine results by criteria, e.g. time / source, don't do this often. Split queries if ResponseTooLargeError occurs. 
+ operationId: search + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/QueryRequest" + required: true + responses: + "200": + description: Successful Response + content: + application/json: + schema: + $ref: "#/components/schemas/QueryResponse" + "422": + description: Validation Error + content: + application/json: + schema: + $ref: "#/components/schemas/HTTPValidationError" + security: + - HTTPBearer: [] +components: + schemas: + DocumentChunkMetadata: + title: DocumentChunkMetadata + type: object + properties: + source: + $ref: "#/components/schemas/Source" + source_id: + title: Source Id + type: string + url: + title: Url + type: string + created_at: + title: Created At + type: string + author: + title: Author + type: string + document_id: + title: Document Id + type: string + DocumentChunkWithScore: + title: DocumentChunkWithScore + required: + - text + - metadata + - score + type: object + properties: + id: + title: Id + type: string + text: + title: Text + type: string + metadata: + $ref: "#/components/schemas/DocumentChunkMetadata" + embedding: + title: Embedding + type: array + items: + type: number + score: + title: Score + type: number + DocumentMetadataFilter: + title: DocumentMetadataFilter + type: object + properties: + document_id: + title: Document Id + type: string + source: + $ref: "#/components/schemas/Source" + source_id: + title: Source Id + type: string + author: + title: Author + type: string + start_date: + title: Start Date + type: string + end_date: + title: End Date + type: string + HTTPValidationError: + title: HTTPValidationError + type: object + properties: + detail: + title: Detail + type: array + items: + $ref: "#/components/schemas/ValidationError" + Query: + title: Query + required: + - query + type: object + properties: + query: + title: Query + type: string + filter: + $ref: "#/components/schemas/DocumentMetadataFilter" + top_k: + title: Top K + type: integer + default: 3 + QueryRequest: + title: QueryRequest + required: + - queries + type: object + properties: + queries: + title: Queries + type: array + items: + $ref: "#/components/schemas/Query" + QueryResponse: + title: QueryResponse + required: + - results + type: object + properties: + results: + title: Results + type: array + items: + $ref: "#/components/schemas/QueryResult" + QueryResult: + title: QueryResult + required: + - query + - results + type: object + properties: + query: + title: Query + type: string + results: + title: Results + type: array + items: + $ref: "#/components/schemas/DocumentChunkWithScore" + Source: + title: Source + enum: + - email + - file + - chat + type: string + description: An enumeration. 
+ ValidationError: + title: ValidationError + required: + - loc + - msg + - type + type: object + properties: + loc: + title: Location + type: array + items: + anyOf: + - type: string + - type: integer + msg: + title: Message + type: string + type: + title: Error Type + type: string + securitySchemes: + HTTPBearer: + type: http + scheme: bearer diff --git a/dotnet/samples/KernelSyntaxExamples/Resources/travelinfo.txt b/dotnet/samples/KernelSyntaxExamples/Resources/travelinfo.txt new file mode 100644 index 000000000000..21665c82198e --- /dev/null +++ b/dotnet/samples/KernelSyntaxExamples/Resources/travelinfo.txt @@ -0,0 +1,217 @@ +Invoice Booking Reference LMNOPQ Trip ID - 11110011111 +Passenger Name(s) +MARKS/SAM ALBERT Agent W2 + + +MICROSOFT CORPORATION 14820 NE 36TH STREET REDMOND WA US 98052 + +American Express Global Business Travel Microsoft Travel +14711 NE 29th Place, Suite 215 +Bellevue, WA 98007 +Phone: +1 (669) 210-8041 + + + + +BILLING CODE : 1010-10010110 +Invoice Information + + + + + + +Invoice Details +Ticket Number + + + + + + + +0277993883295 + + + + + + +Charges +Ticket Base Fare + + + + + + + +306.29 + +Airline Name + +ALASKA AIRLINES + +Ticket Tax Fare 62.01 + +Passenger Name Flight Details + +MARKS/SAM ALBERT +11 Sep 2023 ALASKA AIRLINES +0572 H Class +SEATTLE-TACOMA,WA/RALEIGH DURHAM,NC +13 Sep 2023 ALASKA AIRLINES +0491 M Class +RALEIGH DURHAM,NC/SEATTLE- TACOMA,WA + +Total (USD) Ticket Amount + +368.30 + +Credit Card Information +Charged to Card + + + +AX XXXXXXXXXXX4321 + + + +368.30 + + + + +Payment Details + + + +Charged by Airline +Total Invoice Charge + + + +USD + + + +368.30 +368.30 + +Monday 11 September 2023 + +10:05 AM + +Seattle (SEA) to Durham (RDU) +Airline Booking Ref: ABCXYZ + +Carrier: ALASKA AIRLINES + +Flight: AS 572 + +Status: Confirmed + +Operated By: ALASKA AIRLINES +Origin: Seattle, WA, Seattle-Tacoma International Apt (SEA) + +Departing: Monday 11 September 2023 at 10:05 AM Destination: Durham, Raleigh, Raleigh (RDU) Arriving: Monday 11 September 2023 at 06:15 PM +Additional Information + +Departure Terminal: Not Applicable + +Arrival Terminal: TERMINAL 2 + + +Class: ECONOMY +Aircraft Type: Boeing 737-900 +Meal Service: Not Applicable +Frequent Flyer Number: Not Applicable +Number of Stops: 0 +Greenhouse Gas Emissions: 560 kg CO2e / person + + +Distance: 2354 Miles Estimated Time: 05 hours 10 minutes +Seat: 24A + + +THE WESTIN RALEIGH DURHAM AP +Address: 3931 Macaw Street, Raleigh, NC, 27617, US +Phone: (1) 919-224-1400 Fax: (1) 919-224-1401 +Check In Date: Monday 11 September 2023 Check Out Date: Wednesday 13 September 2023 Number Of Nights: 2 +Rate: USD 280.00 per night may be subject to local taxes and service charges +Guaranteed to: AX XXXXXXXXXXX4321 + +Reference Number: 987654 +Additional Information +Membership ID: 123456789 +CANCEL PERMITTED UP TO 1 DAYS BEFORE CHECKIN + +Status: Confirmed + + +Corporate Id: Not Applicable + +Number Of Rooms: 1 + +Wednesday 13 September 2023 + +07:15 PM + +Durham (RDU) to Seattle (SEA) +Airline Booking Ref: ABCXYZ + +Carrier: ALASKA AIRLINES + +Flight: AS 491 + +Status: Confirmed + +Operated By: ALASKA AIRLINES +Origin: Durham, Raleigh, Raleigh (RDU) +Departing: Wednesday 13 September 2023 at 07:15 PM + + + +Departure Terminal: TERMINAL 2 + +Destination: Seattle, WA, Seattle-Tacoma International Apt (SEA) +Arriving: Wednesday 13 September 2023 at 09:59 PM Arrival Terminal: Not Applicable +Additional Information + + +Class: ECONOMY +Aircraft Type: Boeing 737-900 +Meal Service: Not Applicable +Frequent 
Flyer Number: Not Applicable +Number of Stops: 0 +Greenhouse Gas Emissions: 560 kg CO2e / person + + +Distance: 2354 Miles Estimated Time: 05 hours 44 minutes +Seat: 16A + + + +Greenhouse Gas Emissions +Total Greenhouse Gas Emissions for this trip is: 1120 kg CO2e / person +Air Fare Information + +Routing : ONLINE RESERVATION +Total Fare : USD 368.30 +Additional Messages +FOR 24X7 Travel Reservations Please Call 1-669-210-8041 Unable To Use Requested As Frequent Flyer Program Invalid Use Of Frequent Flyer Number 0123XYZ Please Contact Corresponding Frequent Travel Program Support Desk For Assistance +Trip Name-Trip From Seattle To Raleigh/Durham +This Ticket Is Nonrefundable. Changes Or Cancellations Must Be Made Prior To Scheduled Flight Departure +All Changes Must Be Made On Same Carrier And Will Be Subject To Service Fee And Difference In Airfare +******************************************************* +Please Be Advised That Certain Mandatory Hotel-Imposed Charges Including But Not Limited To Daily Resort Or Facility Fees May Be Applicable To Your Stay And Payable To The Hotel Operator At Check-Out From The Property. You May Wish To Inquire With The Hotel Before Your Trip Regarding The Existence And Amount Of Such Charges. +******************************************************* +Hotel Cancel Policies Vary Depending On The Property And Date. If You Have Questions Regarding Cancellation Fees Please Call The Travel Office. +Important Information +COVID-19 Updates: Click here to access Travel Vitals https://travelvitals.amexgbt.com for the latest information and advisories compiled by American Express Global Business Travel. + +Carbon Emissions: The total emissions value for this itinerary includes air travel only. Emissions for each individual flight are displayed in the flight details section. For more information on carbon emissions please refer to https://www.amexglobalbusinesstravel.com/sustainable-products-and-platforms. + +For important information regarding your booking in relation to the conditions applying to your booking, managing your booking and travel advisory, please refer to www.amexglobalbusinesstravel.com/booking-info. + +GBT Travel Services UK Limited (GBT UK) and its authorized sublicensees (including Ovation Travel Group and Egencia) use certain trademarks and service marks of American Express Company or its subsidiaries (American Express) in the American Express Global Business Travel and American Express Meetings & Events brands and in connection with its business for permitted uses only under a limited license from American Express (Licensed Marks). The Licensed Marks are trademarks or service marks of, and the property of, American Express. GBT UK is a subsidiary of Global Business Travel Group, Inc. (NYSE: GBTG). American Express holds a minority interest in GBTG, which operates as a separate company from American Express. 
diff --git a/dotnet/samples/KernelSyntaxExamples/TestConfiguration.cs b/dotnet/samples/KernelSyntaxExamples/TestConfiguration.cs index d4f0c3b854e5..9e186a9ae1fe 100644 --- a/dotnet/samples/KernelSyntaxExamples/TestConfiguration.cs +++ b/dotnet/samples/KernelSyntaxExamples/TestConfiguration.cs @@ -22,8 +22,9 @@ public static void Initialize(IConfigurationRoot configRoot) public static OpenAIConfig OpenAI => LoadSection(); public static AzureOpenAIConfig AzureOpenAI => LoadSection(); + public static AzureOpenAIConfig AzureOpenAIImages => LoadSection(); public static AzureOpenAIEmbeddingsConfig AzureOpenAIEmbeddings => LoadSection(); - public static ACSConfig ACS => LoadSection(); + public static AzureAISearchConfig AzureAISearch => LoadSection(); public static QdrantConfig Qdrant => LoadSection(); public static WeaviateConfig Weaviate => LoadSection(); public static KeyVaultConfig KeyVault => LoadSection(); @@ -37,6 +38,8 @@ public static void Initialize(IConfigurationRoot configRoot) public static JiraConfig Jira => LoadSection(); public static ChromaConfig Chroma => LoadSection(); public static KustoConfig Kusto => LoadSection(); + public static MongoDBConfig MongoDB => LoadSection(); + public static ChatGPTRetrievalPluginConfig ChatGPTRetrievalPlugin => LoadSection(); private static T LoadSection([CallerMemberName] string? caller = null) { @@ -67,9 +70,15 @@ public class AzureOpenAIConfig { public string ServiceId { get; set; } public string DeploymentName { get; set; } + public string ModelId { get; set; } public string ChatDeploymentName { get; set; } + public string ChatModelId { get; set; } + public string ImageDeploymentName { get; set; } + public string ImageModelId { get; set; } + public string ImageEndpoint { get; set; } public string Endpoint { get; set; } public string ApiKey { get; set; } + public string ImageApiKey { get; set; } } public class AzureOpenAIEmbeddingsConfig @@ -79,7 +88,7 @@ public class AzureOpenAIEmbeddingsConfig public string ApiKey { get; set; } } - public class ACSConfig + public class AzureAISearchConfig { public string Endpoint { get; set; } public string ApiKey { get; set; } @@ -104,7 +113,7 @@ public class KeyVaultConfig { public string Endpoint { get; set; } public string ClientId { get; set; } - public string TenantId { get; set; } + public string ClientSecret { get; set; } } public class HuggingFaceConfig @@ -161,5 +170,16 @@ public class KustoConfig { public string ConnectionString { get; set; } } + + public class MongoDBConfig + { + public string ConnectionString { get; set; } + } + + public class ChatGPTRetrievalPluginConfig + { + public string Token { get; set; } + } + #pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. } diff --git a/dotnet/samples/NCalcPlugins/LanguageCalculatorPlugin.cs b/dotnet/samples/NCalcPlugins/LanguageCalculatorPlugin.cs deleted file mode 100644 index 7758d9f78b51..000000000000 --- a/dotnet/samples/NCalcPlugins/LanguageCalculatorPlugin.cs +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Text.RegularExpressions; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Orchestration; -using NCalc; - -namespace NCalcPlugins; - -/// -/// Plugin that enables the comprehension of mathematical problems presented in English / natural-language text, followed by the execution of the necessary calculations to solve those problems. -/// -/// -/// usage : -/// var kernel = new KernelBuilder().WithLogger(ConsoleLogger.Logger).Build(); -/// var question = "what is the square root of 625"; -/// var calculatorPlugin = kernel.ImportFunctions(new LanguageCalculatorPlugin(kernel)); -/// var summary = await kernel.RunAsync(questions, calculatorPlugin["Calculate"]); -/// Console.WriteLine("Result :"); -/// Console.WriteLine(summary.Result); -/// -public class LanguageCalculatorPlugin -{ - private readonly ISKFunction _mathTranslator; - private const string MathTranslatorPrompt = - @"Translate a math problem into a expression that can be executed using .net NCalc library. Use the output of running this code to answer the question. -Available functions: Abs, Acos, Asin, Atan, Ceiling, Cos, Exp, Floor, IEEERemainder, Log, Log10, Max, Min, Pow, Round, Sign, Sin, Sqrt, Tan, and Truncate. in and if are also supported. - -Question: $((Question with math problem.)) -expression:``` $((single line mathematical expression that solves the problem))``` - -[Examples] -Question: What is 37593 * 67? -expression:```37593 * 67``` - -Question: what is 3 to the 2nd power? -expression:```Pow(3, 2)``` - -Question: what is sine of 0 radians? -expression:```Sin(0)``` - -Question: what is sine of 45 degrees? -expression:```Sin(45 * Pi /180 )``` - -Question: how many radians is 45 degrees? -expression:``` 45 * Pi / 180 ``` - -Question: what is the square root of 81? -expression:```Sqrt(81)``` - -Question: what is the angle whose sine is the number 1? -expression:```Asin(1)``` - -[End of Examples] - -Question: {{ $input }} -"; - - /// - /// Initializes a new instance of the class. - /// - /// The kernel to be used for creating the semantic function. - public LanguageCalculatorPlugin(IKernel kernel) - { - this._mathTranslator = kernel.CreateSemanticFunction( - MathTranslatorPrompt, - pluginName: nameof(LanguageCalculatorPlugin), - functionName: "TranslateMathProblem", - description: "Used by 'Calculator' function.", - requestSettings: new AIRequestSettings() - { - ExtensionData = new Dictionary() - { - { "MaxTokens", 256 }, - { "Temperature", 0.0 }, - { "TopP", 1 }, - } - }); - } - - /// - /// Calculates the result of a non-trivial math expression. - /// - /// A valid mathematical expression that could be executed by a calculator capable of more advanced math functions like sine/cosine/floor. - /// The context for the plugin execution. - /// A representing the result of the asynchronous operation. - [SKFunction, SKName("Calculator"), Description("Useful for getting the result of a non-trivial math expression.")] - public async Task CalculateAsync( - [Description("A valid mathematical expression that could be executed by a calculator capable of more advanced math functions like sin/cosine/floor.")] - string input, - SKContext context) - { - string answer; - - try - { - var result = await context.Runner.RunAsync(this._mathTranslator, new ContextVariables(input)).ConfigureAwait(false); - answer = result.GetValue() ?? 
string.Empty; - } - catch (Exception ex) - { - throw new InvalidOperationException($"Error in calculator for input {input} {ex.Message}", ex); - } - - string pattern = @"```\s*(.*?)\s*```"; - - Match match = Regex.Match(answer, pattern, RegexOptions.Singleline); - if (match.Success) - { - var result = EvaluateMathExpression(match); - return result; - } - - throw new InvalidOperationException($"Input value [{input}] could not be understood, received following {answer}"); - } - - private static string EvaluateMathExpression(Match match) - { - var textExpressions = match.Groups[1].Value; - var expr = new Expression(textExpressions, EvaluateOptions.IgnoreCase); - expr.EvaluateParameter += delegate (string name, ParameterArgs args) - { - args.Result = name.ToLower(System.Globalization.CultureInfo.CurrentCulture) switch - { - "pi" => Math.PI, - "e" => Math.E, - _ => args.Result - }; - }; - - try - { - if (expr.HasErrors()) - { - return "Error:" + expr.Error + " could not evaluate " + textExpressions; - } - - var result = expr.Evaluate(); - return "Answer:" + result.ToString(); - } - catch (Exception e) - { - throw new InvalidOperationException("could not evaluate " + textExpressions, e); - } - } -} diff --git a/dotnet/samples/NCalcPlugins/NCalcPlugins.csproj b/dotnet/samples/NCalcPlugins/NCalcPlugins.csproj deleted file mode 100644 index 7aa023e93fd7..000000000000 --- a/dotnet/samples/NCalcPlugins/NCalcPlugins.csproj +++ /dev/null @@ -1,14 +0,0 @@ - - - netstandard2.0 - 10 - - - - - - - - - - diff --git a/dotnet/samples/NCalcPlugins/SimpleCalculatorPlugin.cs b/dotnet/samples/NCalcPlugins/SimpleCalculatorPlugin.cs deleted file mode 100644 index a0ba143cbe0f..000000000000 --- a/dotnet/samples/NCalcPlugins/SimpleCalculatorPlugin.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; - -namespace NCalcPlugins; - -/// -/// Simple calculator plugin that evaluates a mathematical expression. -/// -public class SimpleCalculatorPlugin -{ - private readonly ISKFunction _mathTranslator; - - private static readonly string[] s_stopSequences = new[] { "Problem:", "Solution:" }; - - /// - /// Initializes a new instance of the class. - /// - /// The kernel used to create the semantic function. - public SimpleCalculatorPlugin(IKernel kernel) - { - this._mathTranslator = kernel.CreateSemanticFunction( - "Task: Give the final solution for the problem. Be as concise as possible.\nProblem:4+4\nSolution:8\nProblem:{{$input}}\nSolution:\n", - pluginName: nameof(SimpleCalculatorPlugin), - functionName: "Calculator", - description: "Evaluate a mathematical expression. Input is a valid mathematical expression that could be executed by a simple calculator i.e. add, subtract, multiply and divide. Cannot use variables.", - requestSettings: new AIRequestSettings() - { - ExtensionData = new Dictionary() - { - { "MaxTokens", 256 }, - { "Temperature", 0.0 }, - { "StopSequences", s_stopSequences }, - } - }); - } -} diff --git a/dotnet/samples/TelemetryExample/Program.cs b/dotnet/samples/TelemetryExample/Program.cs new file mode 100644 index 000000000000..09878ddc998b --- /dev/null +++ b/dotnet/samples/TelemetryExample/Program.cs @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Diagnostics; +using System.IO; +using System.Threading.Tasks; +using Azure.Monitor.OpenTelemetry.Exporter; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Planning.Handlebars; +using OpenTelemetry; +using OpenTelemetry.Metrics; +using OpenTelemetry.Trace; + +/// +/// Example of telemetry in Semantic Kernel using Application Insights within console application. +/// +public sealed class Program +{ + /// + /// Log level to be used by . + /// + /// + /// is set by default. + /// will enable logging with more detailed information, including sensitive data. Should not be used in production. + /// + private const LogLevel MinLogLevel = LogLevel.Information; + + /// + /// Instance of for the application activities. + /// + private static readonly ActivitySource s_activitySource = new("Telemetry.Example"); + + /// + /// The main entry point for the application. + /// + /// A representing the asynchronous operation. + public static async Task Main() + { + // Load configuration from environment variables or user secrets. + LoadUserSecrets(); + + var connectionString = TestConfiguration.ApplicationInsights.ConnectionString; + + using var traceProvider = Sdk.CreateTracerProviderBuilder() + .AddSource("Microsoft.SemanticKernel*") + .AddSource("Telemetry.Example") + .AddAzureMonitorTraceExporter(options => options.ConnectionString = connectionString) + .Build(); + + using var meterProvider = Sdk.CreateMeterProviderBuilder() + .AddMeter("Microsoft.SemanticKernel*") + .AddAzureMonitorMetricExporter(options => options.ConnectionString = connectionString) + .Build(); + + using var loggerFactory = LoggerFactory.Create(builder => + { + // Add OpenTelemetry as a logging provider + builder.AddOpenTelemetry(options => + { + options.AddAzureMonitorLogExporter(options => options.ConnectionString = connectionString); + // Format log messages. This is default to false. 
+ options.IncludeFormattedMessage = true; + }); + builder.SetMinimumLevel(MinLogLevel); + }); + + var kernel = GetKernel(loggerFactory); + var planner = CreatePlanner(); + + using var activity = s_activitySource.StartActivity("Main"); + + Console.WriteLine("Operation/Trace ID:"); + Console.WriteLine(Activity.Current?.TraceId); + + var plan = await planner.CreatePlanAsync(kernel, "Write a poem about John Doe, then translate it into Italian."); + + Console.WriteLine("Original plan:"); + Console.WriteLine(plan.ToString()); + + var result = await plan.InvokeAsync(kernel).ConfigureAwait(false); + + Console.WriteLine("Result:"); + Console.WriteLine(result); + } + + private static Kernel GetKernel(ILoggerFactory loggerFactory) + { + var folder = RepoFiles.SamplePluginsPath(); + + IKernelBuilder builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(loggerFactory); + builder.AddAzureOpenAIChatCompletion( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + modelId: TestConfiguration.AzureOpenAI.ChatModelId, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey + ).Build(); + + builder.Plugins.AddFromPromptDirectory(Path.Combine(folder, "WriterPlugin")); + + return builder.Build(); + } + + private static HandlebarsPlanner CreatePlanner() + { + var plannerOptions = new HandlebarsPlannerOptions(); + return new HandlebarsPlanner(plannerOptions); + } + + private static void LoadUserSecrets() + { + IConfigurationRoot configRoot = new ConfigurationBuilder() + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + TestConfiguration.Initialize(configRoot); + } +} diff --git a/dotnet/samples/TelemetryExample/README.md b/dotnet/samples/TelemetryExample/README.md new file mode 100644 index 000000000000..d6ebe165b6e2 --- /dev/null +++ b/dotnet/samples/TelemetryExample/README.md @@ -0,0 +1,140 @@ +# Semantic Kernel Telemetry Example + +This example project shows how an application can be configured to send Semantic Kernel telemetry to Application Insights. + +> Note that it is also possible to use other Application Performance Management (APM) vendors. An example is [Prometheus](https://prometheus.io/docs/introduction/overview/). Please refer to this [link](https://learn.microsoft.com/en-us/dotnet/core/diagnostics/metrics-collection#configure-the-example-app-to-use-opentelemetrys-prometheus-exporter) on how to do it. + +For more information, please refer to the following articles: + +1. [Observability](https://learn.microsoft.com/en-us/dotnet/core/diagnostics/observability-with-otel) +2. [OpenTelemetry](https://opentelemetry.io/docs/) +3. [Enable Azure Monitor OpenTelemetry for .Net](https://learn.microsoft.com/en-us/azure/azure-monitor/app/opentelemetry-enable?tabs=net) +4. [Configure Azure Monitor OpenTelemetry for .Net](https://learn.microsoft.com/en-us/azure/azure-monitor/app/opentelemetry-configuration?tabs=net) +5. [Add, modify, and filter Azure Monitor OpenTelemetry](https://learn.microsoft.com/en-us/azure/azure-monitor/app/opentelemetry-add-modify?tabs=net) +6. [Customizing OpenTelemetry .NET SDK for Metrics](https://github.com/open-telemetry/opentelemetry-dotnet/blob/main/docs/metrics/customizing-the-sdk/README.md) +7. [Customizing OpenTelemetry .NET SDK for Logs](https://github.com/open-telemetry/opentelemetry-dotnet/blob/main/docs/logs/customizing-the-sdk/README.md) + +## What to expect + +In this example project, the Handlebars planner will be invoked to achieve a goal. 
The planner will request the model to create a plan, comprising three steps, with two of them being prompt-based kernel functions. The plan will be executed to produce the desired output, effectively fulfilling the goal.
+
+The Semantic Kernel SDK is designed to efficiently generate comprehensive logs, traces, and metrics throughout the planner invocation, as well as during function and plan execution. This allows you to effectively monitor your AI application's performance and accurately track token consumption.
+
+> `ActivitySource.StartActivity` internally determines if there are any listeners recording the Activity. If there are no registered listeners or there are listeners that are not interested, StartActivity() will return null and avoid creating the Activity object. Read more [here](https://learn.microsoft.com/en-us/dotnet/core/diagnostics/distributed-tracing-instrumentation-walkthroughs).
+
+## Configuration
+
+### Required resources
+
+1. [Application Insights](https://learn.microsoft.com/en-us/azure/azure-monitor/app/create-workspace-resource)
+2. [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource?pivots=web-portal)
+
+### Secrets
+
+This example will require secrets and credentials to access your Application Insights instance and Azure OpenAI.
+We suggest using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets)
+to avoid the risk of leaking secrets into the repository, branches and pull requests.
+You can also use environment variables if you prefer.
+
+To set your secrets with Secret Manager:
+
+```
+cd dotnet/samples/TelemetryExample
+
+dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..."
+dotnet user-secrets set "AzureOpenAI:ChatModelId" "..."
+dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/"
+dotnet user-secrets set "AzureOpenAI:ApiKey" "..."
+
+dotnet user-secrets set "ApplicationInsights:ConnectionString" "..."
+```
+
+## Running the example
+
+Simply run `dotnet run` under this directory if the command line interface is preferred. Otherwise, this example can also be run in Visual Studio.
+
+> This will output the Operation/Trace ID, which can be used later in Application Insights for searching the operation.
+
+## Application Insights/Azure Monitor
+
+### Logs and traces
+
+Go to your Application Insights instance, click on _Transaction search_ on the left menu. Use the operation id output by the program to search for the logs and traces associated with the operation. Click on any of the search results to view the end-to-end transaction details. Read more [here](https://learn.microsoft.com/en-us/azure/azure-monitor/app/transaction-search-and-diagnostics?tabs=transaction-search).
+
+### Metrics
+
+Running the application once will only generate one set of measurements (for each metric). Run the application a couple of times to generate more sets of measurements.
+
+> Note: Make sure not to run the program too frequently. Otherwise, you may get throttled.
+
+Please refer to [Analyze metrics in Azure Monitor](https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/analyze-metrics) for guidance on analyzing these metrics.
+
+### Log Analytics
+
+It is also possible to use Log Analytics to query the telemetry items sent by the sample application. Please read more [here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/log-analytics-tutorial).
+ +For example, to create a pie chart to summarize the Handlebars planner status: + +```kql +dependencies +| where name == "Microsoft.SemanticKernel.Planning.Handlebars.HandlebarsPlanner" +| extend status = iff(success == True, "Success", "Failure") +| summarize count() by status +| render piechart +``` + +Or to create a bar chart to summarize the Handlebars planner status by date: + +```kql +dependencies +| where name == "Microsoft.SemanticKernel.Planning.Handlebars.HandlebarsPlanner" +| extend status = iff(success == True, "Success", "Failure"), day = bin(timestamp, 1d) +| project day, status +| summarize + success = countif(status == "Success"), + failure = countif(status == "Failure") by day +| extend day = format_datetime(day, "MM/dd/yy") +| order by day +| render barchart +``` + +Or to see status and performance of each planner run: + +```kql +dependencies +| where name == "Microsoft.SemanticKernel.Planning.Handlebars.HandlebarsPlanner" +| extend status = iff(success == True, "Success", "Failure") +| project timestamp, id, status, performance = performanceBucket +| order by timestamp +``` + +It is also possible to summarize the total token usage: + +```kql +customMetrics +| where name == "semantic_kernel.connectors.openai.tokens.total" +| project value +| summarize sum(value) +| project Total = sum_value +``` + +Or track token usage by functions: + +```kql +customMetrics +| where name == "semantic_kernel.function.invocation.token_usage.prompt" and customDimensions has "semantic_kernel.function.name" +| project customDimensions, value +| extend function = tostring(customDimensions["semantic_kernel.function.name"]) +| project function, value +| summarize sum(value) by function +| render piechart +``` + +### Azure Dashboard + +You can create an Azure Dashboard to visualize the custom telemetry items. You can read more here: [Create a new dashboard](https://learn.microsoft.com/en-us/azure/azure-monitor/app/overview-dashboard#create-a-new-dashboard). + +## More information + +- [Telemetry docs](../../docs/TELEMETRY.md) +- [Planner telemetry improvement ADR](../../../docs/decisions/0025-planner-telemetry-enhancement.md) diff --git a/dotnet/samples/ApplicationInsightsExample/RepoUtils/RepoFiles.cs b/dotnet/samples/TelemetryExample/RepoUtils/RepoFiles.cs similarity index 100% rename from dotnet/samples/ApplicationInsightsExample/RepoUtils/RepoFiles.cs rename to dotnet/samples/TelemetryExample/RepoUtils/RepoFiles.cs diff --git a/dotnet/samples/TelemetryExample/TelemetryExample.csproj b/dotnet/samples/TelemetryExample/TelemetryExample.csproj new file mode 100644 index 000000000000..ebd29174e822 --- /dev/null +++ b/dotnet/samples/TelemetryExample/TelemetryExample.csproj @@ -0,0 +1,30 @@ + + + + net6.0 + LatestMajor + Exe + 10 + enable + disable + false + + CA1050;CA1707;CA2007;CS1591;VSTHRD111,SKEXP0050,SKEXP0054,SKEXP0060 + 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/TelemetryExample/TestConfiguration.cs b/dotnet/samples/TelemetryExample/TestConfiguration.cs new file mode 100644 index 000000000000..03a8f1077558 --- /dev/null +++ b/dotnet/samples/TelemetryExample/TestConfiguration.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using Microsoft.Extensions.Configuration; + +public sealed class TestConfiguration +{ + private readonly IConfigurationRoot _configRoot; + private static TestConfiguration? 
s_instance; + + private TestConfiguration(IConfigurationRoot configRoot) + { + this._configRoot = configRoot; + } + + public static void Initialize(IConfigurationRoot configRoot) + { + s_instance = new TestConfiguration(configRoot); + } + + public static AzureOpenAIConfig AzureOpenAI => LoadSection(); + + public static ApplicationInsightsConfig ApplicationInsights => LoadSection(); + + private static T LoadSection([CallerMemberName] string? caller = null) + { + if (s_instance == null) + { + throw new InvalidOperationException( + "TestConfiguration must be initialized with a call to Initialize(IConfigurationRoot) before accessing configuration values."); + } + + if (string.IsNullOrEmpty(caller)) + { + throw new ArgumentNullException(nameof(caller)); + } + + return s_instance._configRoot.GetSection(caller).Get() ?? + throw new KeyNotFoundException($"Could not find configuration section {caller}"); + } + +#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. + public class AzureOpenAIConfig + { + public string ChatDeploymentName { get; set; } + public string ChatModelId { get; set; } + public string Endpoint { get; set; } + public string ApiKey { get; set; } + } + + public class ApplicationInsightsConfig + { + public string ConnectionString { get; set; } + } + +#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. +} diff --git a/dotnet/src/.editorconfig b/dotnet/src/.editorconfig index b7d6bf9ace7e..b2afb3dc53c6 100644 --- a/dotnet/src/.editorconfig +++ b/dotnet/src/.editorconfig @@ -3,3 +3,9 @@ dotnet_diagnostic.CA2007.severity = error # Do not directly await a Task dotnet_diagnostic.VSTHRD111.severity = error # Use .ConfigureAwait(bool) dotnet_diagnostic.IDE1006.severity = error # Naming rule violations + +# Resharper disabled rules: https://www.jetbrains.com/help/resharper/Reference__Code_Inspections_CSHARP.html#CodeSmell +resharper_not_resolved_in_text_highlighting = none # Disable Resharper's "Not resolved in text" highlighting +resharper_check_namespace_highlighting = none # Disable Resharper's "Check namespace" highlighting +resharper_object_creation_as_statement_highlighting = none # Disable Resharper's "Object creation as statement" highlighting + diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/Connectors.AI.HuggingFace.csproj b/dotnet/src/Connectors/Connectors.AI.HuggingFace/Connectors.AI.HuggingFace.csproj deleted file mode 100644 index be21cc3c7b13..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/Connectors.AI.HuggingFace.csproj +++ /dev/null @@ -1,24 +0,0 @@ - - - - - Microsoft.SemanticKernel.Connectors.AI.HuggingFace - $(AssemblyName) - netstandard2.0 - - - - - - - - - Semantic Kernel - Hugging Face AI connectors - Semantic Kernel connectors for Hugging Face. Contains clients for text completion and text embedding generation. - - - - - - - diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/HuggingFaceKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/HuggingFaceKernelBuilderExtensions.cs deleted file mode 100644 index 82699068c568..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/HuggingFaceKernelBuilderExtensions.cs +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Net.Http; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextEmbedding; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of KernelConfig -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class to configure Hugging Face connectors. -/// -public static class HuggingFaceKernelBuilderExtensions -{ - /// - /// Registers an Hugging Face text completion service with the specified configuration. - /// - /// The instance. - /// The name of the Hugging Face model. - /// The API key required for accessing the Hugging Face service. - /// The endpoint URL for the text completion service. - /// A local identifier for the given AI service. - /// Indicates whether the service should be the default for its type. - /// The optional to be used for making HTTP requests. - /// If not provided, a default instance will be used. - /// The modified instance. - public static KernelBuilder WithHuggingFaceTextCompletionService(this KernelBuilder builder, - string model, - string? apiKey = null, - string? endpoint = null, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new HuggingFaceTextCompletion( - model, - apiKey, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - endpoint), - setAsDefault); - - return builder; - } - - /// - /// Registers an Hugging Face text embedding generation service with the specified configuration. - /// - /// The instance. - /// The name of the Hugging Face model. - /// The endpoint for the text embedding generation service. - /// A local identifier for the given AI service. - /// Indicates whether the service should be the default for its type. - /// The instance. - public static KernelBuilder WithHuggingFaceTextEmbeddingGenerationService(this KernelBuilder builder, - string model, - string endpoint, - string? serviceId = null, - bool setAsDefault = false) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new HuggingFaceTextEmbeddingGeneration( - model, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient: null, loggerFactory), - endpoint), - setAsDefault); - - return builder; - } - - /// - /// Registers an Hugging Face text embedding generation service with the specified configuration. - /// - /// The instance. - /// The name of the Hugging Face model. - /// The optional instance used for making HTTP requests. - /// The endpoint for the text embedding generation service. - /// A local identifier for the given AI serviceю - /// Indicates whether the service should be the default for its type. - /// The instance. - public static KernelBuilder WithHuggingFaceTextEmbeddingGenerationService(this KernelBuilder builder, - string model, - HttpClient? httpClient = null, - string? endpoint = null, - string? 
serviceId = null, - bool setAsDefault = false) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new HuggingFaceTextEmbeddingGeneration( - model, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - endpoint), - setAsDefault); - - return builder; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/HuggingFaceModelResultExtension.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/HuggingFaceModelResultExtension.cs deleted file mode 100644 index 2591f411ea47..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/HuggingFaceModelResultExtension.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 - -namespace Microsoft.SemanticKernel; - -/// -/// Provides an extension method for working with Hugging Face model results. -/// -public static class HuggingFaceModelResultExtension -{ - /// - /// Retrieves a typed hugging face result from . - /// - /// The instance to retrieve the hugging face result from. - /// A instance containing the hugging face result. - public static TextCompletionResponse GetHuggingFaceResult(this ModelResult resultBase) - { - return resultBase.GetResult(); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/HuggingFaceTextCompletion.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/HuggingFaceTextCompletion.cs deleted file mode 100644 index e57690f76f98..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/HuggingFaceTextCompletion.cs +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; - -/// -/// HuggingFace text completion service. -/// -#pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. -public sealed class HuggingFaceTextCompletion : ITextCompletion -#pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. -{ - private const string HuggingFaceApiEndpoint = "https://api-inference.huggingface.co/models"; - - private readonly string _model; - private readonly string? _endpoint; - private readonly HttpClient _httpClient; - private readonly string? _apiKey; - - /// - /// Initializes a new instance of the class. - /// Using default implementation. - /// - /// Endpoint for service API call. - /// Model to use for service API call. 
- public HuggingFaceTextCompletion(Uri endpoint, string model) - { - Verify.NotNull(endpoint); - Verify.NotNullOrWhiteSpace(model); - - this._endpoint = endpoint.AbsoluteUri; - this._model = model; - - this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - } - - /// - /// Initializes a new instance of the class. - /// Using HuggingFace API for service call, see https://huggingface.co/docs/api-inference/index. - /// - /// The name of the model to use for text completion. - /// The API key for accessing the Hugging Face service. - /// The HTTP client to use for making API requests. If not specified, a default client will be used. - /// The endpoint URL for the Hugging Face service. - /// If not specified, the base address of the HTTP client is used. If the base address is not available, a default endpoint will be used. - public HuggingFaceTextCompletion(string model, string? apiKey = null, HttpClient? httpClient = null, string? endpoint = null) - { - Verify.NotNullOrWhiteSpace(model); - - this._model = model; - this._apiKey = apiKey; - this._httpClient = httpClient ?? new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - this._endpoint = endpoint; - } - - /// - [Obsolete("Streaming capability is not supported, use GetCompletionsAsync instead")] - public IAsyncEnumerable GetStreamingCompletionsAsync( - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - throw new NotSupportedException("Streaming capability is not supported"); - } - - /// - public async Task> GetCompletionsAsync( - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - return await this.ExecuteGetCompletionsAsync(text, cancellationToken).ConfigureAwait(false); - } - - #region private ================================================================================ - - private async Task> ExecuteGetCompletionsAsync(string text, CancellationToken cancellationToken = default) - { - var completionRequest = new TextCompletionRequest - { - Input = text - }; - - using var httpRequestMessage = HttpRequest.CreatePostRequest(this.GetRequestUri(), completionRequest); - - httpRequestMessage.Headers.Add("User-Agent", Telemetry.HttpUserAgent); - if (!string.IsNullOrEmpty(this._apiKey)) - { - httpRequestMessage.Headers.Add("Authorization", $"Bearer {this._apiKey}"); - } - - using var response = await this._httpClient.SendWithSuccessCheckAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false); - - var body = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - - List? completionResponse = JsonSerializer.Deserialize>(body); - - if (completionResponse is null) - { - throw new SKException("Unexpected response from model") - { - Data = { { "ResponseData", body } }, - }; - } - - return completionResponse.ConvertAll(c => new TextCompletionResult(c)); - } - - /// - /// Retrieves the request URI based on the provided endpoint and model information. - /// - /// - /// A object representing the request URI. 
- /// - private Uri GetRequestUri() - { - var baseUrl = HuggingFaceApiEndpoint; - - if (!string.IsNullOrEmpty(this._endpoint)) - { - baseUrl = this._endpoint; - } - else if (this._httpClient.BaseAddress?.AbsoluteUri != null) - { - baseUrl = this._httpClient.BaseAddress!.AbsoluteUri; - } - - return new Uri($"{baseUrl!.TrimEnd('/')}/{this._model}"); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionRequest.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionRequest.cs deleted file mode 100644 index a57ca56bb037..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionRequest.cs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; - -/// -/// HTTP schema to perform completion request. -/// -[Serializable] -public sealed class TextCompletionRequest -{ - /// - /// Prompt to complete. - /// - [JsonPropertyName("inputs")] - public string Input { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionResponse.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionResponse.cs deleted file mode 100644 index 91f922f6cb16..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionResponse.cs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; - -/// -/// HTTP Schema for completion response. -/// -public sealed class TextCompletionResponse -{ - /// - /// Completed text. - /// - [JsonPropertyName("generated_text")] - public string? Text { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionResult.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionResult.cs deleted file mode 100644 index e80ab08e2dd8..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextCompletion/TextCompletionResult.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; - -internal sealed class TextCompletionResult : ITextResult -{ - private readonly ModelResult _responseData; - - public TextCompletionResult(TextCompletionResponse responseData) - { - this._responseData = new ModelResult(responseData); - } - - public ModelResult ModelResult => this._responseData; - - public Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - return Task.FromResult(this._responseData.GetResult().Text ?? string.Empty); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGeneration.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGeneration.cs deleted file mode 100644 index 103212317b7c..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGeneration.cs +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
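
The removed request and response classes just above mirror the Hugging Face Inference API wire format: the prompt is posted under `inputs`, and each completion comes back under `generated_text`. A small illustration of that mapping using the deleted types; the sample response body is invented for the example:

```csharp
// Illustration of the wire format modeled by the removed TextCompletionRequest and
// TextCompletionResponse classes. The response JSON below is a made-up sample.
using System;
using System.Collections.Generic;
using System.Text.Json;
using Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion;

var request = new TextCompletionRequest { Input = "One plus one equals" };
Console.WriteLine(JsonSerializer.Serialize(request)); // {"inputs":"One plus one equals"}

string sampleBody = "[{\"generated_text\":\"One plus one equals two.\"}]";
List<TextCompletionResponse>? completions =
    JsonSerializer.Deserialize<List<TextCompletionResponse>>(sampleBody);
Console.WriteLine(completions?[0].Text); // One plus one equals two.
```
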
- -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextEmbedding; - -/// -/// HuggingFace embedding generation service. -/// -#pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. -public sealed class HuggingFaceTextEmbeddingGeneration : ITextEmbeddingGeneration -#pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. -{ - private readonly string _model; - private readonly string? _endpoint; - private readonly HttpClient _httpClient; - - /// - /// Initializes a new instance of the class. - /// Using default implementation. - /// - /// Endpoint for service API call. - /// Model to use for service API call. - public HuggingFaceTextEmbeddingGeneration(Uri endpoint, string model) - { - Verify.NotNull(endpoint); - Verify.NotNullOrWhiteSpace(model); - - this._endpoint = endpoint.AbsoluteUri; - this._model = model; - - this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - } - - /// - /// Initializes a new instance of the class. - /// - /// Model to use for service API call. - /// Endpoint for service API call. - public HuggingFaceTextEmbeddingGeneration(string model, string endpoint) - { - Verify.NotNullOrWhiteSpace(model); - Verify.NotNullOrWhiteSpace(endpoint); - - this._model = model; - this._endpoint = endpoint; - - this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - } - - /// - /// Initializes a new instance of the class. - /// - /// Model to use for service API call. - /// The HttpClient used for making HTTP requests. - /// Endpoint for service API call. If not specified, the base address of the HTTP client is used. - public HuggingFaceTextEmbeddingGeneration(string model, HttpClient httpClient, string? endpoint = null) - { - Verify.NotNullOrWhiteSpace(model); - Verify.NotNull(httpClient); - - this._model = model; - this._endpoint = endpoint; - this._httpClient = httpClient; - - if (httpClient.BaseAddress == null && string.IsNullOrEmpty(endpoint)) - { - throw new SKException("The HttpClient BaseAddress and endpoint are both null or empty. Please ensure at least one is provided."); - } - } - - /// - public async Task>> GenerateEmbeddingsAsync(IList data, CancellationToken cancellationToken = default) - { - return await this.ExecuteEmbeddingRequestAsync(data, cancellationToken).ConfigureAwait(false); - } - - #region private ================================================================================ - - /// - /// Performs HTTP request to given endpoint for embedding generation. - /// - /// Data to embed. - /// The to monitor for cancellation requests. The default is . - /// List of generated embeddings. 
- private async Task>> ExecuteEmbeddingRequestAsync(IList data, CancellationToken cancellationToken) - { - var embeddingRequest = new TextEmbeddingRequest - { - Input = data - }; - - using var httpRequestMessage = HttpRequest.CreatePostRequest(this.GetRequestUri(), embeddingRequest); - - httpRequestMessage.Headers.Add("User-Agent", Telemetry.HttpUserAgent); - - var response = await this._httpClient.SendWithSuccessCheckAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false); - var body = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - - var embeddingResponse = JsonSerializer.Deserialize(body); - - return embeddingResponse?.Embeddings?.Select(l => l.Embedding).ToList()!; - } - - /// - /// Retrieves the request URI based on the provided endpoint and model information. - /// - /// - /// A object representing the request URI. - /// - private Uri GetRequestUri() - { - string? baseUrl = null; - - if (!string.IsNullOrEmpty(this._endpoint)) - { - baseUrl = this._endpoint; - } - else if (this._httpClient.BaseAddress?.AbsoluteUri != null) - { - baseUrl = this._httpClient.BaseAddress!.AbsoluteUri; - } - else - { - throw new SKException("No endpoint or HTTP client base address has been provided"); - } - - return new Uri($"{baseUrl!.TrimEnd('/')}/{this._model}"); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/TextEmbeddingRequest.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/TextEmbeddingRequest.cs deleted file mode 100644 index ca33f268ed61..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/TextEmbeddingRequest.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextEmbedding; - -/// -/// HTTP schema to perform embedding request. -/// -[Serializable] -public sealed class TextEmbeddingRequest -{ - /// - /// Data to embed. - /// - [JsonPropertyName("inputs")] - public IList Input { get; set; } = new List(); -} diff --git a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/TextEmbeddingResponse.cs b/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/TextEmbeddingResponse.cs deleted file mode 100644 index bdf722cce495..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.HuggingFace/TextEmbedding/TextEmbeddingResponse.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextEmbedding; - -/// -/// Represents the response from the Hugging Face text embedding API. -/// -public sealed class TextEmbeddingResponse -{ - /// - /// Represents the embedding vector for a given text. - /// - public sealed class EmbeddingVector - { - /// - /// The embedding vector as a ReadOnlyMemory of float values. - /// - [JsonPropertyName("embedding")] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] - public ReadOnlyMemory Embedding { get; set; } - } - - /// - /// List of embeddings. - /// - [JsonPropertyName("data")] - public IList? 
Embeddings { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AzureOpenAIClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AzureOpenAIClientBase.cs deleted file mode 100644 index 71f193e032c3..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AzureOpenAIClientBase.cs +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Runtime.CompilerServices; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; -/// -/// Base class for Azure OpenAI clients. -/// -public abstract class AzureOpenAIClientBase : ClientBase -{ - /// - /// OpenAI / Azure OpenAI Client - /// - private protected override OpenAIClient Client { get; } - - /// - /// Initializes a new instance of the class using API Key authentication. - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - private protected AzureOpenAIClientBase( - string modelId, - string endpoint, - string apiKey, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) : base(loggerFactory) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - Verify.NotNullOrWhiteSpace(apiKey); - - var options = GetClientOptions(httpClient); - - this.ModelId = modelId; - this.Client = new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), options); - } - - /// - /// Initializes a new instance of the class supporting AAD authentication. - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - private protected AzureOpenAIClientBase( - string modelId, - string endpoint, - TokenCredential credential, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) : base(loggerFactory) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - - var options = GetClientOptions(httpClient); - - this.ModelId = modelId; - this.Client = new OpenAIClient(new Uri(endpoint), credential, options); - } - - /// - /// Initializes a new instance of the class using the specified OpenAIClient. - /// Note: instances created this way might not have the default diagnostics settings, - /// it's up to the caller to configure the client. - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . 
- /// The to use for logging. If null, no logging will be performed. - private protected AzureOpenAIClientBase( - string modelId, - OpenAIClient openAIClient, - ILoggerFactory? loggerFactory = null) : base(loggerFactory) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNull(openAIClient); - - this.ModelId = modelId; - this.Client = openAIClient; - } - - /// - /// Options used by the Azure OpenAI client, e.g. User Agent. - /// - /// Custom for HTTP requests. - /// An instance of . - private static OpenAIClientOptions GetClientOptions(HttpClient? httpClient) - { - var options = new OpenAIClientOptions - { - Diagnostics = - { - IsTelemetryEnabled = Telemetry.IsTelemetryEnabled, - ApplicationId = Telemetry.HttpUserAgent, - } - }; - - if (httpClient != null) - { - options.Transport = new HttpClientTransport(httpClient); - options.RetryPolicy = new RetryPolicy(maxRetries: 0); //Disabling Azure SDK retry policy to use the one provided by the custom HTTP client. - } - - return options; - } - - /// - /// Logs Azure OpenAI action details. - /// - /// Caller member name. Populated automatically by runtime. - private protected void LogActionDetails([CallerMemberName] string? callerMemberName = default) - { - this.Logger.LogInformation("Action: {Action}. Azure OpenAI Deployment Name: {DeploymentName}.", callerMemberName, this.ModelId); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatModelResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatModelResult.cs deleted file mode 100644 index 8779bac9c9a6..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatModelResult.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Azure.AI.OpenAI; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// Represents a singular result of a chat completion. -public class ChatModelResult -{ - /// A unique identifier associated with this chat completion response. - public string Id { get; } - - /// - /// The first timestamp associated with generation activity for this completions response, - /// represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. - /// - public DateTimeOffset Created { get; } - - /// - /// Content filtering results for zero or more prompts in the request. - /// - public IReadOnlyList PromptFilterResults { get; } - - /// - /// The completion choice associated with this completion result. - /// - public ChatChoice Choice { get; } - - /// Usage information for tokens processed and generated as part of this completions operation. - public CompletionsUsage Usage { get; } - - /// Initializes a new instance of TextModelResult. - /// A completions response object to populate the fields relative the response. - /// A choice object to populate the fields relative to the resulting choice. 
- internal ChatModelResult(ChatCompletions completionsData, ChatChoice choiceData) - { - this.Id = completionsData.Id; - this.Created = completionsData.Created; - this.PromptFilterResults = completionsData.PromptFilterResults; - this.Choice = choiceData; - this.Usage = completionsData.Usage; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatResult.cs deleted file mode 100644 index 19c1216ea482..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatResult.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -internal sealed class ChatResult : IChatResult, ITextResult -{ - private readonly ChatChoice _choice; - - public ChatResult(ChatCompletions resultData, ChatChoice choice) - { - Verify.NotNull(choice); - this._choice = choice; - this.ModelResult = new(new ChatModelResult(resultData, choice)); - } - - public ModelResult ModelResult { get; } - - public Task GetChatMessageAsync(CancellationToken cancellationToken = default) - => Task.FromResult(new SKChatMessage(this._choice.Message)); - - public Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - return Task.FromResult(this._choice.Message.Content); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatResultExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatResultExtensions.cs deleted file mode 100644 index 7a34321e6d86..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatResultExtensions.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.SemanticKernel.AI.ChatCompletion; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Provides extension methods for the IChatResult interface. -/// -public static class ChatResultExtensions -{ - /// - /// Retrieve the resulting function from the chat result. - /// - /// - /// The , or null if no function was returned by the model. - [Obsolete("Obsoleted, please use GetOpenAIFunctionResponse instead")] - public static OpenAIFunctionResponse? GetFunctionResponse(this IChatResult chatResult) - { - return GetOpenAIFunctionResponse(chatResult); - } - - /// - /// Retrieve the resulting function from the chat result. - /// - /// - /// The , or null if no function was returned by the model. - public static OpenAIFunctionResponse? GetOpenAIFunctionResponse(this IChatResult chatResult) - { - OpenAIFunctionResponse? functionResponse = null; - var functionCall = chatResult.ModelResult.GetResult().Choice.Message.FunctionCall; - if (functionCall is not null) - { - functionResponse = OpenAIFunctionResponse.FromFunctionCall(functionCall); - } - return functionResponse; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingModelResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingModelResult.cs deleted file mode 100644 index c4fc1a69b7b4..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingModelResult.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. 
All rights reserved. - -using System; -using System.Collections.Generic; -using Azure.AI.OpenAI; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// Represents a singular result of a chat completion. -public class ChatStreamingModelResult -{ - /// A unique identifier associated with this chat completion response. - public string Id { get; } - - /// - /// The first timestamp associated with generation activity for this completions response, - /// represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. - /// - public DateTimeOffset Created { get; } - - /// - /// Content filtering results for zero or more prompts in the request. - /// - public IReadOnlyList PromptFilterResults { get; } - - /// - /// The completion choice associated with this completion result. - /// - public StreamingChatChoice Choice { get; } - - /// Initializes a new instance of TextModelResult. - /// A completions response object to populate the fields relative the response. - /// A choice object to populate the fields relative to the resulting choice. - internal ChatStreamingModelResult(StreamingChatCompletions completionsData, StreamingChatChoice choiceData) - { - this.Id = completionsData.Id; - this.Created = completionsData.Created; - this.PromptFilterResults = completionsData.PromptFilterResults; - this.Choice = choiceData; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingResult.cs deleted file mode 100644 index 693cc7edf738..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingResult.cs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -internal sealed class ChatStreamingResult : IChatStreamingResult, ITextStreamingResult, IChatResult, ITextResult -{ - private readonly StreamingChatChoice _choice; - public ModelResult ModelResult { get; } - - public ChatStreamingResult(StreamingChatCompletions resultData, StreamingChatChoice choice) - { - Verify.NotNull(choice); - this.ModelResult = new(new ChatStreamingModelResult(resultData, choice)); - this._choice = choice; - } - - /// - public async Task GetChatMessageAsync(CancellationToken cancellationToken = default) - { - var chatMessage = await this._choice.GetMessageStreaming(cancellationToken) - .LastOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - - if (chatMessage is null) - { - throw new SKException("Unable to get chat message from stream"); - } - - return new SKChatMessage(chatMessage); - } - - /// - public async IAsyncEnumerable GetStreamingChatMessageAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) - { - await foreach (var message in this._choice.GetMessageStreaming(cancellationToken)) - { - if (message.FunctionCall is not null || message.Content is { Length: > 0 }) - { - yield return new SKChatMessage(message); - } - } - } - - /// - public async Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - return (await 
this.GetChatMessageAsync(cancellationToken).ConfigureAwait(false)).Content; - } - - /// - public async IAsyncEnumerable GetCompletionStreamingAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) - { - await foreach (var result in this.GetStreamingChatMessageAsync(cancellationToken).ConfigureAwait(false)) - { - if (result.Content is string content and { Length: > 0 }) - { - yield return content; - } - } - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingResultExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingResultExtensions.cs deleted file mode 100644 index e1f4a94a9cf4..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ChatStreamingResultExtensions.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.AI.ChatCompletion; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Provides extension methods for the IChatStreamingResult interface. -/// -public static class ChatStreamingResultExtensions -{ - /// - /// Retrieve the resulting function from the chat result. - /// - /// Chat streaming result - /// The , or null if no function was returned by the model. - public static async Task GetOpenAIStreamingFunctionResponseAsync(this IChatStreamingResult chatStreamingResult) - { - if (chatStreamingResult is not ChatStreamingResult) - { - throw new NotSupportedException($"Chat streaming result is not OpenAI {nameof(ChatStreamingResult)} supported type"); - } - - StringBuilder arguments = new(); - FunctionCall? functionCall = null; - await foreach (SKChatMessage message in chatStreamingResult.GetStreamingChatMessageAsync()) - { - functionCall ??= message.FunctionCall; - - arguments.Append(message.FunctionCall.Arguments); - } - - if (functionCall is null) - { - return null; - } - - functionCall.Arguments = arguments.ToString(); - return OpenAIFunctionResponse.FromFunctionCall(functionCall); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs deleted file mode 100644 index c31ea4a316c0..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/ClientBase.cs +++ /dev/null @@ -1,448 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.Metrics; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -#pragma warning disable CA2208 // Instantiate argument exceptions correctly - -/// -/// Base class for AI clients that provides common functionality for interacting with OpenAI services. -/// -public abstract class ClientBase -{ - private const int MaxResultsPerPrompt = 128; - - // Prevent external inheritors - private protected ClientBase(ILoggerFactory? 
loggerFactory = null) - { - this.Logger = loggerFactory is not null ? loggerFactory.CreateLogger(this.GetType()) : NullLogger.Instance; - } - - /// - /// Model Id or Deployment Name - /// - private protected string ModelId { get; set; } = string.Empty; - - /// - /// OpenAI / Azure OpenAI Client - /// - private protected abstract OpenAIClient Client { get; } - - /// - /// Logger instance - /// - private protected ILogger Logger { get; set; } - - /// - /// Instance of for metrics. - /// - private static readonly Meter s_meter = new(typeof(ClientBase).Assembly.GetName().Name); - - /// - /// Instance of to keep track of the number of prompt tokens used. - /// - private static readonly Counter s_promptTokensCounter = - s_meter.CreateCounter( - name: "SK.Connectors.OpenAI.PromptTokens", - description: "Number of prompt tokens used"); - - /// - /// Instance of to keep track of the number of completion tokens used. - /// - private static readonly Counter s_completionTokensCounter = - s_meter.CreateCounter( - name: "SK.Connectors.OpenAI.CompletionTokens", - description: "Number of completion tokens used"); - - /// - /// Instance of to keep track of the total number of tokens used. - /// - private static readonly Counter s_totalTokensCounter = - s_meter.CreateCounter( - name: "SK.Connectors.OpenAI.TotalTokens", - description: "Total number of tokens used"); - - /// - /// Creates completions for the prompt and settings. - /// - /// The prompt to complete. - /// Request settings for the completion API - /// The to monitor for cancellation requests. The default is . - /// Completions generated by the remote model - private protected async Task> InternalGetTextResultsAsync( - string text, - AIRequestSettings? requestSettings, - CancellationToken cancellationToken = default) - { - OpenAIRequestSettings textRequestSettings = OpenAIRequestSettings.FromRequestSettings(requestSettings, OpenAIRequestSettings.DefaultTextMaxTokens); - - ValidateMaxTokens(textRequestSettings.MaxTokens); - var options = CreateCompletionsOptions(text, textRequestSettings); - - Response? response = await RunRequestAsync?>( - () => this.Client.GetCompletionsAsync(this.ModelId, options, cancellationToken)).ConfigureAwait(false); - - if (response is null) - { - throw new SKException("Text completions null response"); - } - - var responseData = response.Value; - - if (responseData.Choices.Count == 0) - { - throw new SKException("Text completions not found"); - } - - this.CaptureUsageDetails(responseData.Usage); - - return responseData.Choices.Select(choice => new TextResult(responseData, choice)).ToList(); - } - - /// - /// Creates completions streams for the prompt and settings. - /// - /// The prompt to complete. - /// Request settings for the completion API - /// The to monitor for cancellation requests. The default is . - /// Stream the completions generated by the remote model - private protected async IAsyncEnumerable InternalGetTextStreamingResultsAsync( - string text, - AIRequestSettings? requestSettings, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - OpenAIRequestSettings textRequestSettings = OpenAIRequestSettings.FromRequestSettings(requestSettings, OpenAIRequestSettings.DefaultTextMaxTokens); - - ValidateMaxTokens(textRequestSettings.MaxTokens); - - var options = CreateCompletionsOptions(text, textRequestSettings); - - Response? 
response = await RunRequestAsync>( - () => this.Client.GetCompletionsStreamingAsync(this.ModelId, options, cancellationToken)).ConfigureAwait(false); - - using StreamingCompletions streamingChatCompletions = response.Value; - await foreach (StreamingChoice choice in streamingChatCompletions.GetChoicesStreaming(cancellationToken)) - { - yield return new TextStreamingResult(streamingChatCompletions, choice); - } - } - - /// - /// Generates an embedding from the given . - /// - /// List of strings to generate embeddings for - /// The to monitor for cancellation requests. The default is . - /// List of embeddings - private protected async Task>> InternalGetEmbeddingsAsync( - IList data, - CancellationToken cancellationToken = default) - { - var result = new List>(data.Count); - foreach (string text in data) - { - var options = new EmbeddingsOptions(text); - - Response? response = await RunRequestAsync?>( - () => this.Client.GetEmbeddingsAsync(this.ModelId, options, cancellationToken)).ConfigureAwait(false); - - if (response is null) - { - throw new SKException("Text embedding null response"); - } - - if (response.Value.Data.Count == 0) - { - throw new SKException("Text embedding not found"); - } - - result.Add(response.Value.Data[0].Embedding.ToArray()); - } - - return result; - } - - /// - /// Generate a new chat message - /// - /// Chat history - /// AI request settings - /// Async cancellation token - /// Generated chat message in string format - private protected async Task> InternalGetChatResultsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings, - CancellationToken cancellationToken = default) - { - Verify.NotNull(chat); - - OpenAIRequestSettings chatRequestSettings = OpenAIRequestSettings.FromRequestSettings(requestSettings); - - ValidateMaxTokens(chatRequestSettings.MaxTokens); - - var chatOptions = CreateChatCompletionsOptions(chatRequestSettings, chat); - - Response? response = await RunRequestAsync?>( - () => this.Client.GetChatCompletionsAsync(this.ModelId, chatOptions, cancellationToken)).ConfigureAwait(false); - - if (response is null) - { - throw new SKException("Chat completions null response"); - } - - var responseData = response.Value; - - if (responseData.Choices.Count == 0) - { - throw new SKException("Chat completions not found"); - } - - this.CaptureUsageDetails(responseData.Usage); - - return responseData.Choices.Select(chatChoice => new ChatResult(responseData, chatChoice)).ToList(); - } - - /// - /// Generate a new chat message stream - /// - /// Chat history - /// AI request settings - /// Async cancellation token - /// Streaming of generated chat message in string format - private protected async IAsyncEnumerable InternalGetChatStreamingResultsAsync( - IEnumerable chat, - AIRequestSettings? requestSettings, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - Verify.NotNull(chat); - - OpenAIRequestSettings chatRequestSettings = OpenAIRequestSettings.FromRequestSettings(requestSettings); - - ValidateMaxTokens(chatRequestSettings.MaxTokens); - - var options = CreateChatCompletionsOptions(chatRequestSettings, chat); - - Response? 
response = await RunRequestAsync>( - () => this.Client.GetChatCompletionsStreamingAsync(this.ModelId, options, cancellationToken)).ConfigureAwait(false); - - if (response is null) - { - throw new SKException("Chat completions null response"); - } - - using StreamingChatCompletions streamingChatCompletions = response.Value; - await foreach (StreamingChatChoice choice in streamingChatCompletions.GetChoicesStreaming(cancellationToken).ConfigureAwait(false)) - { - yield return new ChatStreamingResult(response.Value, choice); - } - } - - /// - /// Create a new empty chat instance - /// - /// Optional chat instructions for the AI service - /// Chat object - private protected static OpenAIChatHistory InternalCreateNewChat(string? instructions = null) - { - return new OpenAIChatHistory(instructions); - } - - private protected async Task> InternalGetChatResultsAsTextAsync( - string text, - AIRequestSettings? requestSettings, - CancellationToken cancellationToken = default) - { - ChatHistory chat = PrepareChatHistory(text, requestSettings, out OpenAIRequestSettings chatSettings); - - return (await this.InternalGetChatResultsAsync(chat, chatSettings, cancellationToken).ConfigureAwait(false)) - .OfType() - .ToList(); - } - - private protected async IAsyncEnumerable InternalGetChatStreamingResultsAsTextAsync( - string text, - AIRequestSettings? requestSettings, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - ChatHistory chat = PrepareChatHistory(text, requestSettings, out OpenAIRequestSettings chatSettings); - - IAsyncEnumerable chatCompletionStreamingResults = this.InternalGetChatStreamingResultsAsync(chat, chatSettings, cancellationToken); - await foreach (var chatCompletionStreamingResult in chatCompletionStreamingResults) - { - yield return (ITextStreamingResult)chatCompletionStreamingResult; - } - } - - private static OpenAIChatHistory PrepareChatHistory(string text, AIRequestSettings? 
requestSettings, out OpenAIRequestSettings settings) - { - settings = OpenAIRequestSettings.FromRequestSettings(requestSettings); - var chat = InternalCreateNewChat(settings.ChatSystemPrompt); - chat.AddUserMessage(text); - return chat; - } - - private static CompletionsOptions CreateCompletionsOptions(string text, OpenAIRequestSettings requestSettings) - { - if (requestSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt) - { - throw new ArgumentOutOfRangeException($"{nameof(requestSettings)}.{nameof(requestSettings.ResultsPerPrompt)}", requestSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive."); - } - - var options = new CompletionsOptions - { - Prompts = { text.NormalizeLineEndings() }, - MaxTokens = requestSettings.MaxTokens, - Temperature = (float?)requestSettings.Temperature, - NucleusSamplingFactor = (float?)requestSettings.TopP, - FrequencyPenalty = (float?)requestSettings.FrequencyPenalty, - PresencePenalty = (float?)requestSettings.PresencePenalty, - Echo = false, - ChoicesPerPrompt = requestSettings.ResultsPerPrompt, - GenerationSampleCount = requestSettings.ResultsPerPrompt, - LogProbabilityCount = null, - User = null, - }; - - foreach (var keyValue in requestSettings.TokenSelectionBiases) - { - options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value); - } - - if (requestSettings.StopSequences is { Count: > 0 }) - { - foreach (var s in requestSettings.StopSequences) - { - options.StopSequences.Add(s); - } - } - - return options; - } - - private static ChatCompletionsOptions CreateChatCompletionsOptions(OpenAIRequestSettings requestSettings, IEnumerable chatHistory) - { - if (requestSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt) - { - throw new ArgumentOutOfRangeException($"{nameof(requestSettings)}.{nameof(requestSettings.ResultsPerPrompt)}", requestSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive."); - } - - var options = new ChatCompletionsOptions - { - MaxTokens = requestSettings.MaxTokens, - Temperature = (float?)requestSettings.Temperature, - NucleusSamplingFactor = (float?)requestSettings.TopP, - FrequencyPenalty = (float?)requestSettings.FrequencyPenalty, - PresencePenalty = (float?)requestSettings.PresencePenalty, - ChoiceCount = requestSettings.ResultsPerPrompt, - }; - - if (requestSettings.Functions is not null) - { - if (requestSettings.FunctionCall == OpenAIRequestSettings.FunctionCallAuto) - { - options.FunctionCall = FunctionDefinition.Auto; - options.Functions = requestSettings.Functions.Select(f => f.ToFunctionDefinition()).ToList(); - } - else if (requestSettings.FunctionCall != OpenAIRequestSettings.FunctionCallNone - && !requestSettings.FunctionCall.IsNullOrEmpty()) - { - var filteredFunctions = requestSettings.Functions - .Where(f => f.FullyQualifiedName.Equals(requestSettings.FunctionCall, StringComparison.OrdinalIgnoreCase)) - .ToList(); - - OpenAIFunction? 
function = filteredFunctions.FirstOrDefault(); - if (function is not null) - { - options.FunctionCall = function.ToFunctionDefinition(); - options.Functions = filteredFunctions.Select(f => f.ToFunctionDefinition()).ToList(); - } - } - } - - foreach (var keyValue in requestSettings.TokenSelectionBiases) - { - options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value); - } - - if (requestSettings.StopSequences is { Count: > 0 }) - { - foreach (var s in requestSettings.StopSequences) - { - options.StopSequences.Add(s); - } - } - - foreach (var message in chatHistory) - { - var validRole = GetValidChatRole(message.Role); - options.Messages.Add(new ChatMessage(validRole, message.Content)); - } - - return options; - } - - private static ChatRole GetValidChatRole(AuthorRole role) - { - var validRole = new ChatRole(role.Label); - - if (validRole != ChatRole.User && - validRole != ChatRole.System && - validRole != ChatRole.Assistant) - { - throw new ArgumentException($"Invalid chat message author role: {role}"); - } - - return validRole; - } - - private static void ValidateMaxTokens(int? maxTokens) - { - if (maxTokens.HasValue && maxTokens < 1) - { - throw new SKException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); - } - } - - private static async Task RunRequestAsync(Func> request) - { - try - { - return await request.Invoke().ConfigureAwait(false); - } - catch (RequestFailedException e) - { - throw e.ToHttpOperationException(); - } - } - - /// - /// Captures usage details, including token information. - /// - /// Instance of with usage details. - private void CaptureUsageDetails(CompletionsUsage usage) - { - this.Logger.LogInformation( - "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. Total tokens: {TotalTokens}.", - usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens); - - s_promptTokensCounter.Add(usage.PromptTokens); - s_completionTokensCounter.Add(usage.CompletionTokens); - s_totalTokensCounter.Add(usage.TotalTokens); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/FunctionCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/FunctionCollectionExtensions.cs deleted file mode 100644 index 45109a3296e1..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/FunctionCollectionExtensions.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Extension methods for . -/// -public static class FunctionCollectionExtensions -{ - /// - /// Given an object, tries to retrieve the corresponding and populate with its parameters. - /// - /// The SK function collection. - /// The object. - /// When this method returns, the function that was retrieved if one with the specified name was found; otherwise, - /// When this method returns, the context variables containing parameters for the function; otherwise, - /// if the function was found; otherwise, . - public static bool TryGetFunctionAndContext( - this IReadOnlyFunctionCollection functionCollection, - OpenAIFunctionResponse response, - [NotNullWhen(true)] out ISKFunction? availableFunction, - [NotNullWhen(true)] out ContextVariables? 
availableContext) - { - availableFunction = null; - availableContext = null; - - if (!functionCollection.TryGetFunction(response.PluginName, response.FunctionName, out availableFunction)) - { - if (!functionCollection.TryGetFunction(response.FunctionName, out availableFunction)) - { - // Function not found in collection - return false; - } - } - - // Add parameters to context variables - availableContext = new ContextVariables(); - foreach (var parameter in response.Parameters) - { - availableContext.Set(parameter.Key, parameter.Value.ToString()); - } - - return true; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/FunctionViewExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/FunctionViewExtensions.cs deleted file mode 100644 index 70eab2739773..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/FunctionViewExtensions.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Extensions for specific to the OpenAI connector. -/// -public static class FunctionViewExtensions -{ - /// - /// Convert a to an . - /// - /// The object to convert. - /// An object. - public static OpenAIFunction ToOpenAIFunction(this FunctionView functionView) - { - var openAIParams = new List(); - foreach (ParameterView param in functionView.Parameters) - { - openAIParams.Add(new OpenAIFunctionParameter - { - Name = param.Name, - Description = (param.Description ?? string.Empty) - + (string.IsNullOrEmpty(param.DefaultValue) ? string.Empty : $" (default value: {param.DefaultValue})"), - Type = param.Type?.Name ?? "string", - IsRequired = param.IsRequired ?? false - }); - } - - return new OpenAIFunction - { - FunctionName = functionView.Name, - PluginName = functionView.PluginName, - Description = functionView.Description, - Parameters = openAIParams, - }; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIClientBase.cs deleted file mode 100644 index ccd33cb90a4e..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIClientBase.cs +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using System.Runtime.CompilerServices; -using Azure.AI.OpenAI; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Base class for OpenAI clients, providing common functionality and properties. -/// -public abstract class OpenAIClientBase : ClientBase -{ - /// - /// OpenAI / Azure OpenAI Client - /// - private protected override OpenAIClient Client { get; } - - /// - /// Initializes a new instance of the class. - /// - /// Model name. - /// OpenAI API Key. - /// OpenAI Organization Id (usually optional). - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - private protected OpenAIClientBase( - string modelId, - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILoggerFactory? 
loggerFactory = null) : base(loggerFactory) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - this.ModelId = modelId; - - var options = GetClientOptions(httpClient); - - if (!string.IsNullOrWhiteSpace(organization)) - { - options.AddPolicy(new AddHeaderRequestPolicy("OpenAI-Organization", organization!), HttpPipelinePosition.PerCall); - } - - this.Client = new OpenAIClient(apiKey, options); - } - - /// - /// Initializes a new instance of the class using the specified OpenAIClient. - /// Note: instances created this way might not have the default diagnostics settings, - /// it's up to the caller to configure the client. - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . - /// The to use for logging. If null, no logging will be performed. - private protected OpenAIClientBase( - string modelId, - OpenAIClient openAIClient, - ILoggerFactory? loggerFactory = null) : base(loggerFactory) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNull(openAIClient); - - this.ModelId = modelId; - this.Client = openAIClient; - } - - /// - /// Logs OpenAI action details. - /// - /// Caller member name. Populated automatically by runtime. - private protected void LogActionDetails([CallerMemberName] string? callerMemberName = default) - { - this.Logger.LogInformation("Action: {Action}. OpenAI Model ID: {ModelId}.", callerMemberName, this.ModelId); - } - - /// - /// Options used by the OpenAI client, e.g. User Agent. - /// - /// Custom for HTTP requests. - /// An instance of . - private static OpenAIClientOptions GetClientOptions(HttpClient? httpClient) - { - var options = new OpenAIClientOptions - { - Diagnostics = - { - IsTelemetryEnabled = Telemetry.IsTelemetryEnabled, - ApplicationId = Telemetry.HttpUserAgent, - } - }; - - if (httpClient != null) - { - options.Transport = new HttpClientTransport(httpClient); - options.RetryPolicy = new RetryPolicy(maxRetries: 0); //Disabling Azure SDK retry policy to use the one provided by the custom HTTP client. - } - - return options; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIFunction.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIFunction.cs deleted file mode 100644 index 5bab87d6d3d7..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIFunction.cs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Represents a function parameter that can be pass to the OpenAI API -/// -public class OpenAIFunctionParameter -{ - /// - /// Name of the parameter. - /// - public string Name { get; set; } = string.Empty; - - /// - /// Description of the parameter. - /// - public string Description { get; set; } = string.Empty; - - /// - /// Type of the parameter. - /// - public string Type { get; set; } = string.Empty; - - /// - /// Whether the parameter is required or not. 
- /// - public bool IsRequired { get; set; } = false; -} - -/// -/// Represents a function that can be pass to the OpenAI API -/// -public class OpenAIFunction -{ - /// - /// Separator between the plugin name and the function name - /// - public const string NameSeparator = "-"; - - /// - /// Name of the function - /// - public string FunctionName { get; set; } = string.Empty; - - /// - /// Name of the function's associated plugin, if applicable - /// - public string PluginName { get; set; } = string.Empty; - - /// - /// Fully qualified name of the function. This is the concatenation of the plugin name and the function name, - /// separated by the value of . - /// If there is no plugin name, this is the same as the function name. - /// - public string FullyQualifiedName => - this.PluginName.IsNullOrEmpty() ? this.FunctionName : string.Join(NameSeparator, this.PluginName, this.FunctionName); - - /// - /// Description of the function - /// - public string Description { get; set; } = string.Empty; - - /// - /// List of parameters for the function - /// - public IList Parameters { get; set; } = new List(); - - /// - /// Converts the to OpenAI's . - /// - /// A containing all the function information. - public FunctionDefinition ToFunctionDefinition() - { - var requiredParams = new List(); - - var paramProperties = new Dictionary(); - foreach (var param in this.Parameters) - { - paramProperties.Add( - param.Name, - new - { - type = param.Type, - description = param.Description, - }); - - if (param.IsRequired) - { - requiredParams.Add(param.Name); - } - } - return new FunctionDefinition - { - Name = this.FullyQualifiedName, - Description = this.Description, - Parameters = BinaryData.FromObjectAsJson( - new - { - type = "object", - properties = paramProperties, - required = requiredParams, - }), - }; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIFunctionResponse.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIFunctionResponse.cs deleted file mode 100644 index fc77d866f85e..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/OpenAIFunctionResponse.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Azure.AI.OpenAI; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Object containing function information and parameter values for a function call generated by the OpenAI model. -/// -public class OpenAIFunctionResponse -{ - /// - /// Name of the function chosen - /// - public string FunctionName { get; set; } = string.Empty; - - /// - /// Name of the function's associated plugin, if applicable - /// - public string PluginName { get; set; } = string.Empty; - - /// - /// Parameter values - /// - public Dictionary Parameters { get; set; } = new(); - - /// - /// Parses the function call and parameter information generated by the model. - /// - /// The OpenAI function call object generated by the model. - /// Instance of . 
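For reference while reviewing the removal above: OpenAIFunction bridged Semantic Kernel function metadata and the Azure SDK's FunctionDefinition. A minimal sketch of that conversion, assuming the types declared in this deleted file; the plugin, function, and parameter names are purely illustrative:

// Sketch only (not part of this change). Assumes:
// using Azure.AI.OpenAI; using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk;
var function = new OpenAIFunction
{
    PluginName = "Time",                       // illustrative name
    FunctionName = "GetDate",
    Description = "Gets the current date",
    Parameters = new List<OpenAIFunctionParameter>
    {
        new() { Name = "format", Type = "string", Description = "Date format", IsRequired = true }
    }
};

// FullyQualifiedName joins plugin and function with NameSeparator ("-"): "Time-GetDate".
FunctionDefinition definition = function.ToFunctionDefinition();

// definition.Parameters carries a JSON schema roughly of the form:
// { "type": "object",
//   "properties": { "format": { "type": "string", "description": "Date format" } },
//   "required": [ "format" ] }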
- public static OpenAIFunctionResponse FromFunctionCall(FunctionCall functionCall) - { - OpenAIFunctionResponse response = new(); - if (functionCall.Name.Contains(OpenAIFunction.NameSeparator)) - { - var parts = functionCall.Name.Split(new string[] { OpenAIFunction.NameSeparator }, StringSplitOptions.RemoveEmptyEntries); - response.PluginName = parts[0]; - response.FunctionName = parts[1]; - } - else - { - response.FunctionName = functionCall.Name; - } - - var parameters = JsonSerializer.Deserialize>(functionCall.Arguments); - if (parameters is not null) - { - response.Parameters = parameters; - } - - return response; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs deleted file mode 100644 index 0c14eee46437..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net; -using Azure; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Provides extension methods for the class. -/// -public static class RequestFailedExceptionExtensions -{ - /// - /// Converts a to an . - /// - /// The original . - /// An instance. - public static HttpOperationException ToHttpOperationException(this RequestFailedException exception) - { - const int NoResponseReceived = 0; - - string? responseContent = null; - - try - { - responseContent = exception.GetRawResponse()?.Content?.ToString(); - } -#pragma warning disable CA1031 // Do not catch general exception types - catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead. -#pragma warning restore CA1031 - - return new HttpOperationException( - exception.Status == NoResponseReceived ? null : (HttpStatusCode?)exception.Status, - responseContent, - exception.Message, - exception); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/SKChatMessage.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/SKChatMessage.cs deleted file mode 100644 index 89b6b2b2455d..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/SKChatMessage.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.AI.ChatCompletion; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// -/// Chat message representation from Semantic Kernel ChatMessageBase Abstraction -/// -public class SKChatMessage : ChatMessageBase -{ - private readonly ChatMessage? _message; - - /// - /// Initializes a new instance of the class. - /// - /// OpenAI SDK chat message representation - public SKChatMessage(Azure.AI.OpenAI.ChatMessage message) - : base(new AuthorRole(message.Role.ToString()), message.Content) - { - this._message = message; - } - - /// - /// Initializes a new instance of the class. - /// - /// Role of the author of the message. - /// Content of the message. - public SKChatMessage(string role, string content) - : base(new AuthorRole(role), content) - { - } - - /// - /// Exposes the underlying OpenAI SDK function call chat message representation - /// - public FunctionCall FunctionCall - => this._message?.FunctionCall ?? 
throw new NotSupportedException("Function call is not supported"); -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextModelResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextModelResult.cs deleted file mode 100644 index 72a4ac0b66f8..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextModelResult.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Azure.AI.OpenAI; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -/// Represents a singular result of a text completion. -public sealed class TextModelResult -{ - /// A unique identifier associated with this text completion response. - public string Id { get; } - - /// - /// The first timestamp associated with generation activity for this completions response, - /// represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. - /// - public DateTimeOffset Created { get; } - - /// - /// Content filtering results for zero or more prompts in the request. - /// - public IReadOnlyList PromptFilterResults { get; } - - /// - /// The completion choice associated with this completion result. - /// - public Choice Choice { get; } - - /// Usage information for tokens processed and generated as part of this completions operation. - public CompletionsUsage Usage { get; } - - /// Initializes a new instance of TextModelResult. - /// A completions response object to populate the fields relative the response. - /// A choice object to populate the fields relative to the resulting choice. - internal TextModelResult(Completions completionsData, Choice choiceData) - { - this.Id = completionsData.Id; - this.Created = completionsData.Created; - this.PromptFilterResults = completionsData.PromptFilterResults; - this.Choice = choiceData; - this.Usage = completionsData.Usage; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextResult.cs deleted file mode 100644 index 44893f9cd6b3..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextResult.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -internal sealed class TextResult : ITextResult -{ - private readonly ModelResult _modelResult; - private readonly Choice _choice; - - public TextResult(Completions resultData, Choice choice) - { - this._modelResult = new(new TextModelResult(resultData, choice)); - this._choice = choice; - } - - public ModelResult ModelResult => this._modelResult; - - public Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - return Task.FromResult(this._choice.Text); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextStreamingResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextStreamingResult.cs deleted file mode 100644 index d2c5537cc0c0..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/TextStreamingResult.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
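OpenAIFunctionResponse.FromFunctionCall, shown just above, reversed that mapping when the model chose a function: it split the fully qualified name on the separator and deserialized the JSON arguments. A rough sketch, assuming the deleted types above and the beta Azure.AI.OpenAI SDK's FunctionCall(name, arguments) constructor; all values are illustrative:

// Sketch only; values are illustrative.
var functionCall = new FunctionCall("Time-GetDate", "{\"format\":\"yyyy-MM-dd\"}");

OpenAIFunctionResponse parsed = OpenAIFunctionResponse.FromFunctionCall(functionCall);
// parsed.PluginName   == "Time"     (text before the "-" separator)
// parsed.FunctionName == "GetDate"  (text after it)
// parsed.Parameters["format"] holds "yyyy-MM-dd" (values come back as JSON elements)

// The removed lookup helper at the top of this diff then resolved the function from the
// kernel's collection (plugin + name first, falling back to name alone) and copied each
// parameter into a ContextVariables instance before invocation.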
- -using System.Collections.Generic; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -internal sealed class TextStreamingResult : ITextStreamingResult, ITextResult -{ - private readonly StreamingChoice _choice; - - public ModelResult ModelResult { get; } - - public TextStreamingResult(StreamingCompletions resultData, StreamingChoice choice) - { - this.ModelResult = new ModelResult(resultData); - this._choice = choice; - } - - public async Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - var fullMessage = new StringBuilder(); - await foreach (var message in this._choice.GetTextStreaming(cancellationToken).ConfigureAwait(false)) - { - fullMessage.Append(message); - } - - return fullMessage.ToString(); - } - - public IAsyncEnumerable GetCompletionStreamingAsync(CancellationToken cancellationToken = default) - { - return this._choice.GetTextStreaming(cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/AzureChatCompletion.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/AzureChatCompletion.cs deleted file mode 100644 index dc4d7ed845dd..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/AzureChatCompletion.cs +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; - -/// -/// Azure OpenAI chat completion client. -/// TODO: forward ETW logging to ILogger, see https://learn.microsoft.com/en-us/dotnet/azure/sdk/logging -/// -public sealed class AzureChatCompletion : AzureOpenAIClientBase, IChatCompletion, ITextCompletion -{ - /// - /// Create an instance of the Azure OpenAI chat completion connector with API key auth - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureChatCompletion( - string modelId, - string endpoint, - string apiKey, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) : base(modelId, endpoint, apiKey, httpClient, loggerFactory) - { - } - - /// - /// Create an instance of the Azure OpenAI chat completion connector with AAD auth - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Custom for HTTP requests. 
- /// The to use for logging. If null, no logging will be performed. - public AzureChatCompletion( - string modelId, - string endpoint, - TokenCredential credentials, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) : base(modelId, endpoint, credentials, httpClient, loggerFactory) - { - } - - /// - /// Creates a new AzureChatCompletion client instance using the specified OpenAIClient - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . - /// The to use for logging. If null, no logging will be performed. - public AzureChatCompletion( - string modelId, - OpenAIClient openAIClient, - ILoggerFactory? loggerFactory = null) : base(modelId, openAIClient, loggerFactory) - { - } - - /// - public Task> GetChatCompletionsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetChatResultsAsync(chat, requestSettings, cancellationToken); - } - - /// - public IAsyncEnumerable GetStreamingChatCompletionsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetChatStreamingResultsAsync(chat, requestSettings, cancellationToken); - } - - /// - public ChatHistory CreateNewChat(string? instructions = null) - { - return InternalCreateNewChat(instructions); - } - - /// - public IAsyncEnumerable GetStreamingCompletionsAsync( - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetChatStreamingResultsAsTextAsync(text, requestSettings, cancellationToken); - } - - /// - public Task> GetCompletionsAsync( - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetChatResultsAsTextAsync(text, requestSettings, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatCompletion.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatCompletion.cs deleted file mode 100644 index a54acfd2fd89..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatCompletion.cs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; - -/// -/// OpenAI chat completion client. -/// TODO: forward ETW logging to ILogger, see https://learn.microsoft.com/en-us/dotnet/azure/sdk/logging -/// -public sealed class OpenAIChatCompletion : OpenAIClientBase, IChatCompletion, ITextCompletion -{ - /// - /// Create an instance of the OpenAI chat completion connector - /// - /// Model name - /// OpenAI API Key - /// OpenAI Organization Id (usually optional) - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. 
- public OpenAIChatCompletion( - string modelId, - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) : base(modelId, apiKey, organization, httpClient, loggerFactory) - { - } - - /// - /// Create an instance of the OpenAI chat completion connector - /// - /// Model name - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAIChatCompletion( - string modelId, - OpenAIClient openAIClient, - ILoggerFactory? loggerFactory = null) : base(modelId, openAIClient, loggerFactory) - { - } - - /// - public Task> GetChatCompletionsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetChatResultsAsync(chat, requestSettings, cancellationToken); - } - - /// - public IAsyncEnumerable GetStreamingChatCompletionsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetChatStreamingResultsAsync(chat, requestSettings, cancellationToken); - } - - /// - public ChatHistory CreateNewChat(string? instructions = null) - { - return InternalCreateNewChat(instructions); - } - - /// - public IAsyncEnumerable GetStreamingCompletionsAsync( - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetChatStreamingResultsAsTextAsync(text, requestSettings, cancellationToken); - } - - /// - public Task> GetCompletionsAsync( - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetChatResultsAsTextAsync(text, requestSettings, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatHistory.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatHistory.cs deleted file mode 100644 index d2054a9e6379..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletion/OpenAIChatHistory.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; - -/// -/// OpenAI Chat content -/// See https://platform.openai.com/docs/guides/chat for details -/// -public class OpenAIChatHistory : ChatHistory -{ - /// - /// Create a new and empty chat history - /// - /// Optional instructions for the assistant - public OpenAIChatHistory(string? assistantInstructions = null) - { - if (!assistantInstructions.IsNullOrWhitespace()) - { - this.AddSystemMessage(assistantInstructions); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/AzureChatCompletionWithData.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/AzureChatCompletionWithData.cs deleted file mode 100644 index 6ad5b799016b..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/AzureChatCompletionWithData.cs +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
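For context on the chat surface being deleted here, both connectors implemented IChatCompletion/ITextCompletion with the same call pattern. A rough usage sketch, assuming the pre-1.0 interfaces shown in this diff; the model names, key, endpoint, and the DefaultAzureCredential choice are placeholders or assumptions:

// Sketch only (inside an async method); all identifiers below are placeholders.
IChatCompletion openAIChat = new OpenAIChatCompletion(modelId: "gpt-3.5-turbo", apiKey: "<openai-key>");

// The Azure variant exposed the same surface; AAD auth accepted any TokenCredential
// (DefaultAzureCredential from Azure.Identity is assumed here purely for illustration).
IChatCompletion azureChat = new AzureChatCompletion(
    modelId: "my-gpt-35-deployment",
    endpoint: "https://contoso.openai.azure.com",
    credentials: new DefaultAzureCredential());

ChatHistory chat = openAIChat.CreateNewChat("You are a helpful assistant.");
chat.AddUserMessage("Hello!");

var results = await openAIChat.GetChatCompletionsAsync(chat);
var reply = await results[0].GetChatMessageAsync();
Console.WriteLine(reply.Content);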
- -using System; -using System.Collections.Generic; -using System.Globalization; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -/// -/// Azure OpenAI Chat Completion with data client. -/// More information: -/// -public sealed class AzureChatCompletionWithData : IChatCompletion, ITextCompletion -{ - /// - /// Initializes a new instance of the class. - /// - /// Instance of class with completion configuration. - /// Custom for HTTP requests. - /// Instance of to use for logging. - public AzureChatCompletionWithData( - AzureChatCompletionWithDataConfig config, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this.ValidateConfig(config); - - this._config = config; - - this._httpClient = httpClient ?? new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(this.GetType()) : NullLogger.Instance; - } - - /// - public ChatHistory CreateNewChat(string? instructions = null) - { - return new OpenAIChatHistory(instructions); - } - - /// - public async Task> GetChatCompletionsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(chat); - - OpenAIRequestSettings chatRequestSettings = OpenAIRequestSettings.FromRequestSettings(requestSettings); - - ValidateMaxTokens(chatRequestSettings.MaxTokens); - - return await this.ExecuteCompletionRequestAsync(chat, chatRequestSettings, cancellationToken).ConfigureAwait(false); - } - - /// - public IAsyncEnumerable GetStreamingChatCompletionsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(chat); - - OpenAIRequestSettings chatRequestSettings = OpenAIRequestSettings.FromRequestSettings(requestSettings); - - ValidateMaxTokens(chatRequestSettings.MaxTokens); - - return this.ExecuteCompletionStreamingRequestAsync(chat, chatRequestSettings, cancellationToken); - } - - /// - public async Task> GetCompletionsAsync( - string text, - AIRequestSettings? requestSettings, - CancellationToken cancellationToken = default) - { - OpenAIRequestSettings chatRequestSettings = OpenAIRequestSettings.FromRequestSettings(requestSettings); - - var chat = this.PrepareChatHistory(text, chatRequestSettings); - - return (await this.GetChatCompletionsAsync(chat, chatRequestSettings, cancellationToken).ConfigureAwait(false)) - .OfType() - .ToList(); - } - - /// - public async IAsyncEnumerable GetStreamingCompletionsAsync( - string text, - AIRequestSettings? 
requestSettings, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - OpenAIRequestSettings chatRequestSettings = OpenAIRequestSettings.FromRequestSettings(requestSettings); - - var chat = this.PrepareChatHistory(text, chatRequestSettings); - - IAsyncEnumerable results = this.GetStreamingChatCompletionsAsync(chat, chatRequestSettings, cancellationToken); - await foreach (var result in results) - { - yield return (ITextStreamingResult)result; - } - } - - #region private ================================================================================ - - private const string DefaultApiVersion = "2023-06-01-preview"; - - private readonly AzureChatCompletionWithDataConfig _config; - - private readonly HttpClient _httpClient; - private readonly ILogger _logger; - - private void ValidateConfig(AzureChatCompletionWithDataConfig config) - { - Verify.NotNull(config); - - Verify.NotNullOrWhiteSpace(config.CompletionModelId); - Verify.NotNullOrWhiteSpace(config.CompletionEndpoint); - Verify.NotNullOrWhiteSpace(config.CompletionApiKey); - Verify.NotNullOrWhiteSpace(config.DataSourceEndpoint); - Verify.NotNullOrWhiteSpace(config.DataSourceApiKey); - Verify.NotNullOrWhiteSpace(config.DataSourceIndex); - } - - private static void ValidateMaxTokens(int? maxTokens) - { - if (maxTokens.HasValue && maxTokens < 1) - { - throw new SKException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); - } - } - - private async Task> ExecuteCompletionRequestAsync( - ChatHistory chat, - OpenAIRequestSettings requestSettings, - CancellationToken cancellationToken = default) - { - using var request = this.GetRequest(chat, requestSettings, isStreamEnabled: false); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - - var body = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - - var chatWithDataResponse = this.DeserializeResponse(body); - - return chatWithDataResponse.Choices.Select(choice => new ChatWithDataResult(chatWithDataResponse, choice)).ToList(); - } - - private async IAsyncEnumerable ExecuteCompletionStreamingRequestAsync( - ChatHistory chat, - OpenAIRequestSettings requestSettings, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - using var request = this.GetRequest(chat, requestSettings, isStreamEnabled: true); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - - await foreach (var result in this.GetStreamingResultsAsync(response)) - { - yield return result; - } - } - - private async Task SendRequestAsync( - HttpRequestMessage request, - CancellationToken cancellationToken = default) - { - request.Headers.Add("User-Agent", Telemetry.HttpUserAgent); - request.Headers.Add("Api-Key", this._config.CompletionApiKey); - - try - { - return await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (HttpOperationException ex) - { - this._logger.LogError( - "Error occurred on chat completion with data request execution: {ExceptionMessage}", ex.Message); - - throw; - } - } - - private async IAsyncEnumerable GetStreamingResultsAsync(HttpResponseMessage response) - { - const string ServerEventPayloadPrefix = "data:"; - - using var stream = await response.Content.ReadAsStreamAndTranslateExceptionAsync().ConfigureAwait(false); - using var reader = new StreamReader(stream); - - while (!reader.EndOfStream) - { - var body = await 
reader.ReadLineAsync().ConfigureAwait(false); - - if (string.IsNullOrWhiteSpace(body)) - { - continue; - } - - if (body.StartsWith(ServerEventPayloadPrefix, StringComparison.Ordinal)) - { - body = body.Substring(ServerEventPayloadPrefix.Length); - } - - var chatWithDataResponse = this.DeserializeResponse(body); - - foreach (var choice in chatWithDataResponse.Choices) - { - yield return new ChatWithDataStreamingResult(chatWithDataResponse, choice); - } - } - } - - private T DeserializeResponse(string body) - { - var response = Json.Deserialize(body); - - if (response is null) - { - const string ErrorMessage = "Error occurred on chat completion with data response deserialization"; - - this._logger.LogError(ErrorMessage); - - throw new SKException(ErrorMessage); - } - - return response; - } - - private HttpRequestMessage GetRequest( - ChatHistory chat, - OpenAIRequestSettings requestSettings, - bool isStreamEnabled) - { - var payload = new ChatWithDataRequest - { - Temperature = requestSettings.Temperature, - TopP = requestSettings.TopP, - IsStreamEnabled = isStreamEnabled, - StopSequences = requestSettings.StopSequences, - MaxTokens = requestSettings.MaxTokens, - PresencePenalty = requestSettings.PresencePenalty, - FrequencyPenalty = requestSettings.FrequencyPenalty, - TokenSelectionBiases = requestSettings.TokenSelectionBiases, - DataSources = this.GetDataSources(), - Messages = this.GetMessages(chat) - }; - - return HttpRequest.CreatePostRequest(this.GetRequestUri(), payload); - } - - private List GetDataSources() - { - return new List - { - new() { - Parameters = new ChatWithDataSourceParameters - { - Endpoint = this._config.DataSourceEndpoint, - ApiKey = this._config.DataSourceApiKey, - IndexName = this._config.DataSourceIndex - } - } - }; - } - - private List GetMessages(ChatHistory chat) - { - return chat - .Select(message => new ChatWithDataMessage - { - Role = message.Role.Label, - Content = message.Content - }) - .ToList(); - } - - private ChatHistory PrepareChatHistory(string text, OpenAIRequestSettings requestSettings) - { - var chat = this.CreateNewChat(requestSettings.ChatSystemPrompt); - - chat.AddUserMessage(text); - - return chat; - } - - private string GetRequestUri() - { - const string EndpointUriFormat = "{0}/openai/deployments/{1}/extensions/chat/completions?api-version={2}"; - - var apiVersion = this._config.CompletionApiVersion; - - if (string.IsNullOrWhiteSpace(apiVersion)) - { - apiVersion = DefaultApiVersion; - } - - return string.Format( - CultureInfo.InvariantCulture, - EndpointUriFormat, - this._config.CompletionEndpoint.TrimEnd('/'), - this._config.CompletionModelId, - apiVersion); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/AzureChatCompletionWithDataConfig.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/AzureChatCompletionWithDataConfig.cs deleted file mode 100644 index dc4d58bb503c..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/AzureChatCompletionWithDataConfig.cs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -/// -/// Required configuration for Azure OpenAI chat completion with data. 
-/// More information: -/// -public class AzureChatCompletionWithDataConfig -{ - /// - /// Azure OpenAI model ID or deployment name, see - /// - public string CompletionModelId { get; set; } = string.Empty; - - /// - /// Azure OpenAI deployment URL, see - /// - public string CompletionEndpoint { get; set; } = string.Empty; - - /// - /// Azure OpenAI API key, see - /// - public string CompletionApiKey { get; set; } = string.Empty; - - /// - /// Azure OpenAI Completion API version (e.g. 2023-06-01-preview) - /// - public string CompletionApiVersion { get; set; } = string.Empty; - - /// - /// Data source endpoint URL. - /// For Azure Cognitive Search, see - /// - public string DataSourceEndpoint { get; set; } = string.Empty; - - /// - /// Data source API key. - /// For Azure Cognitive Search keys, see - /// - public string DataSourceApiKey { get; set; } = string.Empty; - - /// - /// Data source index name. - /// For Azure Cognitive Search indexes, see - /// - public string DataSourceIndex { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataChoice.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataChoice.cs deleted file mode 100644 index 842d236d0586..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataChoice.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataChoice -{ - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs deleted file mode 100644 index 533d87d30cee..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -internal sealed class ChatWithDataMessage -{ - [JsonPropertyName("role")] - public string Role { get; set; } = string.Empty; - - [JsonPropertyName("content")] - public string Content { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataModelResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataModelResult.cs deleted file mode 100644 index 8c18c24e9545..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataModelResult.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -/// -/// Represents result of a chat completion with data. -/// -public class ChatWithDataModelResult -{ - /// - /// A unique identifier associated with chat completion with data response. 
- /// - public string Id { get; } - - /// - /// The first timestamp associated with generation activity for chat completion with data response, - /// represented as seconds since the beginning of the Unix epoch of 00:00 on 1 Jan 1970. - /// - public DateTimeOffset Created { get; } - - /// - /// Content from data source, including citations. - /// For more information see . - /// - public string? ToolContent { get; set; } - - /// - /// Initializes a new instance of the class. - /// - /// A unique identifier associated with chat completion with data response. - /// The first timestamp associated with generation activity for chat completion with data response. - public ChatWithDataModelResult(string id, DateTimeOffset created) - { - this.Id = id; - this.Created = created; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs deleted file mode 100644 index ab57062b3c3b..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -internal sealed class ChatWithDataRequest -{ - [JsonPropertyName("temperature")] - public double Temperature { get; set; } = 0; - - [JsonPropertyName("top_p")] - public double TopP { get; set; } = 0; - - [JsonPropertyName("stream")] - public bool IsStreamEnabled { get; set; } - - [JsonPropertyName("stop")] - public IList StopSequences { get; set; } = Array.Empty(); - - [JsonPropertyName("max_tokens")] - public int? MaxTokens { get; set; } - - [JsonPropertyName("presence_penalty")] - public double PresencePenalty { get; set; } = 0; - - [JsonPropertyName("frequency_penalty")] - public double FrequencyPenalty { get; set; } = 0; - - [JsonPropertyName("logit_bias")] - public IDictionary TokenSelectionBiases { get; set; } = new Dictionary(); - - [JsonPropertyName("dataSources")] - public IList DataSources { get; set; } = Array.Empty(); - - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs deleted file mode 100644 index cb39255c76c9..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
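Putting the removed with-data pieces together: AzureChatCompletionWithDataConfig (deleted just above) fed AzureChatCompletionWithData, which bypassed the Azure SDK and called the extensions endpoint over raw HTTP. A rough sketch of the old wiring, assuming the types in this diff; every endpoint, key, and index value is a placeholder:

// Sketch only; all values are placeholders.
var config = new AzureChatCompletionWithDataConfig
{
    CompletionModelId = "gpt-35-turbo",
    CompletionEndpoint = "https://contoso.openai.azure.com",
    CompletionApiKey = "<azure-openai-key>",
    CompletionApiVersion = "2023-06-01-preview",              // the default used when left empty
    DataSourceEndpoint = "https://contoso.search.windows.net",
    DataSourceApiKey = "<cognitive-search-key>",
    DataSourceIndex = "my-index"
};

IChatCompletion chatWithData = new AzureChatCompletionWithData(config);

// Requests were posted to
//   {CompletionEndpoint}/openai/deployments/{CompletionModelId}/extensions/chat/completions?api-version={CompletionApiVersion}
// and streamed responses were read line by line, stripping each server-sent event's
// leading "data:" prefix before JSON deserialization.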
- -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -#pragma warning disable CA1812 // Avoid uninstantiated internal classes - -[Serializable] -internal sealed class ChatWithDataResponse -{ - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - [JsonPropertyName("created")] - public int Created { get; set; } = default; - - [JsonPropertyName("choices")] - public IList Choices { get; set; } = Array.Empty(); -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataResult.cs deleted file mode 100644 index 451edf5c8aba..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataResult.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -internal sealed class ChatWithDataResult : IChatResult, ITextResult -{ - public ModelResult ModelResult { get; } - - public ChatWithDataResult(ChatWithDataResponse response, ChatWithDataChoice choice) - { - Verify.NotNull(response); - Verify.NotNull(choice); - - this.ModelResult = new(new ChatWithDataModelResult(response.Id, DateTimeOffset.FromUnixTimeSeconds(response.Created)) - { - ToolContent = this.GetToolContent(choice) - }); - - this._choice = choice; - } - - public Task GetChatMessageAsync(CancellationToken cancellationToken = default) - { - var message = this._choice.Messages - .FirstOrDefault(message => message.Role.Equals(AuthorRole.Assistant.Label, StringComparison.Ordinal)); - - return Task.FromResult(new SKChatMessage(message.Role, message.Content)); - } - - public async Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - var message = await this.GetChatMessageAsync(cancellationToken).ConfigureAwait(false); - - return message.Content; - } - - #region private ================================================================================ - - private readonly ChatWithDataChoice _choice; - - private string? GetToolContent(ChatWithDataChoice choice) - { - var message = choice.Messages - .FirstOrDefault(message => message.Role.Equals(AuthorRole.Tool.Label, StringComparison.Ordinal)); - - return message?.Content; - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSource.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSource.cs deleted file mode 100644 index 3877e5a21ad1..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSource.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
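A note on the result mapping deleted above: a with-data choice carries several role-tagged messages, and the removed result type returned the assistant message as the completion while exposing the tool message (data source citations) through ChatWithDataModelResult.ToolContent. A small sketch of that selection, mirroring the internal types in this diff:

// Sketch only: mirrors the removed (internal) selection logic.
static (string? completion, string? toolContent) SelectMessages(ChatWithDataChoice choice)
{
    var assistant = choice.Messages.FirstOrDefault(
        m => m.Role.Equals(AuthorRole.Assistant.Label, StringComparison.Ordinal));

    var tool = choice.Messages.FirstOrDefault(
        m => m.Role.Equals(AuthorRole.Tool.Label, StringComparison.Ordinal));

    return (assistant?.Content, tool?.Content);
}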
- -using System; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -internal sealed class ChatWithDataSource -{ - [JsonPropertyName("type")] - public string Type { get; set; } = ChatWithDataSourceType.AzureCognitiveSearch.ToString(); - - [JsonPropertyName("parameters")] - public ChatWithDataSourceParameters Parameters { get; set; } = new ChatWithDataSourceParameters(); -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSourceParameters.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSourceParameters.cs deleted file mode 100644 index e0e5cb0de81d..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSourceParameters.cs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -internal sealed class ChatWithDataSourceParameters -{ - [JsonPropertyName("endpoint")] - public string Endpoint { get; set; } = string.Empty; - - [JsonPropertyName("key")] - public string ApiKey { get; set; } = string.Empty; - - [JsonPropertyName("indexName")] - public string IndexName { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSourceType.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSourceType.cs deleted file mode 100644 index 4aadf06e149f..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataSourceType.cs +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -internal enum ChatWithDataSourceType -{ - AzureCognitiveSearch -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingChoice.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingChoice.cs deleted file mode 100644 index 1718e386279a..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingChoice.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataStreamingChoice -{ - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingDelta.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingDelta.cs deleted file mode 100644 index 1096ed22c4a5..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingDelta.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -internal sealed class ChatWithDataStreamingDelta -{ - [JsonPropertyName("role")] - public string? Role { get; set; } - - [JsonPropertyName("content")] - public string Content { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingMessage.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingMessage.cs deleted file mode 100644 index 80c1f258e2ca..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingMessage.cs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataStreamingMessage -{ - [JsonPropertyName("delta")] - public ChatWithDataStreamingDelta Delta { get; set; } = new(); - - [JsonPropertyName("end_turn")] - public bool EndTurn { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs deleted file mode 100644 index ce0c8d0e637c..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -[Serializable] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataStreamingResponse -{ - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - [JsonPropertyName("created")] - public int Created { get; set; } = default; - - [JsonPropertyName("choices")] - public IList Choices { get; set; } = Array.Empty(); -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResult.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResult.cs deleted file mode 100644 index 92ec4e463544..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResult.cs +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
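For reference while reviewing the streaming DTOs deleted above and below, one chunk of the with-data stream deserializes into roughly the shape sketched here; the property names follow the JsonPropertyName attributes on those types, and the values are made up:

// Sketch only; the payload values are illustrative.
string json = @"{
  ""id"": ""chatcmpl-123"",
  ""created"": 1700000000,
  ""choices"": [ {
    ""messages"": [ {
      ""delta"": { ""role"": ""assistant"", ""content"": ""Hello"" },
      ""end_turn"": false
    } ]
  } ]
}";

var chunk = System.Text.Json.JsonSerializer.Deserialize<ChatWithDataStreamingResponse>(json);
// chunk.Choices[0].Messages[0].Delta.Content == "Hello"
// The removed streaming result skipped deltas with end_turn == true or a "tool" role
// when producing chat messages.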
- -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; - -internal sealed class ChatWithDataStreamingResult : IChatStreamingResult, ITextStreamingResult, IChatResult, ITextResult -{ - public ModelResult ModelResult { get; } - - public ChatWithDataStreamingResult(ChatWithDataStreamingResponse response, ChatWithDataStreamingChoice choice) - { - Verify.NotNull(response); - Verify.NotNull(choice); - - this.ModelResult = new(new ChatWithDataModelResult(response.Id, DateTimeOffset.FromUnixTimeSeconds(response.Created)) - { - ToolContent = this.GetToolContent(choice) - }); - - this._choice = choice; - } - - public async Task GetChatMessageAsync(CancellationToken cancellationToken = default) - { - var message = this._choice.Messages.FirstOrDefault(this.IsValidMessage); - - var result = new SKChatMessage(AuthorRole.Assistant.Label, message?.Delta?.Content ?? string.Empty); - - return await Task.FromResult(result).ConfigureAwait(false); - } - - public async IAsyncEnumerable GetStreamingChatMessageAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var message = await this.GetChatMessageAsync(cancellationToken).ConfigureAwait(false); - - if (message.Content is { Length: > 0 }) - { - yield return message; - } - } - - public async IAsyncEnumerable GetCompletionStreamingAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) - { - await foreach (var result in this.GetStreamingChatMessageAsync(cancellationToken)) - { - if (result.Content is string content and { Length: > 0 }) - { - yield return content; - } - } - } - - public async Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - var message = await this.GetChatMessageAsync(cancellationToken).ConfigureAwait(false); - - return message.Content; - } - - #region private ================================================================================ - - private readonly ChatWithDataStreamingChoice _choice; - - private bool IsValidMessage(ChatWithDataStreamingMessage message) - { - return !message.EndTurn && - (message.Delta.Role is null || !message.Delta.Role.Equals(AuthorRole.Tool.Label, StringComparison.Ordinal)); - } - - private string? GetToolContent(ChatWithDataStreamingChoice choice) - { - var message = choice.Messages - .FirstOrDefault(message => message.Delta.Role is not null && message.Delta.Role.Equals(AuthorRole.Tool.Label, StringComparison.Ordinal)); - - return message?.Delta?.Content; - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/Connectors.AI.OpenAI.csproj b/dotnet/src/Connectors/Connectors.AI.OpenAI/Connectors.AI.OpenAI.csproj deleted file mode 100644 index bbf1fd8fc532..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/Connectors.AI.OpenAI.csproj +++ /dev/null @@ -1,30 +0,0 @@ - - - - - Microsoft.SemanticKernel.Connectors.AI.OpenAI - $(AssemblyName) - netstandard2.0 - true - $(NoWarn);NU5104 - - - - - - - - - Semantic Kernel - OpenAI and Azure OpenAI connectors - Semantic Kernel connectors for OpenAI and Azure OpenAI. 
Contains clients for text completion, chat completion, embedding and DALL-E image generation. - - - - - - - - - - - diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/CustomClient/OpenAIClientBase.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/CustomClient/OpenAIClientBase.cs deleted file mode 100644 index 1a098ceac4e3..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/CustomClient/OpenAIClientBase.cs +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.CustomClient; - -/// Base type for OpenAI clients. -public abstract class OpenAIClientBase -{ - /// - /// Initializes a new instance of the class. - /// - /// The HttpClient used for making HTTP requests. - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. - private protected OpenAIClientBase(HttpClient? httpClient, ILoggerFactory? loggerFactory = null) - { - this._httpClient = httpClient ?? new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(this.GetType()) : NullLogger.Instance; - } - - /// Adds headers to use for OpenAI HTTP requests. - private protected virtual void AddRequestHeaders(HttpRequestMessage request) - { - request.Headers.Add("User-Agent", Telemetry.HttpUserAgent); - } - - /// - /// Asynchronously sends a text embedding request for the text. - /// - /// URL for the text embedding request API - /// Request payload - /// The to monitor for cancellation requests. The default is . - /// List of text embeddings - private protected async Task>> ExecuteTextEmbeddingRequestAsync( - string url, - string requestBody, - CancellationToken cancellationToken = default) - { - var result = await this.ExecutePostRequestAsync(url, requestBody, cancellationToken).ConfigureAwait(false); - if (result.Embeddings is not { Count: >= 1 }) - { - throw new SKException("Embeddings not found"); - } - - return result.Embeddings.Select(e => e.Values).ToList(); - } - - /// - /// Run the HTTP request to generate a list of images - /// - /// URL for the image generation request API - /// Request payload - /// Function to invoke to extract the desired portion of the image generation response. - /// The to monitor for cancellation requests. The default is . - /// List of image URLs - private protected async Task> ExecuteImageGenerationRequestAsync( - string url, - string requestBody, - Func extractResponseFunc, - CancellationToken cancellationToken = default) - { - var result = await this.ExecutePostRequestAsync(url, requestBody, cancellationToken).ConfigureAwait(false); - return result.Images.Select(extractResponseFunc).ToList(); - } - - #region private ================================================================================ - - /// - /// Logger - /// - private readonly ILogger _logger; - - /// - /// The HttpClient used for making HTTP requests. 
- /// - private readonly HttpClient _httpClient; - - private protected async Task ExecutePostRequestAsync(string url, string requestBody, CancellationToken cancellationToken = default) - { - using var content = new StringContent(requestBody, Encoding.UTF8, "application/json"); - using var response = await this.ExecuteRequestAsync(url, HttpMethod.Post, content, cancellationToken).ConfigureAwait(false); - string responseJson = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - T result = this.JsonDeserialize(responseJson); - return result; - } - - private protected T JsonDeserialize(string responseJson) - { - var result = Json.Deserialize(responseJson); - if (result is null) - { - throw new SKException("Response JSON parse error"); - } - - return result; - } - - private protected async Task ExecuteRequestAsync(string url, HttpMethod method, HttpContent? content, CancellationToken cancellationToken = default) - { - using var request = new HttpRequestMessage(method, url); - - this.AddRequestHeaders(request); - - if (content != null) - { - request.Content = content; - } - - var response = await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - - this._logger.LogDebug("HTTP response: {0} {1}", (int)response.StatusCode, response.StatusCode.ToString("G")); - - return response; - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureImageGenerationResponse.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureImageGenerationResponse.cs deleted file mode 100644 index 961a5aa361ec..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureImageGenerationResponse.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; - -/// -/// Image generation response -/// -public class AzureImageGenerationResponse -{ - /// - /// Image generation result - /// - [JsonPropertyName("result")] - public ImageGenerationResponse? Result { get; set; } - - /// - /// Request Id - /// - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - /// - /// Request Status - /// - [JsonPropertyName("status")] - public string Status { get; set; } = string.Empty; - - /// - /// Creation time - /// - [JsonPropertyName("created")] - public int Created { get; set; } - - /// - /// Expiration time of the URL - /// - [JsonPropertyName("expires")] - public int Expires { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureImageOperationStatus.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureImageOperationStatus.cs deleted file mode 100644 index 1abe033b2780..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureImageOperationStatus.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; - -/// -/// Azure image generation response status -/// -/// -public static class AzureImageOperationStatus -{ - /// - /// Image generation Succeeded - /// - public const string Succeeded = "succeeded"; - - /// - /// Image generation Failed - /// - public const string Failed = "failed"; - - /// - /// Task is running - /// - public const string Running = "running"; - - /// - /// Task is queued but hasn't started yet - /// - public const string NotRunning = "notRunning"; - - /// - /// The image has been removed from Azure's server. - /// - public const string Deleted = "deleted"; - - /// - /// Task has timed out - /// - public const string Cancelled = "cancelled"; -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureOpenAIImageGeneration.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureOpenAIImageGeneration.cs deleted file mode 100644 index b0f110c65714..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/AzureOpenAIImageGeneration.cs +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI.ImageGeneration; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.CustomClient; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; - -/// -/// Azure OpenAI Image generation -/// -/// -public class AzureOpenAIImageGeneration : OpenAIClientBase, IImageGeneration -{ - /// - /// Generation Image Operation path - /// - private const string GenerationImageOperation = "openai/images/generations:submit"; - - /// - /// Get Image Operation path - /// - private const string GetImageOperation = "openai/operations/images"; - - /// - /// Azure OpenAI REST API endpoint - /// - private readonly string _endpoint; - - /// - /// Azure OpenAI API key - /// - private readonly string _apiKey; - - /// - /// Maximum number of attempts to retrieve the image generation operation result. - /// - private readonly int _maxRetryCount; - - /// - /// Azure OpenAI Endpoint ApiVersion - /// - private readonly string _apiVersion; - - /// - /// Create a new instance of Azure OpenAI image generation service - /// - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Custom for HTTP requests. - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. - /// Maximum number of attempts to retrieve the image generation operation result. - /// Azure OpenAI Endpoint ApiVersion - public AzureOpenAIImageGeneration(string endpoint, string apiKey, HttpClient? httpClient = null, ILoggerFactory? 
loggerFactory = null, int maxRetryCount = 5, string apiVersion = "2023-06-01-preview") : base(httpClient, loggerFactory) - { - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - - this._endpoint = endpoint; - this._apiKey = apiKey; - this._maxRetryCount = maxRetryCount; - this._apiVersion = apiVersion; - } - - /// - /// Create a new instance of Azure OpenAI image generation service - /// - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Custom for HTTP requests. - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. - /// Maximum number of attempts to retrieve the image generation operation result. - /// Azure OpenAI Endpoint ApiVersion - public AzureOpenAIImageGeneration(string apiKey, HttpClient httpClient, string? endpoint = null, ILoggerFactory? loggerFactory = null, int maxRetryCount = 5, string apiVersion = "2023-06-01-preview") : base(httpClient, loggerFactory) - { - Verify.NotNull(httpClient); - Verify.NotNullOrWhiteSpace(apiKey); - - if (httpClient.BaseAddress == null && string.IsNullOrEmpty(endpoint)) - { - throw new SKException("The HttpClient BaseAddress and endpoint are both null or empty. Please ensure at least one is provided."); - } - - endpoint = !string.IsNullOrEmpty(endpoint) ? endpoint! : httpClient.BaseAddress!.AbsoluteUri; - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - - this._endpoint = endpoint; - this._apiKey = apiKey; - this._maxRetryCount = maxRetryCount; - this._apiVersion = apiVersion; - } - - /// - public async Task GenerateImageAsync(string description, int width, int height, CancellationToken cancellationToken = default) - { - var operationId = await this.StartImageGenerationAsync(description, width, height, cancellationToken).ConfigureAwait(false); - var result = await this.GetImageGenerationResultAsync(operationId, cancellationToken).ConfigureAwait(false); - - if (result.Result is null) - { - throw new SKException("Azure Image Generation null response"); - } - - if (result.Result.Images.Count == 0) - { - throw new SKException("Azure Image Generation result not found"); - } - - return result.Result.Images.First().Url; - } - - /// - /// Start an image generation task - /// - /// Image description - /// Image width in pixels - /// Image height in pixels - /// The to monitor for cancellation requests. The default is . - /// The operationId that identifies the original image generation request. 
- private async Task StartImageGenerationAsync(string description, int width, int height, CancellationToken cancellationToken = default) - { - Verify.NotNull(description); - if (width != height || (width != 256 && width != 512 && width != 1024)) - { - throw new ArgumentOutOfRangeException(nameof(width), width, "OpenAI can generate only square images of size 256x256, 512x512, or 1024x1024."); - } - - var requestBody = Json.Serialize(new ImageGenerationRequest - { - Prompt = description, - Size = $"{width}x{height}", - Count = 1 - }); - - var uri = this.GetUri(GenerationImageOperation); - var result = await this.ExecutePostRequestAsync(uri, requestBody, cancellationToken).ConfigureAwait(false); - - if (result == null || string.IsNullOrWhiteSpace(result.Id)) - { - throw new SKException("Response not contains result"); - } - - return result.Id; - } - - /// - /// Retrieve the results of an image generation operation. - /// - /// The operationId that identifies the original image generation request. - /// The to monitor for cancellation requests. The default is . - /// - private async Task GetImageGenerationResultAsync(string operationId, CancellationToken cancellationToken = default) - { - var operationLocation = this.GetUri(GetImageOperation, operationId); - - var retryCount = 0; - - while (true) - { - if (this._maxRetryCount == retryCount) - { - throw new SKException("Reached maximum retry attempts"); - } - - using var response = await this.ExecuteRequestAsync(operationLocation, HttpMethod.Get, null, cancellationToken).ConfigureAwait(false); - var responseJson = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - var result = this.JsonDeserialize(responseJson); - - if (result.Status.Equals(AzureImageOperationStatus.Succeeded, StringComparison.OrdinalIgnoreCase)) - { - return result; - } - else if (this.IsFailedOrCancelled(result.Status)) - { - throw new SKException($"Azure OpenAI image generation {result.Status}"); - } - - if (response.Headers.TryGetValues("retry-after", out var afterValues) && long.TryParse(afterValues.FirstOrDefault(), out var after)) - { - await Task.Delay(TimeSpan.FromSeconds(after), cancellationToken).ConfigureAwait(false); - } - - // increase retry count - retryCount++; - } - } - - private string GetUri(string operation, params string[] parameters) - { - var uri = new Azure.Core.RequestUriBuilder(); - uri.Reset(new Uri(this._endpoint)); - uri.AppendPath(operation, false); - foreach (var parameter in parameters) - { - uri.AppendPath("/" + parameter, false); - } - uri.AppendQuery("api-version", this._apiVersion); - return uri.ToString(); - } - - private bool IsFailedOrCancelled(string status) - { - return status.Equals(AzureImageOperationStatus.Failed, StringComparison.OrdinalIgnoreCase) - || status.Equals(AzureImageOperationStatus.Cancelled, StringComparison.OrdinalIgnoreCase) - || status.Equals(AzureImageOperationStatus.Deleted, StringComparison.OrdinalIgnoreCase); - } - - /// Adds headers to use for Azure OpenAI HTTP requests. 
- private protected override void AddRequestHeaders(HttpRequestMessage request) - { - request.Headers.Add("api-key", this._apiKey); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/ImageGenerationRequest.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/ImageGenerationRequest.cs deleted file mode 100644 index 2d0c17b5a640..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/ImageGenerationRequest.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; - -/// -/// Image generation request -/// -public sealed class ImageGenerationRequest -{ - /// - /// Image prompt - /// - [JsonPropertyName("prompt")] - [JsonPropertyOrder(1)] - public string Prompt { get; set; } = string.Empty; - - /// - /// Image size - /// - [JsonPropertyName("size")] - [JsonPropertyOrder(2)] - public string Size { get; set; } = "256x256"; - - /// - /// How many images to generate - /// - [JsonPropertyName("n")] - [JsonPropertyOrder(3)] - public int Count { get; set; } = 1; - - /// - /// Image format, "url" or "b64_json" - /// - [JsonPropertyName("response_format")] - [JsonPropertyOrder(4)] - public string Format { get; set; } = "url"; -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/ImageGenerationResponse.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/ImageGenerationResponse.cs deleted file mode 100644 index 6a0c29341fbe..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/ImageGenerationResponse.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; - -/// -/// Image generation response -/// -public class ImageGenerationResponse -{ - /// - /// OpenAI Image response - /// - public sealed class Image - { - /// - /// URL to the image created - /// - [JsonPropertyName("url")] - [SuppressMessage("Design", "CA1056:URI return values should not be strings", Justification = "Using the original value")] - public string Url { get; set; } = string.Empty; - - /// - /// Image content in base64 format - /// - [JsonPropertyName("b64_json")] - public string AsBase64 { get; set; } = string.Empty; - } - - /// - /// List of possible image generations - /// - [JsonPropertyName("data")] - public IList Images { get; set; } = new List(); - - /// - /// Creation time - /// - [JsonPropertyName("created")] - public int CreatedTime { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/OpenAIImageGeneration.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/OpenAIImageGeneration.cs deleted file mode 100644 index 7bd0c1bc2199..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/ImageGeneration/OpenAIImageGeneration.cs +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
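For illustration, a minimal sketch of the submit-then-poll flow implemented by the removed Azure image generation service: poll the operation status until it succeeds, treat failed, cancelled, or deleted as terminal, wait for the server-suggested interval between attempts, and give up after a fixed number of retries. The Operation record and the getOperationAsync delegate are hypothetical stand-ins for the deleted HTTP plumbing.

using System;
using System.Threading;
using System.Threading.Tasks;

internal static class ImageOperationPolling
{
    // Hypothetical shape of a polled operation: status, result URL, and optional Retry-After delay.
    internal sealed record Operation(string Status, string? ResultUrl, TimeSpan? RetryAfter);

    internal static async Task<string> WaitForResultAsync(
        Func<CancellationToken, Task<Operation>> getOperationAsync,
        int maxRetryCount,
        CancellationToken cancellationToken = default)
    {
        for (var attempt = 0; attempt < maxRetryCount; attempt++)
        {
            Operation operation = await getOperationAsync(cancellationToken).ConfigureAwait(false);

            if (string.Equals(operation.Status, "succeeded", StringComparison.OrdinalIgnoreCase))
            {
                return operation.ResultUrl ?? throw new InvalidOperationException("Operation succeeded without a result");
            }

            if (string.Equals(operation.Status, "failed", StringComparison.OrdinalIgnoreCase)
                || string.Equals(operation.Status, "cancelled", StringComparison.OrdinalIgnoreCase)
                || string.Equals(operation.Status, "deleted", StringComparison.OrdinalIgnoreCase))
            {
                throw new InvalidOperationException($"Azure OpenAI image generation {operation.Status}");
            }

            // Delay only when the server supplied a Retry-After hint, matching the deleted loop.
            await Task.Delay(operation.RetryAfter ?? TimeSpan.Zero, cancellationToken).ConfigureAwait(false);
        }

        throw new InvalidOperationException("Reached maximum retry attempts");
    }
}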
- -using System; -using System.Diagnostics; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI.ImageGeneration; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.CustomClient; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; -/// -/// A class for generating images using OpenAI's API. -/// -public class OpenAIImageGeneration : OpenAIClientBase, IImageGeneration -{ - /// - /// OpenAI REST API endpoint - /// - private const string OpenAIEndpoint = "https://api.openai.com/v1/images/generations"; - - /// - /// Optional value for the OpenAI-Organization header. - /// - private readonly string? _organizationHeaderValue; - - /// - /// Value for the authorization header. - /// - private readonly string _authorizationHeaderValue; - - /// - /// Initializes a new instance of the class. - /// - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAIImageGeneration( - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null - ) : base(httpClient, loggerFactory) - { - Verify.NotNullOrWhiteSpace(apiKey); - this._authorizationHeaderValue = $"Bearer {apiKey}"; - this._organizationHeaderValue = organization; - } - - /// Adds headers to use for OpenAI HTTP requests. - private protected override void AddRequestHeaders(HttpRequestMessage request) - { - base.AddRequestHeaders(request); - - request.Headers.Add("Authorization", this._authorizationHeaderValue); - if (!string.IsNullOrEmpty(this._organizationHeaderValue)) - { - request.Headers.Add("OpenAI-Organization", this._organizationHeaderValue); - } - } - - /// - public Task GenerateImageAsync(string description, int width, int height, CancellationToken cancellationToken = default) - { - Verify.NotNull(description); - if (width != height || (width != 256 && width != 512 && width != 1024)) - { - throw new ArgumentOutOfRangeException(nameof(width), width, "OpenAI can generate only square images of size 256x256, 512x512, or 1024x1024."); - } - - return this.GenerateImageAsync(description, width, height, "url", x => x.Url, cancellationToken); - } - - private async Task GenerateImageAsync( - string description, - int width, int height, - string format, Func extractResponse, - CancellationToken cancellationToken) - { - Debug.Assert(width == height); - Debug.Assert(width is 256 or 512 or 1024); - Debug.Assert(format is "url" or "b64_json"); - Debug.Assert(extractResponse is not null); - - var requestBody = Json.Serialize(new ImageGenerationRequest - { - Prompt = description, - Size = $"{width}x{height}", - Count = 1, - Format = format, - }); - - var list = await this.ExecuteImageGenerationRequestAsync(OpenAIEndpoint, requestBody, extractResponse!, cancellationToken).ConfigureAwait(false); - return list[0]; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelBuilderExtensions.cs deleted file mode 100644 index 8f39afe34bd6..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelBuilderExtensions.cs +++ /dev/null @@ -1,559 +0,0 @@ -// Copyright (c) 
Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.AI.ImageGeneration; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; -using Microsoft.SemanticKernel.Http; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of KernelConfig -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class to configure OpenAI and AzureOpenAI connectors. -/// -public static class OpenAIKernelBuilderExtensions -{ - #region Text Completion - - /// - /// Adds an Azure OpenAI text completion service to the list. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithAzureTextCompletionService(this KernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - { - var client = CreateAzureOpenAIClient(loggerFactory, httpHandlerFactory, deploymentName, endpoint, new AzureKeyCredential(apiKey), httpClient); - return new AzureTextCompletion(deploymentName, client, loggerFactory); - }, setAsDefault); - - return builder; - } - - /// - /// Adds an Azure OpenAI text completion service to the list. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithAzureTextCompletionService(this KernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? 
httpClient = null) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - { - var client = CreateAzureOpenAIClient(loggerFactory, httpHandlerFactory, deploymentName, endpoint, credentials, httpClient); - return new AzureTextCompletion(deploymentName, client, loggerFactory); - }, setAsDefault); - - return builder; - } - - /// - /// Adds an Azure OpenAI text completion service to the list. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Self instance - public static KernelBuilder WithAzureTextCompletionService(this KernelBuilder builder, - string deploymentName, - OpenAIClient openAIClient, - string? serviceId = null, - bool setAsDefault = false) - { - builder.WithAIService(serviceId, (loggerFactory) => - new AzureTextCompletion( - deploymentName, - openAIClient, - loggerFactory), - setAsDefault); - - return builder; - } - - /// - /// Adds the OpenAI text completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithOpenAITextCompletionService(this KernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new OpenAITextCompletion( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory), - setAsDefault); - return builder; - } - - #endregion - - #region Text Embedding - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithAzureTextEmbeddingGenerationService(this KernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? 
httpClient = null) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new AzureTextEmbeddingGeneration( - deploymentName, - endpoint, - apiKey, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory), - setAsDefault); - return builder; - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithAzureTextEmbeddingGenerationService(this KernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credential, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new AzureTextEmbeddingGeneration( - deploymentName, - endpoint, - credential, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory), - setAsDefault); - return builder; - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithOpenAITextEmbeddingGenerationService(this KernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new OpenAITextEmbeddingGeneration( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory), - setAsDefault); - return builder; - } - - #endregion - - #region Chat Completion - - /// - /// Adds the Azure OpenAI ChatGPT completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Whether to use the service also for text completion, if supported - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. 
- /// Self instance - public static KernelBuilder WithAzureChatCompletionService(this KernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - bool alsoAsTextCompletion = true, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - AzureChatCompletion Factory(ILoggerFactory loggerFactory, IDelegatingHandlerFactory httpHandlerFactory) - { - OpenAIClient client = CreateAzureOpenAIClient(loggerFactory, httpHandlerFactory, deploymentName, endpoint, new AzureKeyCredential(apiKey), httpClient); - - return new(deploymentName, client, loggerFactory); - }; - - builder.WithAIService(serviceId, Factory, setAsDefault); - - // If the class implements the text completion interface, allow to use it also for semantic functions - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(AzureChatCompletion))) - { - builder.WithAIService(serviceId, Factory, setAsDefault); - } - - return builder; - } - - /// - /// Adds the Azure OpenAI ChatGPT completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Whether to use the service also for text completion, if supported - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithAzureChatCompletionService(this KernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - bool alsoAsTextCompletion = true, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - AzureChatCompletion Factory(ILoggerFactory loggerFactory, IDelegatingHandlerFactory httpHandlerFactory) - { - OpenAIClient client = CreateAzureOpenAIClient(loggerFactory, httpHandlerFactory, deploymentName, endpoint, credentials, httpClient); - - return new(deploymentName, client, loggerFactory); - }; - - builder.WithAIService(serviceId, Factory, setAsDefault); - - // If the class implements the text completion interface, allow to use it also for semantic functions - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(AzureChatCompletion))) - { - builder.WithAIService(serviceId, Factory, setAsDefault); - } - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion with data service to the list. - /// More information: - /// - /// The instance. - /// Required configuration for Azure OpenAI chat completion with data. - /// Whether to use the service also for text completion, if supported. - /// A local identifier for the given AI service. - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithAzureChatCompletionService(this KernelBuilder builder, - AzureChatCompletionWithDataConfig config, - bool alsoAsTextCompletion = true, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? 
httpClient = null) - { - AzureChatCompletionWithData Factory(ILoggerFactory loggerFactory, IDelegatingHandlerFactory httpHandlerFactory) => new( - config, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory); - - builder.WithAIService(serviceId, Factory, setAsDefault); - - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(AzureChatCompletionWithData))) - { - builder.WithAIService(serviceId, Factory, setAsDefault); - } - - return builder; - } - - /// - /// Adds the OpenAI ChatGPT completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// Whether to use the service also for text completion, if supported - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithOpenAIChatCompletionService(this KernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - bool alsoAsTextCompletion = true, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - OpenAIChatCompletion Factory(ILoggerFactory loggerFactory, IDelegatingHandlerFactory httpHandlerFactory) => new( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory); - - builder.WithAIService(serviceId, Factory, setAsDefault); - - // If the class implements the text completion interface, allow to use it also for semantic functions - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(OpenAIChatCompletion))) - { - builder.WithAIService(serviceId, Factory, setAsDefault); - } - - return builder; - } - - /// - /// Adds the Azure OpenAI ChatGPT completion service to the list. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// Whether to use the service also for text completion, if supported - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Self instance - public static KernelBuilder WithAzureChatCompletionService(this KernelBuilder builder, - string deploymentName, - OpenAIClient openAIClient, - bool alsoAsTextCompletion = true, - string? serviceId = null, - bool setAsDefault = false) - { - AzureChatCompletion Factory(ILoggerFactory loggerFactory) - { - return new(deploymentName, openAIClient, loggerFactory); - }; - - builder.WithAIService(serviceId, Factory, setAsDefault); - - // If the class implements the text completion interface, allow to use it also for semantic functions - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(AzureChatCompletion))) - { - builder.WithAIService(serviceId, Factory, setAsDefault); - } - - return builder; - } - - /// - /// Adds the OpenAI ChatGPT completion service to the list. - /// See https://platform.openai.com/docs for service details. 
- /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// Whether to use the service also for text completion, if supported - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Self instance - public static KernelBuilder WithOpenAIChatCompletionService(this KernelBuilder builder, - string deploymentName, - OpenAIClient openAIClient, - bool alsoAsTextCompletion = true, - string? serviceId = null, - bool setAsDefault = false) - { - OpenAIChatCompletion Factory(ILoggerFactory loggerFactory) - { - return new(deploymentName, openAIClient, loggerFactory); - }; - - builder.WithAIService(serviceId, Factory, setAsDefault); - - // If the class implements the text completion interface, allow to use it also for semantic functions - if (alsoAsTextCompletion && typeof(ITextCompletion).IsAssignableFrom(typeof(AzureChatCompletion))) - { - builder.WithAIService(serviceId, Factory, setAsDefault); - } - - return builder; - } - - #endregion - - #region Images - - /// - /// Add the OpenAI DallE image generation service to the list - /// - /// The instance - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static KernelBuilder WithOpenAIImageGenerationService(this KernelBuilder builder, - string apiKey, - string? orgId = null, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new OpenAIImageGeneration( - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory), - setAsDefault); - - return builder; - } - - /// - /// Add the Azure OpenAI DallE image generation service to the list - /// - /// The instance - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Maximum number of attempts to retrieve the image generation operation result. - /// Self instance - public static KernelBuilder WithAzureOpenAIImageGenerationService(this KernelBuilder builder, - string endpoint, - string apiKey, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null, - int maxRetryCount = 5) - { - builder.WithAIService(serviceId, (loggerFactory, httpHandlerFactory) => - new AzureOpenAIImageGeneration( - endpoint, - apiKey, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory, - maxRetryCount), - setAsDefault); - - return builder; - } - - #endregion - - private static OpenAIClient CreateAzureOpenAIClient(ILoggerFactory loggerFactory, IDelegatingHandlerFactory httpHandlerFactory, string deploymentName, string endpoint, AzureKeyCredential credentials, HttpClient? 
httpClient) - { - OpenAIClientOptions options = CreateOpenAIClientOptions(loggerFactory, httpHandlerFactory, httpClient); - - return new(new Uri(endpoint), credentials, options); - } - - private static OpenAIClient CreateAzureOpenAIClient(ILoggerFactory loggerFactory, IDelegatingHandlerFactory httpHandlerFactory, string deploymentName, string endpoint, TokenCredential credentials, HttpClient? httpClient) - { - OpenAIClientOptions options = CreateOpenAIClientOptions(loggerFactory, httpHandlerFactory, httpClient); - - return new(new Uri(endpoint), credentials, options); - } - - private static OpenAIClientOptions CreateOpenAIClientOptions(ILoggerFactory loggerFactory, IDelegatingHandlerFactory httpHandlerFactory, HttpClient? httpClient) - { - OpenAIClientOptions options = new(); -#pragma warning disable CA2000 // Dispose objects before losing scope - options.Transport = new HttpClientTransport(HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory)); - options.RetryPolicy = new RetryPolicy(maxRetries: 0); //Disabling Azure SDK retry policy to use the one provided by the delegating handler factory or the HTTP client. -#pragma warning restore CA2000 // Dispose objects before losing scope - - return options; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelExtensions.cs deleted file mode 100644 index d263b6ca53f3..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIKernelExtensions.cs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the namespace of IKernel -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Class for extension methods for using OpenAI request settings. -/// -public static class OpenAIKernelExtensions -{ - /// - /// Define a string-to-string semantic function, with no direct support for input context. - /// The function can be referenced in templates and will receive the context, but when invoked programmatically you - /// can only pass in a string in input and receive a string in output. - /// - /// Semantic Kernel instance - /// Plain language definition of the semantic function, using SK template language - /// OpenAI LLM request settings - /// A name for the given function. The name can be referenced in templates and used by the pipeline planner. - /// Optional plugin name, for namespacing and avoid collisions - /// Optional description, useful for the planner - /// A function ready to use - public static ISKFunction CreateSemanticFunction( - this IKernel kernel, - string promptTemplate, - OpenAIRequestSettings requestSettings, - string? functionName = null, - string? pluginName = null, - string? description = null) - { - return kernel.CreateSemanticFunction( - promptTemplate, - functionName, - pluginName, - description, - requestSettings); - } - - /// - /// Invoke a semantic function using the provided prompt template. - /// - /// Semantic Kernel instance - /// Plain language definition of the semantic function, using SK template language - /// OpenAI LLM request settings - /// Options name for the given function. The name can be referenced in templates and used by the pipeline planner. 
- /// Optional plugin name, for namespacing and avoid collisions - /// Optional description, useful for the planner - /// A function ready to use - public static Task InvokeSemanticFunctionAsync( - this IKernel kernel, - string promptTemplate, - OpenAIRequestSettings requestSettings, - string? functionName = null, - string? pluginName = null, - string? description = null) - { - return kernel.InvokeSemanticFunctionAsync( - promptTemplate, - functionName, - pluginName, - description, - requestSettings); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIMemoryBuilderExtensions.cs deleted file mode 100644 index 4d351c5d774a..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIMemoryBuilderExtensions.cs +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using Azure.Core; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; -using Microsoft.SemanticKernel.Plugins.Memory; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI; - -/// -/// Provides extension methods for the class to configure OpenAI and AzureOpenAI connectors. -/// -public static class OpenAIMemoryBuilderExtensions -{ - /// - /// Adds an Azure OpenAI text embeddings service. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static MemoryBuilder WithAzureTextEmbeddingGenerationService( - this MemoryBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - builder.WithTextEmbeddingGeneration((loggerFactory, httpHandlerFactory) => - new AzureTextEmbeddingGeneration( - deploymentName, - endpoint, - apiKey, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory)); - - return builder; - } - - /// - /// Adds an Azure OpenAI text embeddings service. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static MemoryBuilder WithAzureTextEmbeddingGenerationService( - this MemoryBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credential, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? 
httpClient = null) - { - builder.WithTextEmbeddingGeneration((loggerFactory, httpHandlerFactory) => - new AzureTextEmbeddingGeneration( - deploymentName, - endpoint, - credential, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory)); - - return builder; - } - - /// - /// Adds the OpenAI text embeddings service. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// Whether the service should be the default for its type. - /// Custom for HTTP requests. - /// Self instance - public static MemoryBuilder WithOpenAITextEmbeddingGenerationService( - this MemoryBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - bool setAsDefault = false, - HttpClient? httpClient = null) - { - builder.WithTextEmbeddingGeneration((loggerFactory, httpHandlerFactory) => - new OpenAITextEmbeddingGeneration( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - loggerFactory)); - - return builder; - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIModelResultExtensions.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIModelResultExtensions.cs deleted file mode 100644 index 51731b4e9b71..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIModelResultExtensions.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 - -namespace Microsoft.SemanticKernel; - -/// -/// Provides extension methods for working with OpenAI model results. -/// -public static class OpenAIModelResultExtension -{ - /// - /// Retrieves a typed OpenAI / AzureOpenAI result from text completion prompt. - /// - /// Current context - /// OpenAI / AzureOpenAI result - public static TextModelResult GetOpenAITextResult(this ModelResult resultBase) - { - return resultBase.GetResult(); - } - - /// - /// Retrieves a typed OpenAI / AzureOpenAI result from chat completion prompt. - /// - /// Current context - /// OpenAI / AzureOpenAI result - public static ChatModelResult GetOpenAIChatResult(this ModelResult resultBase) - { - return resultBase.GetResult(); - } - - /// - /// Retrieves a typed OpenAI / AzureOpenAI result from chat completion prompt. - /// - /// Current context - /// OpenAI / AzureOpenAI result - public static ChatStreamingModelResult GetOpenAIChatStreamingResult(this ModelResult resultBase) - { - return resultBase.GetResult(); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIRequestSettings.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIRequestSettings.cs deleted file mode 100644 index 307ab324d79a..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIRequestSettings.cs +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
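For illustration, a usage sketch of the MemoryBuilder extension methods being removed above. The extension method names and their parameters come from the deleted code; the MemoryBuilder constructor, WithMemoryStore, VolatileMemoryStore, and SaveInformationAsync calls are assumptions about the surrounding pre-1.0 Semantic Kernel memory API.

using Microsoft.SemanticKernel.Connectors.AI.OpenAI;
using Microsoft.SemanticKernel.Plugins.Memory;

// Register an OpenAI embedding service plus an in-memory store, then build semantic memory.
var memory = new MemoryBuilder()
    .WithOpenAITextEmbeddingGenerationService(
        modelId: "text-embedding-ada-002",
        apiKey: "...")
    .WithMemoryStore(new VolatileMemoryStore())
    .Build();

await memory.SaveInformationAsync(collection: "docs", text: "Sample text to remember", id: "doc-1");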
- -using System; -using System.Collections.Generic; -using System.Text.Json; -using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI; - -/// -/// Request settings for an OpenAI completion request. -/// -public class OpenAIRequestSettings : AIRequestSettings -{ - /// - /// Value for to indicate that the model - /// can optionally generate a function call from . - /// - public const string FunctionCallAuto = "auto"; - - /// - /// Value for to indicate that no - /// function call should be generated. - /// - public const string FunctionCallNone = "none"; - - /// - /// Temperature controls the randomness of the completion. - /// The higher the temperature, the more random the completion. - /// - [JsonPropertyName("temperature")] - public double Temperature { get; set; } = 0; - - /// - /// TopP controls the diversity of the completion. - /// The higher the TopP, the more diverse the completion. - /// - [JsonPropertyName("top_p")] - public double TopP { get; set; } = 0; - - /// - /// Number between -2.0 and 2.0. Positive values penalize new tokens - /// based on whether they appear in the text so far, increasing the - /// model's likelihood to talk about new topics. - /// - [JsonPropertyName("presence_penalty")] - public double PresencePenalty { get; set; } = 0; - - /// - /// Number between -2.0 and 2.0. Positive values penalize new tokens - /// based on their existing frequency in the text so far, decreasing - /// the model's likelihood to repeat the same line verbatim. - /// - [JsonPropertyName("frequency_penalty")] - public double FrequencyPenalty { get; set; } = 0; - - /// - /// The maximum number of tokens to generate in the completion. - /// - [JsonPropertyName("max_tokens")] - public int? MaxTokens { get; set; } - - /// - /// Sequences where the completion will stop generating further tokens. - /// - [JsonPropertyName("stop_sequences")] - public IList StopSequences { get; set; } = Array.Empty(); - - /// - /// How many completions to generate for each prompt. Default is 1. - /// Note: Because this parameter generates many completions, it can quickly consume your token quota. - /// Use carefully and ensure that you have reasonable settings for max_tokens and stop. - /// - [JsonPropertyName("results_per_prompt")] - public int ResultsPerPrompt { get; set; } = 1; - - /// - /// The system prompt to use when generating text completions using a chat model. - /// Defaults to "Assistant is a large language model." - /// - [JsonPropertyName("chat_system_prompt")] - public string ChatSystemPrompt - { - get => this._chatSystemPrompt; - set - { - if (string.IsNullOrEmpty(value)) - { - value = OpenAIRequestSettings.DefaultChatSystemPrompt; - } - this._chatSystemPrompt = value; - } - } - - /// - /// Modify the likelihood of specified tokens appearing in the completion. - /// - [JsonPropertyName("token_selection_biases")] - public IDictionary TokenSelectionBiases { get; set; } = new Dictionary(); - - /// - /// Possible values are , , - /// or the name of a specific function that OpenAI should use to respond to the chat - /// request. If the latter, this function must exist in . - /// - public string? FunctionCall { get; set; } = null; - - /// - /// The set of functions to choose from if function calling is enabled by the model. - /// - public IList? Functions { get; set; } = null; - - /// - /// Default value for chat system property. 
- /// - internal static string DefaultChatSystemPrompt { get; } = "Assistant is a large language model."; - - /// - /// Default max tokens for a text completion - /// - internal static int DefaultTextMaxTokens { get; } = 256; - - /// - /// Create a new settings object with the values from another settings object. - /// - /// Template configuration - /// Default max tokens - /// An instance of OpenAIRequestSettings - public static OpenAIRequestSettings FromRequestSettings(AIRequestSettings? requestSettings, int? defaultMaxTokens = null) - { - if (requestSettings is null) - { - return new OpenAIRequestSettings() - { - MaxTokens = defaultMaxTokens - }; - } - - if (requestSettings is OpenAIRequestSettings requestSettingsOpenAIRequestSettings) - { - return requestSettingsOpenAIRequestSettings; - } - - var json = JsonSerializer.Serialize(requestSettings); - var openAIRequestSettings = JsonSerializer.Deserialize(json, s_options); - - if (openAIRequestSettings is not null) - { - return openAIRequestSettings; - } - - throw new ArgumentException($"Invalid request settings, cannot convert to {nameof(OpenAIRequestSettings)}", nameof(requestSettings)); - } - - #region private ================================================================================ - - private string _chatSystemPrompt = OpenAIRequestSettings.DefaultChatSystemPrompt; - - private static readonly JsonSerializerOptions s_options = CreateOptions(); - - private static JsonSerializerOptions CreateOptions() - { - JsonSerializerOptions options = new() - { - WriteIndented = true, - MaxDepth = 20, - AllowTrailingCommas = true, - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - Converters = { new OpenAIRequestSettingsConverter() } - }; - - return options; - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIRequestSettingsConverter.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIRequestSettingsConverter.cs deleted file mode 100644 index a61f8400ebdb..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/OpenAIRequestSettingsConverter.cs +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI; - -/// -/// JSON converter for -/// -public class OpenAIRequestSettingsConverter : JsonConverter -{ - /// - public override OpenAIRequestSettings? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - var requestSettings = new OpenAIRequestSettings(); - - while (reader.Read() && reader.TokenType != JsonTokenType.EndObject) - { - if (reader.TokenType == JsonTokenType.PropertyName) - { - string? 
propertyName = reader.GetString(); - - if (propertyName is not null) - { - // normalise property name to uppercase - propertyName = propertyName.ToUpperInvariant(); - } - - reader.Read(); - - switch (propertyName) - { - case "TEMPERATURE": - requestSettings.Temperature = reader.GetDouble(); - break; - case "TOPP": - case "TOP_P": - requestSettings.TopP = reader.GetDouble(); - break; - case "FREQUENCYPENALTY": - case "FREQUENCY_PENALTY": - requestSettings.FrequencyPenalty = reader.GetDouble(); - break; - case "PRESENCEPENALTY": - case "PRESENCE_PENALTY": - requestSettings.PresencePenalty = reader.GetDouble(); - break; - case "MAXTOKENS": - case "MAX_TOKENS": - requestSettings.MaxTokens = reader.GetInt32(); - break; - case "STOPSEQUENCES": - case "STOP_SEQUENCES": - requestSettings.StopSequences = JsonSerializer.Deserialize>(ref reader, options) ?? Array.Empty(); - break; - case "RESULTSPERPROMPT": - case "RESULTS_PER_PROMPT": - requestSettings.ResultsPerPrompt = reader.GetInt32(); - break; - case "CHATSYSTEMPROMPT": - case "CHAT_SYSTEM_PROMPT": - requestSettings.ChatSystemPrompt = reader.GetString() ?? OpenAIRequestSettings.DefaultChatSystemPrompt; - break; - case "TOKENSELECTIONBIASES": - case "TOKEN_SELECTION_BIASES": - requestSettings.TokenSelectionBiases = JsonSerializer.Deserialize>(ref reader, options) ?? new Dictionary(); - break; - case "SERVICEID": - case "SERVICE_ID": - requestSettings.ServiceId = reader.GetString(); - break; - default: - reader.Skip(); - break; - } - } - } - - return requestSettings; - } - - /// - public override void Write(Utf8JsonWriter writer, OpenAIRequestSettings value, JsonSerializerOptions options) - { - writer.WriteStartObject(); - - writer.WriteNumber("temperature", value.Temperature); - writer.WriteNumber("top_p", value.TopP); - writer.WriteNumber("frequency_penalty", value.FrequencyPenalty); - writer.WriteNumber("presence_penalty", value.PresencePenalty); - if (value.MaxTokens is null) - { - writer.WriteNull("max_tokens"); - } - else - { - writer.WriteNumber("max_tokens", (decimal)value.MaxTokens); - } - writer.WritePropertyName("stop_sequences"); - JsonSerializer.Serialize(writer, value.StopSequences, options); - writer.WriteNumber("results_per_prompt", value.ResultsPerPrompt); - writer.WriteString("chat_system_prompt", value.ChatSystemPrompt); - writer.WritePropertyName("token_selection_biases"); - JsonSerializer.Serialize(writer, value.TokenSelectionBiases, options); - writer.WriteString("service_id", value.ServiceId); - - writer.WriteEndObject(); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/AzureTextCompletion.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/AzureTextCompletion.cs deleted file mode 100644 index 2a549835a04a..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/AzureTextCompletion.cs +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; - -/// -/// Azure OpenAI text completion client. 
-/// TODO: forward ETW logging to ILogger, see https://learn.microsoft.com/en-us/dotnet/azure/sdk/logging -/// -public sealed class AzureTextCompletion : AzureOpenAIClientBase, ITextCompletion -{ - /// - /// Creates a new AzureTextCompletion client instance using API Key auth - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureTextCompletion( - string modelId, - string endpoint, - string apiKey, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) : base(modelId, endpoint, apiKey, httpClient, loggerFactory) - { - } - - /// - /// Creates a new AzureTextCompletion client instance supporting AAD auth - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureTextCompletion( - string modelId, - string endpoint, - TokenCredential credential, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) : base(modelId, endpoint, credential, httpClient, loggerFactory) - { - } - - /// - /// Creates a new AzureTextCompletion client instance using the specified OpenAIClient - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . - /// The to use for logging. If null, no logging will be performed. - public AzureTextCompletion( - string modelId, - OpenAIClient openAIClient, - ILoggerFactory? loggerFactory = null) : base(modelId, openAIClient, loggerFactory) - { - } - - /// - public IAsyncEnumerable GetStreamingCompletionsAsync( - string text, - AIRequestSettings? requestSettings, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetTextStreamingResultsAsync(text, requestSettings, cancellationToken); - } - - /// - public Task> GetCompletionsAsync( - string text, - AIRequestSettings? requestSettings, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetTextResultsAsync(text, requestSettings, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/OpenAITextCompletion.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/OpenAITextCompletion.cs deleted file mode 100644 index 9394be351f4d..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextCompletion/OpenAITextCompletion.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
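For illustration, a sketch of the behavior of the removed OpenAIRequestSettingsConverter: property names are upper-cased before matching, so both snake_case and PascalCase payloads deserialize to the same settings. The serializer options here mirror the ones built by the deleted CreateOptions helper; the exact option set is otherwise an assumption.

using System;
using System.Text.Json;
using Microsoft.SemanticKernel.Connectors.AI.OpenAI;

var options = new JsonSerializerOptions
{
    PropertyNameCaseInsensitive = true,
    Converters = { new OpenAIRequestSettingsConverter() },
};

// Both payloads produce MaxTokens = 256 and TopP = 0.5.
var snakeCase = JsonSerializer.Deserialize<OpenAIRequestSettings>(
    "{\"max_tokens\": 256, \"top_p\": 0.5}", options);
var pascalCase = JsonSerializer.Deserialize<OpenAIRequestSettings>(
    "{\"MaxTokens\": 256, \"TopP\": 0.5}", options);

Console.WriteLine($"{snakeCase!.MaxTokens} == {pascalCase!.MaxTokens}");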
- -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; - -/// -/// OpenAI text completion service. -/// TODO: forward ETW logging to ILogger, see https://learn.microsoft.com/en-us/dotnet/azure/sdk/logging -/// -public sealed class OpenAITextCompletion : OpenAIClientBase, ITextCompletion -{ - /// - /// Create an instance of the OpenAI text completion connector - /// - /// Model name - /// OpenAI API Key - /// OpenAI Organization Id (usually optional) - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAITextCompletion( - string modelId, - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null - ) : base(modelId, apiKey, organization, httpClient, loggerFactory) - { - } - - /// - public IAsyncEnumerable GetStreamingCompletionsAsync( - string text, - AIRequestSettings? requestSettings, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetTextStreamingResultsAsync(text, requestSettings, cancellationToken); - } - - /// - public Task> GetCompletionsAsync( - string text, - AIRequestSettings? requestSettings, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetTextResultsAsync(text, requestSettings, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/AzureTextEmbeddingGeneration.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/AzureTextEmbeddingGeneration.cs deleted file mode 100644 index ceed977b10a0..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/AzureTextEmbeddingGeneration.cs +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; - -/// -/// Azure OpenAI text embedding service. -/// -public sealed class AzureTextEmbeddingGeneration : AzureOpenAIClientBase, ITextEmbeddingGeneration -{ - /// - /// Creates a new AzureTextCompletion client instance using API Key auth - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureTextEmbeddingGeneration( - string modelId, - string endpoint, - string apiKey, - HttpClient? httpClient = null, - ILoggerFactory? 
loggerFactory = null) : base(modelId, endpoint, apiKey, httpClient, loggerFactory) - { - } - - /// - /// Creates a new AzureTextCompletion client instance supporting AAD auth - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureTextEmbeddingGeneration( - string modelId, - string endpoint, - TokenCredential credential, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) : base(modelId, endpoint, credential, httpClient, loggerFactory) - { - } - - /// - /// Generates an embedding from the given . - /// - /// List of strings to generate embeddings for - /// The to monitor for cancellation requests. The default is . - /// List of embeddings - public Task>> GenerateEmbeddingsAsync( - IList data, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetEmbeddingsAsync(data, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/OpenAITextEmbeddingGeneration.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/OpenAITextEmbeddingGeneration.cs deleted file mode 100644 index 1d19b0b546d5..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/OpenAITextEmbeddingGeneration.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; - -/// -/// OpenAI text embedding service. -/// -public sealed class OpenAITextEmbeddingGeneration : OpenAIClientBase, ITextEmbeddingGeneration -{ - /// - /// Create an instance of the OpenAI text embedding connector - /// - /// Model name - /// OpenAI API Key - /// OpenAI Organization Id (usually optional) - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAITextEmbeddingGeneration( - string modelId, - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null - ) : base(modelId, apiKey, organization, httpClient, loggerFactory) - { - } - - /// - /// Generates an embedding from the given . - /// - /// List of strings to generate embeddings for - /// The to monitor for cancellation requests. The default is . - /// List of embeddings - public Task>> GenerateEmbeddingsAsync( - IList data, - CancellationToken cancellationToken = default) - { - this.LogActionDetails(); - return this.InternalGetEmbeddingsAsync(data, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/TextEmbeddingRequest.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/TextEmbeddingRequest.cs deleted file mode 100644 index 8abff22677d7..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/TextEmbeddingRequest.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; - -/// -/// A request to create embedding vector representing input text -/// -public abstract class TextEmbeddingRequest -{ - /// - /// Input to embed - /// - [JsonPropertyName("input")] - public IList Input { get; set; } = new List(); -} - -/// -/// An OpenAI embedding request -/// -public sealed class OpenAITextEmbeddingRequest : TextEmbeddingRequest -{ - /// - /// Embedding model ID - /// - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; -} - -/// -/// An Azure OpenAI embedding request -/// -public sealed class AzureTextEmbeddingRequest : TextEmbeddingRequest -{ -} diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/TextEmbeddingResponse.cs b/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/TextEmbeddingResponse.cs deleted file mode 100644 index 1dcdca9841e0..000000000000 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/TextEmbedding/TextEmbeddingResponse.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; - -/// -/// A response from an embedding request -/// -public sealed class TextEmbeddingResponse -{ - /// - /// A single embedding vector - /// - public sealed class EmbeddingResponseIndex - { - /// - /// The embedding vector - /// - [JsonPropertyName("embedding")] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] - public ReadOnlyMemory Values { get; set; } - - /// - /// Index of the embedding vector - /// - [JsonPropertyName("index")] - public int Index { get; set; } - } - - /// - /// A list of embeddings - /// - [JsonPropertyName("data")] - public IList Embeddings { get; set; } = new List(); -} diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.HuggingFace/AssemblyInfo.cs new file mode 100644 index 000000000000..d174fc92303c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0020")] diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/Connectors.HuggingFace.csproj b/dotnet/src/Connectors/Connectors.HuggingFace/Connectors.HuggingFace.csproj new file mode 100644 index 000000000000..2f8d1b22c7b2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/Connectors.HuggingFace.csproj @@ -0,0 +1,25 @@ + + + + + Microsoft.SemanticKernel.Connectors.HuggingFace + $(AssemblyName) + netstandard2.0 + preview + + + + + + + + + Semantic Kernel - Hugging Face AI connectors + Semantic Kernel connectors for Hugging Face. Contains clients for text generation and text embedding generation. + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFaceKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFaceKernelBuilderExtensions.cs new file mode 100644 index 000000000000..4d11b543a968 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFaceKernelBuilderExtensions.cs @@ -0,0 +1,112 @@ +// Copyright (c) Microsoft. All rights reserved. 
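Note on the assembly-level Experimental("SKEXP0020") marker added above: any project consuming these Hugging Face types gets a compile-time diagnostic with that ID until it opts in, either per file with a pragma or project-wide with NoWarn. A minimal sketch of the per-file opt-in (TextGenerationRequest is a type added later in this change; later sketches in these notes assume the relevant SKEXP diagnostics are suppressed the same way):

#pragma warning disable SKEXP0020 // Opt in to the experimental Hugging Face connector surface.
using Microsoft.SemanticKernel.Connectors.HuggingFace;

var request = new TextGenerationRequest { Input = "Hello" };
System.Console.WriteLine(request.Input);
#pragma warning restore SKEXP0020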
+ +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel.Connectors.HuggingFace; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for the class to configure Hugging Face connectors. +/// +public static class HuggingFaceKernelBuilderExtensions +{ + /// + /// Adds an Hugging Face text generation service with the specified configuration. + /// + /// The instance to augment. + /// The name of the Hugging Face model. + /// The API key required for accessing the Hugging Face service. + /// The endpoint URL for the text generation service. + /// A local identifier for the given AI service. + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddHuggingFaceTextGeneration( + this IKernelBuilder builder, + string model, + string? apiKey = null, + string? endpoint = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNull(model); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new HuggingFaceTextGenerationService(model, apiKey, HttpClientProvider.GetHttpClient(httpClient, serviceProvider), endpoint)); + + return builder; + } + + /// + /// Adds an Hugging Face text generation service with the specified configuration. + /// + /// The instance to augment. + /// The name of the Hugging Face model. + /// The API key required for accessing the Hugging Face service. + /// The endpoint URL for the text generation service. + /// A local identifier for the given AI service. + /// The same instance as . + public static IServiceCollection AddHuggingFaceTextGeneration( + this IServiceCollection services, + string model, + string? apiKey = null, + string? endpoint = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNull(model); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new HuggingFaceTextGenerationService(model, apiKey, HttpClientProvider.GetHttpClient(serviceProvider), endpoint)); + } + + /// + /// Adds an Hugging Face text embedding generation service with the specified configuration. + /// + /// The instance to augment. + /// The name of the Hugging Face model. + /// The endpoint for the text embedding generation service. + /// A local identifier for the given AI service. + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddHuggingFaceTextEmbeddingGeneration( + this IKernelBuilder builder, + string model, + string? endpoint = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNull(model); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new HuggingFaceTextEmbeddingGenerationService(model, HttpClientProvider.GetHttpClient(httpClient, serviceProvider), endpoint)); + + return builder; + } + + /// + /// Adds an Hugging Face text embedding generation service with the specified configuration. + /// + /// The instance to augment. + /// The name of the Hugging Face model. + /// The endpoint for the text embedding generation service. + /// A local identifier for the given AI service. + /// The same instance as . + public static IServiceCollection AddHuggingFaceTextEmbeddingGeneration( + this IServiceCollection services, + string model, + string? endpoint = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + Verify.NotNull(model); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new HuggingFaceTextEmbeddingGenerationService(model, HttpClientProvider.GetHttpClient(serviceProvider), endpoint)); + } +} diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGenerationService.cs new file mode 100644 index 000000000000..a9899c611bd4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/HuggingFaceTextEmbeddingGenerationService.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.Connectors.HuggingFace; + +/// +/// HuggingFace embedding generation service. +/// +#pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. +public sealed class HuggingFaceTextEmbeddingGenerationService : ITextEmbeddingGenerationService +#pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. +{ + private readonly string _model; + private readonly string? _endpoint; + private readonly HttpClient _httpClient; + private readonly Dictionary _attributes = new(); + + /// + /// Initializes a new instance of the class. + /// Using default implementation. + /// + /// Endpoint for service API call. + /// Model to use for service API call. + public HuggingFaceTextEmbeddingGenerationService(Uri endpoint, string model) + { + Verify.NotNull(endpoint); + Verify.NotNullOrWhiteSpace(model); + + this._model = model; + this._endpoint = endpoint.AbsoluteUri; + this._attributes.Add(AIServiceExtensions.ModelIdKey, this._model); + this._attributes.Add(AIServiceExtensions.EndpointKey, this._endpoint); + this._httpClient = HttpClientProvider.GetHttpClient(); + } + + /// + /// Initializes a new instance of the class. + /// + /// Model to use for service API call. + /// Endpoint for service API call. + public HuggingFaceTextEmbeddingGenerationService(string model, string endpoint) + { + Verify.NotNullOrWhiteSpace(model); + Verify.NotNullOrWhiteSpace(endpoint); + + this._model = model; + this._endpoint = endpoint; + this._attributes.Add(AIServiceExtensions.ModelIdKey, this._model); + this._attributes.Add(AIServiceExtensions.EndpointKey, this._endpoint); + this._httpClient = HttpClientProvider.GetHttpClient(); + } + + /// + /// Initializes a new instance of the class. + /// + /// Model to use for service API call. + /// The HttpClient used for making HTTP requests. + /// Endpoint for service API call. If not specified, the base address of the HTTP client is used. + public HuggingFaceTextEmbeddingGenerationService(string model, HttpClient httpClient, string? 
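Usage sketch for the extension methods above, assuming the Kernel.CreateBuilder() entry point from the same SDK; the model names and API key are illustrative placeholders, and SKEXP0020 is assumed suppressed as noted earlier. The IServiceCollection overloads register the same keyed singletons when a host's DI container is built directly.

using Microsoft.SemanticKernel;

var builder = Kernel.CreateBuilder();
builder.AddHuggingFaceTextGeneration(model: "gpt2", apiKey: "<hf-api-key>");
builder.AddHuggingFaceTextEmbeddingGeneration(model: "sentence-transformers/all-MiniLM-L6-v2");
Kernel kernel = builder.Build();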
endpoint = null) + { + Verify.NotNullOrWhiteSpace(model); + Verify.NotNull(httpClient); + if (httpClient.BaseAddress == null && string.IsNullOrEmpty(endpoint)) + { + throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); + } + + this._model = model; + this._endpoint = endpoint; + this._httpClient = httpClient; + this._attributes.Add(AIServiceExtensions.ModelIdKey, model); + this._attributes.Add(AIServiceExtensions.EndpointKey, endpoint ?? httpClient.BaseAddress!.ToString()); + } + + /// + public IReadOnlyDictionary Attributes => this._attributes; + + /// + public async Task>> GenerateEmbeddingsAsync( + IList data, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + return await this.ExecuteEmbeddingRequestAsync(data, cancellationToken).ConfigureAwait(false); + } + + #region private ================================================================================ + + /// + /// Performs HTTP request to given endpoint for embedding generation. + /// + /// Data to embed. + /// The to monitor for cancellation requests. The default is . + /// List of generated embeddings. + private async Task>> ExecuteEmbeddingRequestAsync(IList data, CancellationToken cancellationToken) + { + var embeddingRequest = new TextEmbeddingRequest + { + Input = data + }; + + using var httpRequestMessage = HttpRequest.CreatePostRequest(this.GetRequestUri(), embeddingRequest); + + httpRequestMessage.Headers.Add("User-Agent", HttpHeaderValues.UserAgent); + + var response = await this._httpClient.SendWithSuccessCheckAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false); + var body = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + + var embeddingResponse = JsonSerializer.Deserialize(body); + + return embeddingResponse?.Embeddings?.Select(l => l.Embedding).ToList()!; + } + + /// + /// Retrieves the request URI based on the provided endpoint and model information. + /// + /// + /// A object representing the request URI. + /// + private Uri GetRequestUri() + { + string? baseUrl = null; + + if (!string.IsNullOrEmpty(this._endpoint)) + { + baseUrl = this._endpoint; + } + else if (this._httpClient.BaseAddress?.AbsoluteUri != null) + { + baseUrl = this._httpClient.BaseAddress!.AbsoluteUri; + } + else + { + throw new KernelException("No endpoint or HTTP client base address has been provided"); + } + + return new Uri($"{baseUrl!.TrimEnd('/')}/{this._model}"); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/TextEmbeddingRequest.cs b/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/TextEmbeddingRequest.cs new file mode 100644 index 000000000000..7e3637e43c0d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/TextEmbeddingRequest.cs @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.HuggingFace; + +/// +/// HTTP schema to perform embedding request. +/// +public sealed class TextEmbeddingRequest +{ + /// + /// Data to embed. 
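Direct-use sketch of the embedding service above: when no endpoint string is passed, GetRequestUri falls back to the HttpClient's BaseAddress and appends the model name, so the POST below targets {BaseAddress}/{model}. The base address and model name are illustrative placeholders.

using System;
using System.Net.Http;
using Microsoft.SemanticKernel.Connectors.HuggingFace;

var http = new HttpClient { BaseAddress = new Uri("https://api-inference.huggingface.co/pipeline/feature-extraction") };
var embeddingService = new HuggingFaceTextEmbeddingGenerationService("sentence-transformers/all-MiniLM-L6-v2", http);

// Returns one ReadOnlyMemory<float> per input string.
var vectors = await embeddingService.GenerateEmbeddingsAsync(new[] { "Hello, world" });
Console.WriteLine(vectors[0].Length);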
+ /// + [JsonPropertyName("inputs")] + public IList Input { get; set; } = new List(); +} diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/TextEmbeddingResponse.cs b/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/TextEmbeddingResponse.cs new file mode 100644 index 000000000000..2585766a6b8d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/TextEmbedding/TextEmbeddingResponse.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.HuggingFace; + +/// +/// Represents the response from the Hugging Face text embedding API. +/// +public sealed class TextEmbeddingResponse +{ + /// + /// Represents the embedding vector for a given text. + /// + public sealed class EmbeddingVector + { + /// + /// The embedding vector as a ReadOnlyMemory of float values. + /// + [JsonPropertyName("embedding")] + [JsonConverter(typeof(ReadOnlyMemoryConverter))] + public ReadOnlyMemory Embedding { get; set; } + } + + /// + /// List of embeddings. + /// + [JsonPropertyName("data")] + public IList? Embeddings { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/HuggingFaceTextGenerationService.cs b/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/HuggingFaceTextGenerationService.cs new file mode 100644 index 000000000000..52d016377811 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/HuggingFaceTextGenerationService.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel.Connectors.HuggingFace; + +/// +/// HuggingFace text generation service. +/// +#pragma warning disable CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. +public sealed class HuggingFaceTextGenerationService : ITextGenerationService +#pragma warning restore CA1001 // Types that own disposable fields should be disposable. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. +{ + private const string HuggingFaceApiEndpoint = "https://api-inference.huggingface.co/models"; + + private readonly string _model; + private readonly string? _endpoint; + private readonly HttpClient _httpClient; + private readonly string? _apiKey; + private readonly Dictionary _attributes = new(); + + /// + /// Initializes a new instance of the class. + /// Using default implementation. + /// + /// Endpoint for service API call. + /// Model to use for service API call. 
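Wire-format sketch implied by the JsonPropertyName attributes above (System.Text.Json defaults assumed): the request body carries the strings under "inputs", and the response is parsed from a "data" array whose items hold an "embedding" vector.

using System;
using System.Collections.Generic;
using System.Text.Json;
using Microsoft.SemanticKernel.Connectors.HuggingFace;

var request = new TextEmbeddingRequest { Input = new List<string> { "Hello world" } };
Console.WriteLine(JsonSerializer.Serialize(request));
// {"inputs":["Hello world"]}

var response = JsonSerializer.Deserialize<TextEmbeddingResponse>(
    "{\"data\":[{\"embedding\":[0.1,0.2,0.3]}]}");
Console.WriteLine(response!.Embeddings![0].Embedding.Length); // 3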
+ public HuggingFaceTextGenerationService(Uri endpoint, string model) + { + Verify.NotNull(endpoint); + Verify.NotNullOrWhiteSpace(model); + + this._model = model; + this._endpoint = endpoint.AbsoluteUri; + this._attributes.Add(AIServiceExtensions.ModelIdKey, this._model); + this._attributes.Add(AIServiceExtensions.EndpointKey, this._endpoint); + + this._httpClient = HttpClientProvider.GetHttpClient(); + } + + /// + /// Initializes a new instance of the class. + /// Using HuggingFace API for service call, see https://huggingface.co/docs/api-inference/index. + /// + /// The name of the model to use for text generation. + /// The API key for accessing the Hugging Face service. + /// The HTTP client to use for making API requests. If not specified, a default client will be used. + /// The endpoint URL for the Hugging Face service. + /// If not specified, the base address of the HTTP client is used. If the base address is not available, a default endpoint will be used. + public HuggingFaceTextGenerationService(string model, string? apiKey = null, HttpClient? httpClient = null, string? endpoint = null) + { + Verify.NotNullOrWhiteSpace(model); + + this._model = model; + this._apiKey = apiKey; + this._httpClient = HttpClientProvider.GetHttpClient(httpClient); + this._endpoint = endpoint; + this._attributes.Add(AIServiceExtensions.ModelIdKey, this._model); + this._attributes.Add(AIServiceExtensions.EndpointKey, this._endpoint ?? HuggingFaceApiEndpoint); + } + + /// + public IReadOnlyDictionary Attributes => this._attributes; + + /// + public Task> GetTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this.InternalGetTextContentsAsync(prompt, cancellationToken); + + /// + public async IAsyncEnumerable GetStreamingTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + foreach (var textContent in await this.InternalGetTextContentsAsync(prompt, cancellationToken).ConfigureAwait(false)) + { + yield return new StreamingTextContent(textContent.Text, 0, this.GetModelId(), textContent); + } + } + + #region private ================================================================================ + + private async Task> InternalGetTextContentsAsync(string text, CancellationToken cancellationToken = default) + { + var completionRequest = new TextGenerationRequest + { + Input = text + }; + + using var httpRequestMessage = HttpRequest.CreatePostRequest(this.GetRequestUri(), completionRequest); + + httpRequestMessage.Headers.Add("User-Agent", HttpHeaderValues.UserAgent); + if (!string.IsNullOrEmpty(this._apiKey)) + { + httpRequestMessage.Headers.Add("Authorization", $"Bearer {this._apiKey}"); + } + + using var response = await this._httpClient.SendWithSuccessCheckAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false); + + var body = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + + List? 
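Direct-use sketch of the generation service above (model name and token are placeholders; SKEXP0020 assumed suppressed). Note that GetStreamingTextContentsAsync is implemented by awaiting the full completion and yielding each result as a single chunk rather than streaming token by token.

using System;
using Microsoft.SemanticKernel.Connectors.HuggingFace;

var textService = new HuggingFaceTextGenerationService("gpt2", apiKey: "<hf-api-key>");

// The API key, when provided, is sent as an Authorization: Bearer header.
var completions = await textService.GetTextContentsAsync("The quick brown fox");
Console.WriteLine(completions[0].Text);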
completionResponse = JsonSerializer.Deserialize>(body); + + if (completionResponse is null) + { + throw new KernelException("Unexpected response from model") + { + Data = { { "ResponseData", body } }, + }; + } + + return completionResponse.ConvertAll(responseContent => new TextContent(responseContent.Text, this.GetModelId(), responseContent)); + } + + /// + /// Retrieves the request URI based on the provided endpoint and model information. + /// + /// + /// A object representing the request URI. + /// + private Uri GetRequestUri() + { + var baseUrl = HuggingFaceApiEndpoint; + + if (!string.IsNullOrEmpty(this._endpoint)) + { + baseUrl = this._endpoint; + } + else if (this._httpClient.BaseAddress?.AbsoluteUri != null) + { + baseUrl = this._httpClient.BaseAddress!.AbsoluteUri; + } + + return new Uri($"{baseUrl!.TrimEnd('/')}/{this._model}"); + } + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/TextGenerationRequest.cs b/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/TextGenerationRequest.cs new file mode 100644 index 000000000000..3fc7bbbeb2e8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/TextGenerationRequest.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.HuggingFace; + +/// +/// HTTP schema to perform completion request. +/// +public sealed class TextGenerationRequest +{ + /// + /// Prompt to complete. + /// + [JsonPropertyName("inputs")] + public string Input { get; set; } = string.Empty; + + /// + /// Enable streaming + /// + [JsonPropertyName("stream")] + public bool Stream { get; set; } = false; +} diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/TextGenerationResponse.cs b/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/TextGenerationResponse.cs new file mode 100644 index 000000000000..a2abc9d3af64 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.HuggingFace/TextGeneration/TextGenerationResponse.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.HuggingFace; + +/// +/// HTTP Schema for completion response. +/// +public sealed class TextGenerationResponse +{ + /// + /// Completed text. + /// + [JsonPropertyName("generated_text")] + public string? Text { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AssemblyInfo.cs new file mode 100644 index 000000000000..785052e3eadf --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0021")] diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs new file mode 100644 index 000000000000..fc1061171671 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs @@ -0,0 +1,207 @@ +// Copyright (c) Microsoft. All rights reserved. 
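Wire-format sketch for the generation schemas above: the request posts the prompt under "inputs" plus a "stream" flag, and the response body is deserialized as a JSON array of objects carrying "generated_text", which is why the service reads it into a list of TextGenerationResponse.

using System;
using System.Collections.Generic;
using System.Text.Json;
using Microsoft.SemanticKernel.Connectors.HuggingFace;

var generationRequest = new TextGenerationRequest { Input = "Once upon a time" };
Console.WriteLine(JsonSerializer.Serialize(generationRequest));
// {"inputs":"Once upon a time","stream":false}

var generationResponse = JsonSerializer.Deserialize<List<TextGenerationResponse>>(
    "[{\"generated_text\":\"Once upon a time, in a land far away\"}]");
Console.WriteLine(generationResponse![0].Text);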
+ +using System; +using System.Text; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Memory; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; + +/// +/// Azure AI Search record and index definition. +/// Note: once defined, index cannot be modified. +/// +internal sealed class AzureAISearchMemoryRecord +{ + /// + /// ID field name. + /// + public const string IdField = "Id"; + /// + /// Text field name. + /// + public const string TextField = "Text"; + /// + /// Embedding field name. + /// + public const string EmbeddingField = "Embedding"; + /// + /// External source name field name. + /// + public const string ExternalSourceNameField = "ExternalSourceName"; + /// + /// Description field name. + /// + public const string DescriptionField = "Description"; + /// + /// Additional metadata field name. + /// + public const string AdditionalMetadataField = "AdditionalMetadata"; + /// + /// Is reference field name. + /// + public const string IsReferenceField = "IsReference"; + + /// + /// Record ID. + /// The record is not filterable to save quota, also SK uses only semantic search. + /// + [JsonPropertyName(IdField)] + public string Id { get; set; } = string.Empty; + + /// + /// Content is stored here. + /// + [JsonPropertyName(TextField)] + public string? Text { get; set; } = string.Empty; + + /// + /// Content embedding + /// + [JsonPropertyName(EmbeddingField)] + [JsonConverter(typeof(ReadOnlyMemoryConverter))] + public ReadOnlyMemory Embedding { get; set; } + + /// + /// Optional description of the content, e.g. a title. This can be useful when + /// indexing external data without pulling in the entire content. + /// + [JsonPropertyName(DescriptionField)] + public string? Description { get; set; } = string.Empty; + + /// + /// Additional metadata. Currently this is a string, where you could store serialized data as JSON. + /// In future the design might change to allow storing named values and leverage filters. + /// + [JsonPropertyName(AdditionalMetadataField)] + public string? AdditionalMetadata { get; set; } = string.Empty; + + /// + /// Name of the external source, in cases where the content and the ID are + /// referenced to external information. + /// + [JsonPropertyName(ExternalSourceNameField)] + public string ExternalSourceName { get; set; } = string.Empty; + + /// + /// Whether the record references external information. + /// + [JsonPropertyName(IsReferenceField)] + public bool IsReference { get; set; } = false; + + /// + /// Initializes a new instance of the class. + /// Required by JSON deserializer. + /// + public AzureAISearchMemoryRecord() + { + } + + /// + /// Initializes a new instance of the class with the specified ID. + /// + /// The record ID. + public AzureAISearchMemoryRecord(string id) + { + this.Id = EncodeId(id); + } + + /// + /// Initializes a new instance of the class with the specified parameters. + /// + /// The record ID. + /// The content stored in the record. + /// The name of the external source. + /// Whether the record references external information. + /// The content embedding. + /// The optional description of the content. + /// The additional metadata. + public AzureAISearchMemoryRecord( + string id, + string text, + string externalSourceName, + bool isReference, + ReadOnlyMemory embedding, + string? description = null, + string? 
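Sketch of how a record serializes with the field names above, as seen from inside the connector assembly (the type is internal); values are illustrative and the output shown is approximate. The Id stored in the index is the Base64-encoded form of the caller's key, produced by the single-argument constructor.

using System;
using System.Text.Json;

var record = new AzureAISearchMemoryRecord("doc-001")
{
    Text = "Contoso shipping policy ...",
    Description = "Shipping policy summary",
    ExternalSourceName = "contoso-wiki",
    Embedding = new float[] { 0.12f, -0.05f, 0.33f },
};

Console.WriteLine(JsonSerializer.Serialize(record));
// Approximate output:
// {"Id":"ZG9jLTAwMQ==","Text":"Contoso shipping policy ...","Embedding":[0.12,-0.05,0.33],
//  "Description":"Shipping policy summary","AdditionalMetadata":"",
//  "ExternalSourceName":"contoso-wiki","IsReference":false}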
additionalMetadata = null) + { + this.Id = EncodeId(id); + this.IsReference = isReference; + this.Embedding = embedding; + this.Text = text; + this.ExternalSourceName = externalSourceName; + this.Description = description; + this.AdditionalMetadata = additionalMetadata; + } + + /// + /// Converts the current instance to a object. + /// + /// A object. + public MemoryRecordMetadata ToMemoryRecordMetadata() + { + return new MemoryRecordMetadata( + isReference: this.IsReference, + id: DecodeId(this.Id), + text: this.Text ?? string.Empty, + description: this.Description ?? string.Empty, + externalSourceName: this.ExternalSourceName, + additionalMetadata: this.AdditionalMetadata ?? string.Empty); + } + + /// + /// Creates a new object from the specified . + /// + /// The object. + /// A new object. + public static AzureAISearchMemoryRecord FromMemoryRecord(MemoryRecord record) + { + return new AzureAISearchMemoryRecord( + id: record.Metadata.Id, + text: record.Metadata.Text, + externalSourceName: string.Empty, + isReference: record.Metadata.IsReference, + description: record.Metadata.Description, + additionalMetadata: record.Metadata.AdditionalMetadata, + embedding: record.Embedding + ); + } + + /// + /// Converts the current instance to a object. + /// + /// Whether to include embeddings in the resulting . + /// A object. + public MemoryRecord ToMemoryRecord(bool withEmbeddings = true) + { + return new MemoryRecord( + metadata: this.ToMemoryRecordMetadata(), + embedding: withEmbeddings ? this.Embedding : default, + key: this.Id); + } + + /// + /// Encodes the specified ID using a URL-safe algorithm. + /// Azure AI Search keys can contain only letters, digits, underscore, dash, equal sign, recommending + /// to encode values with a URL-safe algorithm. + /// + /// The original ID. + /// The encoded ID. + internal static string EncodeId(string realId) + { + var bytes = Encoding.UTF8.GetBytes(realId); + return Convert.ToBase64String(bytes); + } + + /// + /// Decodes the specified encoded ID. + /// + /// The encoded ID. + /// The decoded ID. + private static string DecodeId(string encodedId) + { + var bytes = Convert.FromBase64String(encodedId); + return Encoding.UTF8.GetString(bytes); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryStore.cs new file mode 100644 index 000000000000..b02a4ae43869 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryStore.cs @@ -0,0 +1,455 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.Core; +using Azure.Search.Documents; +using Azure.Search.Documents.Indexes; +using Azure.Search.Documents.Indexes.Models; +using Azure.Search.Documents.Models; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Memory; + +namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; + +/// +/// is a memory store implementation using Azure AI Search. +/// +public class AzureAISearchMemoryStore : IMemoryStore +{ + /// + /// Create a new instance of memory storage using Azure AI Search. + /// + /// Azure AI Search URI, e.g. 
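The document key transformation above is a plain Base64 round trip over the UTF-8 bytes of the caller's ID: EncodeId is applied on the way in, and DecodeId restores the original when mapping back to MemoryRecordMetadata. A standalone sketch with an illustrative ID:

using System;
using System.Text;

string realId = "user/42#memo";
string encoded = Convert.ToBase64String(Encoding.UTF8.GetBytes(realId));     // "dXNlci80MiNtZW1v"
string decoded = Encoding.UTF8.GetString(Convert.FromBase64String(encoded));

Console.WriteLine(encoded);
Console.WriteLine(decoded == realId); // True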
"https://contoso.search.windows.net" + /// API Key + public AzureAISearchMemoryStore(string endpoint, string apiKey) + { + AzureKeyCredential credentials = new(apiKey); + this._adminClient = new SearchIndexClient(new Uri(endpoint), credentials, GetClientOptions()); + } + + /// + /// Create a new instance of memory storage using Azure AI Search. + /// + /// Azure AI Search URI, e.g. "https://contoso.search.windows.net" + /// Azure service + public AzureAISearchMemoryStore(string endpoint, TokenCredential credentials) + { + this._adminClient = new SearchIndexClient(new Uri(endpoint), credentials, GetClientOptions()); + } + + /// + public Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default) + { + // Indexes are created when sending a record. The creation requires the size of the embeddings. + return Task.CompletedTask; + } + + /// + public IAsyncEnumerable GetCollectionsAsync(CancellationToken cancellationToken = default) + { + return RunMemoryStoreOperation(() => this.GetIndexesAsync(cancellationToken)); + } + + /// + public async Task DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default) + { + var normalizedIndexName = this.NormalizeIndexName(collectionName); + + var indexes = RunMemoryStoreOperation(() => this.GetIndexesAsync(cancellationToken)); + + return await indexes + .AnyAsync(index => + string.Equals(index, collectionName, StringComparison.OrdinalIgnoreCase) || + string.Equals(index, normalizedIndexName, StringComparison.OrdinalIgnoreCase), + cancellationToken: cancellationToken + ) + .ConfigureAwait(false); + } + + /// + public Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default) + { + var normalizedIndexName = this.NormalizeIndexName(collectionName); + + return RunMemoryStoreOperation(() => this._adminClient.DeleteIndexAsync(normalizedIndexName, cancellationToken)); + } + + /// + public Task UpsertAsync(string collectionName, MemoryRecord record, CancellationToken cancellationToken = default) + { + var normalizedIndexName = this.NormalizeIndexName(collectionName); + + return RunMemoryStoreOperation(() => this.UpsertRecordAsync(normalizedIndexName, AzureAISearchMemoryRecord.FromMemoryRecord(record), cancellationToken)); + } + + /// + public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var normalizedIndexName = this.NormalizeIndexName(collectionName); + + var searchRecords = records.Select(AzureAISearchMemoryRecord.FromMemoryRecord).ToList(); + + var result = await RunMemoryStoreOperation(() => this.UpsertBatchAsync(normalizedIndexName, searchRecords, cancellationToken)).ConfigureAwait(false); + + foreach (var x in result) { yield return x; } + } + + /// + public async Task GetAsync(string collectionName, string key, bool withEmbedding = false, CancellationToken cancellationToken = default) + { + var normalizedIndexName = this.NormalizeIndexName(collectionName); + var client = this.GetSearchClient(normalizedIndexName); + + var encodedId = AzureAISearchMemoryRecord.EncodeId(key); + + Response? 
result; + + try + { + result = await RunMemoryStoreOperation(async () => + { + return await client + .GetDocumentAsync(encodedId, cancellationToken: cancellationToken) + .ConfigureAwait(false); + }).ConfigureAwait(false); + } + catch (HttpOperationException e) when (e.StatusCode == System.Net.HttpStatusCode.NotFound) + { + // Index not found, no data to return + return null; + } + + if (result?.Value == null) + { + throw new KernelException("Memory read returned null"); + } + + return result.Value.ToMemoryRecord(); + } + + /// + public async IAsyncEnumerable GetBatchAsync( + string collectionName, + IEnumerable keys, + bool withEmbeddings = false, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + foreach (var key in keys) + { + var record = await this.GetAsync(collectionName, key, withEmbeddings, cancellationToken).ConfigureAwait(false); + if (record != null) { yield return record; } + } + } + + /// + public async Task<(MemoryRecord, double)?> GetNearestMatchAsync( + string collectionName, + ReadOnlyMemory embedding, + double minRelevanceScore = 0, + bool withEmbedding = false, + CancellationToken cancellationToken = default) + { + return await this.GetNearestMatchesAsync(collectionName, embedding, 1, minRelevanceScore, withEmbedding, cancellationToken) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } + + /// + public async IAsyncEnumerable<(MemoryRecord, double)> GetNearestMatchesAsync( + string collectionName, + ReadOnlyMemory embedding, + int limit, + double minRelevanceScore = 0, + bool withEmbeddings = false, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + // Cosine similarity range: -1 .. +1 + minRelevanceScore = Math.Max(-1, Math.Min(1, minRelevanceScore)); + + var normalizedIndexName = this.NormalizeIndexName(collectionName); + + var client = this.GetSearchClient(normalizedIndexName); + + VectorizedQuery vectorQuery = new(MemoryMarshal.TryGetArray(embedding, out var array) && array.Count == embedding.Length ? array.Array! : embedding.ToArray()) + { + KNearestNeighborsCount = limit, + Fields = { AzureAISearchMemoryRecord.EmbeddingField }, + }; + + SearchOptions options = new() + { + VectorSearch = new() + { + Queries = { vectorQuery } + }, + }; + + Response>? searchResult = null; + try + { + searchResult = await RunMemoryStoreOperation(async () => + { + return await client + .SearchAsync(null, options, cancellationToken: cancellationToken) + .ConfigureAwait(false); + }).ConfigureAwait(false); + } + catch (HttpOperationException e) when (e.StatusCode == System.Net.HttpStatusCode.NotFound) + { + // Index not found, no data to return + } + + if (searchResult == null) { yield break; } + + var minAzureSearchScore = CosineSimilarityToScore(minRelevanceScore); + await foreach (SearchResult? doc in searchResult.Value.GetResultsAsync()) + { + if (doc == null || doc.Score < minAzureSearchScore) { continue; } + + MemoryRecord memoryRecord = doc.Document.ToMemoryRecord(withEmbeddings); + + yield return (memoryRecord, ScoreToCosineSimilarity(doc.Score ?? 
0)); + } + } + + /// + public Task RemoveAsync(string collectionName, string key, CancellationToken cancellationToken = default) + { + return this.RemoveBatchAsync(collectionName, new[] { key }, cancellationToken); + } + + /// + public async Task RemoveBatchAsync(string collectionName, IEnumerable keys, CancellationToken cancellationToken = default) + { + var normalizedIndexName = this.NormalizeIndexName(collectionName); + + var records = keys.Select(x => new AzureAISearchMemoryRecord(x)); + + var client = this.GetSearchClient(normalizedIndexName); + try + { + await RunMemoryStoreOperation(() => client.DeleteDocumentsAsync(records, cancellationToken: cancellationToken)).ConfigureAwait(false); + } + catch (HttpOperationException e) when (e.StatusCode == System.Net.HttpStatusCode.NotFound) + { + // Index not found, no data to delete + } + } + + #region private + + /// + /// Index names cannot contain special chars. We use this rule to replace a few common ones + /// with an underscore and reduce the chance of errors. If other special chars are used, we leave it + /// to the service to throw an error. + /// Note: + /// - replacing chars introduces a small chance of conflicts, e.g. "the-user" and "the_user". + /// - we should consider whether making this optional and leave it to the developer to handle. + /// + private static readonly Regex s_replaceIndexNameSymbolsRegex = new(@"[\s|\\|/|.|_|:]"); + + private readonly ConcurrentDictionary _clientsByIndex = new(); + + private readonly SearchIndexClient _adminClient; + + /// + /// Create a new search index. + /// + /// Index name + /// Size of the embedding vector + /// Task cancellation token + private Task> CreateIndexAsync( + string indexName, + int embeddingSize, + CancellationToken cancellationToken = default) + { + if (embeddingSize < 1) + { + throw new ArgumentOutOfRangeException(nameof(embeddingSize), "Invalid embedding size: the value must be greater than zero."); + } + + const string ProfileName = "searchProfile"; + const string AlgorithmName = "searchAlgorithm"; + + var newIndex = new SearchIndex(indexName) + { + Fields = new List + { + new SimpleField(AzureAISearchMemoryRecord.IdField, SearchFieldDataType.String) { IsKey = true }, + new VectorSearchField(AzureAISearchMemoryRecord.EmbeddingField, embeddingSize, ProfileName), + new(AzureAISearchMemoryRecord.TextField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, + new SimpleField(AzureAISearchMemoryRecord.DescriptionField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, + new SimpleField(AzureAISearchMemoryRecord.AdditionalMetadataField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, + new SimpleField(AzureAISearchMemoryRecord.ExternalSourceNameField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, + new SimpleField(AzureAISearchMemoryRecord.IsReferenceField, SearchFieldDataType.Boolean) { IsFilterable = true, IsFacetable = true }, + }, + VectorSearch = new VectorSearch + { + Algorithms = + { + new HnswAlgorithmConfiguration(AlgorithmName) + { + Parameters = new HnswParameters { Metric = VectorSearchAlgorithmMetric.Cosine } + } + }, + Profiles = { new VectorSearchProfile(ProfileName, AlgorithmName) } + } + }; + + return this._adminClient.CreateIndexAsync(newIndex, cancellationToken); + } + + private async IAsyncEnumerable GetIndexesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var indexes = 
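End-to-end usage sketch for the store above; the endpoint, key, collection name, and vectors are placeholders, the MemoryRecord.LocalRecord factory from the Semantic Kernel memory abstractions is assumed, and the relevant SKEXP diagnostics are assumed suppressed. Indexes are created on demand when the first record is upserted, sized from that record's embedding length (see CreateIndexAsync above).

using System;
using Microsoft.SemanticKernel.Connectors.AzureAISearch;
using Microsoft.SemanticKernel.Memory;

var store = new AzureAISearchMemoryStore("https://contoso.search.windows.net", "<admin-api-key>");

// Arguments: id, text, description, embedding.
var record = MemoryRecord.LocalRecord(
    "doc-001",
    "Contoso shipping policy ...",
    "Shipping policy summary",
    new float[] { 0.12f, -0.05f, 0.33f });

string key = await store.UpsertAsync("policies", record);

// Vector search; results below the minimum cosine similarity are filtered out.
await foreach (var (match, similarity) in store.GetNearestMatchesAsync(
    "policies", new float[] { 0.10f, -0.02f, 0.30f }, limit: 3, minRelevanceScore: 0.7))
{
    Console.WriteLine($"{match.Metadata.Id}: {similarity:F3}");
}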
this._adminClient.GetIndexesAsync(cancellationToken).ConfigureAwait(false); + await foreach (SearchIndex? index in indexes) + { + yield return index.Name; + } + } + + private async Task UpsertRecordAsync( + string indexName, + AzureAISearchMemoryRecord record, + CancellationToken cancellationToken = default) + { + var list = await this.UpsertBatchAsync(indexName, new List { record }, cancellationToken).ConfigureAwait(false); + return list.First(); + } + + private async Task> UpsertBatchAsync( + string indexName, + IList records, + CancellationToken cancellationToken = default) + { + var keys = new List(); + + if (records.Count < 1) { return keys; } + + var embeddingSize = records[0].Embedding.Length; + + var client = this.GetSearchClient(indexName); + + Task> UpsertCode() + { + return client.IndexDocumentsAsync( + IndexDocumentsBatch.Upload(records), + new IndexDocumentsOptions { ThrowOnAnyError = true }, + cancellationToken: cancellationToken); + } + + Response? result; + try + { + result = await UpsertCode().ConfigureAwait(false); + } + catch (RequestFailedException e) when (e.Status == 404) + { + await this.CreateIndexAsync(indexName, embeddingSize, cancellationToken).ConfigureAwait(false); + result = await UpsertCode().ConfigureAwait(false); + } + + if (result == null || result.Value.Results.Count == 0) + { + throw new KernelException("Memory write returned null or an empty set"); + } + + return result.Value.Results.Select(x => x.Key).ToList(); + } + + /// + /// Normalize index name to match Azure AI Search rules. + /// The method doesn't handle all the error scenarios, leaving it to the service + /// to throw an error for edge cases not handled locally. + /// + /// Value to normalize + /// The name of the argument used with . + /// Normalized name + private string NormalizeIndexName(string indexName, [CallerArgumentExpression("indexName")] string? parameterName = null) + { + if (indexName.Length > 128) + { + throw new ArgumentOutOfRangeException(parameterName, "The collection name is too long, it cannot exceed 128 chars."); + } + +#pragma warning disable CA1308 // The service expects a lowercase string + indexName = indexName.ToLowerInvariant(); +#pragma warning restore CA1308 + + return s_replaceIndexNameSymbolsRegex.Replace(indexName.Trim(), "-"); + } + + /// + /// Get a search client for the index specified. + /// Note: the index might not exist, but we avoid checking everytime and the extra latency. + /// + /// Index name + /// Search client ready to read/write + private SearchClient GetSearchClient(string indexName) + { + // Search an available client from the local cache + if (!this._clientsByIndex.TryGetValue(indexName, out SearchClient? client)) + { + client = this._adminClient.GetSearchClient(indexName); + this._clientsByIndex[indexName] = client; + } + + return client; + } + + /// + /// Options used by the Azure AI Search client, e.g. User Agent. + /// See also https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/src/DiagnosticsOptions.cs + /// + private static SearchClientOptions GetClientOptions() + { + return new SearchClientOptions + { + Diagnostics = + { + ApplicationId = HttpHeaderValues.UserAgent, + }, + }; + } + + /// + /// Executes a memory store operation by invoking the provided operation delegate. + /// + /// The return type of the operation. + /// The operation delegate to be executed. + /// The result of the memory store operation. 
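Two implementation notes on the helpers above. The private UpsertBatchAsync handles a missing index by catching the 404, creating the index from the first record's embedding size, and retrying the upload. NormalizeIndexName lower-cases the collection name and swaps a small set of separator characters for dashes, leaving anything else for the service to validate; a standalone sketch of that substitution with illustrative names:

using System;
using System.Text.RegularExpressions;

// Same pattern as s_replaceIndexNameSymbolsRegex above; inside a character class
// the '|' characters are literals, so pipes are replaced as well.
var separators = new Regex(@"[\s|\\|/|.|_|:]");
string Normalize(string name) => separators.Replace(name.Trim().ToLowerInvariant(), "-");

Console.WriteLine(Normalize("My_Collection Name")); // my-collection-name
Console.WriteLine(Normalize("docs/2024.archive"));  // docs-2024-archive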
+ private static T RunMemoryStoreOperation(Func operation) + { + try + { + return operation.Invoke(); + } + catch (RequestFailedException e) + { + throw e.ToHttpOperationException(); + } + } + + private static double ScoreToCosineSimilarity(double score) + { + // Azure AI Search score formula. The min value is 0.333 for cosine similarity -1. + score = Math.Max(score, 1.0 / 3); + return 2 - 1 / score; + } + + private static double CosineSimilarityToScore(double similarity) + { + return 1 / (2 - similarity); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj new file mode 100644 index 000000000000..51a5ebac723d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/Connectors.Memory.AzureAISearch.csproj @@ -0,0 +1,31 @@ + + + + Microsoft.SemanticKernel.Connectors.AzureAISearch + Microsoft.SemanticKernel.Connectors.AzureAISearch + netstandard2.0 + alpha + + NU5104 + + + + + + + + + Microsoft.SemanticKernel.Connectors.AzureAISearch + Semantic Kernel - Azure AI Search Semantic Memory + Azure AI Search Semantic Memory connector for Semantic Kernel + + + + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/RequestFailedExceptionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/RequestFailedExceptionExtensions.cs new file mode 100644 index 000000000000..5d8a6b7a7316 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/RequestFailedExceptionExtensions.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net; +using Azure; + +namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; + +/// +/// Provides extension methods for the class. +/// +internal static class RequestFailedExceptionExtensions +{ + /// + /// Converts a to an . + /// + /// The original . + /// An instance. + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1031:Do not catch general exception types", Justification = "By design. See comment below.")] + public static HttpOperationException ToHttpOperationException(this RequestFailedException exception) + { + const int NoResponseReceived = 0; + + string? responseContent = null; + + try + { + responseContent = exception.GetRawResponse()?.Content?.ToString(); + } + catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead. + + return new HttpOperationException( + exception.Status == NoResponseReceived ? null : (HttpStatusCode?)exception.Status, + responseContent, + exception.Message, + exception); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryRecord.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryRecord.cs deleted file mode 100644 index 2516e158d052..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryRecord.cs +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text; -using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; - -/// -/// Azure Cognitive Search record and index definition. -/// Note: once defined, index cannot be modified. 
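The two score helpers above are inverses on the valid range: CosineSimilarityToScore maps a similarity s to 1 / (2 - s), which is 1 / (1 + d) for cosine distance d = 1 - s, and ScoreToCosineSimilarity inverts that as 2 - 1 / score after flooring the score at 1/3, the value corresponding to s = -1. A quick numeric check of the round trip:

using System;

double CosineSimilarityToScore(double similarity) => 1 / (2 - similarity);
double ScoreToCosineSimilarity(double score) => 2 - 1 / Math.Max(score, 1.0 / 3);

double score = CosineSimilarityToScore(0.8);        // 1 / 1.2, about 0.8333
double roundTrip = ScoreToCosineSimilarity(score);  // back to about 0.8
Console.WriteLine($"{score:F4} -> {roundTrip:F4}"); // 0.8333 -> 0.8000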
-/// -public class AzureCognitiveSearchMemoryRecord -{ - /// - /// ID field name. - /// - public const string IdField = "Id"; - /// - /// Text field name. - /// - public const string TextField = "Text"; - /// - /// Embedding field name. - /// - public const string EmbeddingField = "Embedding"; - /// - /// External source name field name. - /// - public const string ExternalSourceNameField = "ExternalSourceName"; - /// - /// Description field name. - /// - public const string DescriptionField = "Description"; - /// - /// Additional metadata field name. - /// - public const string AdditionalMetadataField = "AdditionalMetadata"; - /// - /// Is reference field name. - /// - public const string IsReferenceField = "IsReference"; - - /// - /// Record ID. - /// The record is not filterable to save quota, also SK uses only semantic search. - /// - [JsonPropertyName(IdField)] - public string Id { get; set; } = string.Empty; - - /// - /// Content is stored here. - /// - [JsonPropertyName(TextField)] - public string? Text { get; set; } = string.Empty; - - /// - /// Content embedding - /// - [JsonPropertyName(EmbeddingField)] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] - public ReadOnlyMemory Embedding { get; set; } - - /// - /// Optional description of the content, e.g. a title. This can be useful when - /// indexing external data without pulling in the entire content. - /// - [JsonPropertyName(DescriptionField)] - public string? Description { get; set; } = string.Empty; - - /// - /// Additional metadata. Currently this is a string, where you could store serialized data as JSON. - /// In future the design might change to allow storing named values and leverage filters. - /// - [JsonPropertyName(AdditionalMetadataField)] - public string? AdditionalMetadata { get; set; } = string.Empty; - - /// - /// Name of the external source, in cases where the content and the ID are - /// referenced to external information. - /// - [JsonPropertyName(ExternalSourceNameField)] - public string ExternalSourceName { get; set; } = string.Empty; - - /// - /// Whether the record references external information. - /// - [JsonPropertyName(IsReferenceField)] - public bool IsReference { get; set; } = false; - - /// - /// Initializes a new instance of the class. - /// Required by JSON deserializer. - /// - public AzureCognitiveSearchMemoryRecord() - { - } - - /// - /// Initializes a new instance of the class with the specified ID. - /// - /// The record ID. - public AzureCognitiveSearchMemoryRecord(string id) - { - this.Id = EncodeId(id); - } - - /// - /// Initializes a new instance of the class with the specified parameters. - /// - /// The record ID. - /// The content stored in the record. - /// The name of the external source. - /// Whether the record references external information. - /// The content embedding. - /// The optional description of the content. - /// The additional metadata. - public AzureCognitiveSearchMemoryRecord( - string id, - string text, - string externalSourceName, - bool isReference, - ReadOnlyMemory embedding, - string? description = null, - string? additionalMetadata = null) - { - this.Id = EncodeId(id); - this.IsReference = isReference; - this.Embedding = embedding; - this.Text = text; - this.ExternalSourceName = externalSourceName; - this.Description = description; - this.AdditionalMetadata = additionalMetadata; - } - - /// - /// Converts the current instance to a object. - /// - /// A object. 
- public MemoryRecordMetadata ToMemoryRecordMetadata() - { - return new MemoryRecordMetadata( - isReference: this.IsReference, - id: DecodeId(this.Id), - text: this.Text ?? string.Empty, - description: this.Description ?? string.Empty, - externalSourceName: this.ExternalSourceName, - additionalMetadata: this.AdditionalMetadata ?? string.Empty); - } - - /// - /// Creates a new object from the specified . - /// - /// The object. - /// A new object. - public static AzureCognitiveSearchMemoryRecord FromMemoryRecord(MemoryRecord record) - { - return new AzureCognitiveSearchMemoryRecord( - id: record.Metadata.Id, - text: record.Metadata.Text, - externalSourceName: string.Empty, - isReference: record.Metadata.IsReference, - description: record.Metadata.Description, - additionalMetadata: record.Metadata.AdditionalMetadata, - embedding: record.Embedding - ); - } - - /// - /// Converts the current instance to a object. - /// - /// Whether to include embeddings in the resulting . - /// A object. - public MemoryRecord ToMemoryRecord(bool withEmbeddings = true) - { - return new MemoryRecord( - metadata: this.ToMemoryRecordMetadata(), - embedding: withEmbeddings ? this.Embedding : default, - key: this.Id); - } - - /// - /// Encodes the specified ID using a URL-safe algorithm. - /// ACS keys can contain only letters, digits, underscore, dash, equal sign, recommending - /// to encode values with a URL-safe algorithm. - /// - /// The original ID. - /// The encoded ID. - protected internal static string EncodeId(string realId) - { - var bytes = Encoding.UTF8.GetBytes(realId); - return Convert.ToBase64String(bytes); - } - - /// - /// Decodes the specified encoded ID. - /// - /// The encoded ID. - /// The decoded ID. - private protected static string DecodeId(string encodedId) - { - var bytes = Convert.FromBase64String(encodedId); - return Encoding.UTF8.GetString(bytes); - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryStore.cs deleted file mode 100644 index 2f379e109966..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/AzureCognitiveSearchMemoryStore.cs +++ /dev/null @@ -1,458 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; -using System.Text.RegularExpressions; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.Core; -using Azure.Search.Documents; -using Azure.Search.Documents.Indexes; -using Azure.Search.Documents.Indexes.Models; -using Azure.Search.Documents.Models; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Memory; - -namespace Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; - -/// -/// AzureCognitiveSearchMemoryStore is a memory store implementation using Azure Cognitive Search. -/// -public class AzureCognitiveSearchMemoryStore : IMemoryStore -{ - /// - /// Create a new instance of memory storage using Azure Cognitive Search. - /// - /// Azure Cognitive Search URI, e.g. 
"https://contoso.search.windows.net" - /// API Key - public AzureCognitiveSearchMemoryStore(string endpoint, string apiKey) - { - AzureKeyCredential credentials = new(apiKey); - this._adminClient = new SearchIndexClient(new Uri(endpoint), credentials, GetClientOptions()); - } - - /// - /// Create a new instance of memory storage using Azure Cognitive Search. - /// - /// Azure Cognitive Search URI, e.g. "https://contoso.search.windows.net" - /// Azure service - public AzureCognitiveSearchMemoryStore(string endpoint, TokenCredential credentials) - { - this._adminClient = new SearchIndexClient(new Uri(endpoint), credentials, GetClientOptions()); - } - - /// - public Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default) - { - // Indexes are created when sending a record. The creation requires the size of the embeddings. - return Task.CompletedTask; - } - - /// - public IAsyncEnumerable GetCollectionsAsync(CancellationToken cancellationToken = default) - { - return RunMemoryStoreOperation(() => this.GetIndexesAsync(cancellationToken)); - } - - /// - public async Task DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default) - { - var normalizedIndexName = this.NormalizeIndexName(collectionName); - - var indexes = RunMemoryStoreOperation(() => this.GetIndexesAsync(cancellationToken)); - - return await indexes - .AnyAsync(index => - string.Equals(index, collectionName, StringComparison.OrdinalIgnoreCase) || - string.Equals(index, normalizedIndexName, StringComparison.OrdinalIgnoreCase), - cancellationToken: cancellationToken - ) - .ConfigureAwait(false); - } - - /// - public Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default) - { - var normalizedIndexName = this.NormalizeIndexName(collectionName); - - return RunMemoryStoreOperation(() => this._adminClient.DeleteIndexAsync(normalizedIndexName, cancellationToken)); - } - - /// - public Task UpsertAsync(string collectionName, MemoryRecord record, CancellationToken cancellationToken = default) - { - var normalizedIndexName = this.NormalizeIndexName(collectionName); - - return RunMemoryStoreOperation(() => this.UpsertRecordAsync(normalizedIndexName, AzureCognitiveSearchMemoryRecord.FromMemoryRecord(record), cancellationToken)); - } - - /// - public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IEnumerable records, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var normalizedIndexName = this.NormalizeIndexName(collectionName); - - var searchRecords = records.Select(AzureCognitiveSearchMemoryRecord.FromMemoryRecord).ToList(); - - var result = await RunMemoryStoreOperation(() => this.UpsertBatchAsync(normalizedIndexName, searchRecords, cancellationToken)).ConfigureAwait(false); - - foreach (var x in result) { yield return x; } - } - - /// - public async Task GetAsync(string collectionName, string key, bool withEmbedding = false, CancellationToken cancellationToken = default) - { - var normalizedIndexName = this.NormalizeIndexName(collectionName); - var client = this.GetSearchClient(normalizedIndexName); - - var encodedId = AzureCognitiveSearchMemoryRecord.EncodeId(key); - - Response? 
result; - - try - { - result = await RunMemoryStoreOperation(async () => - { - return await client - .GetDocumentAsync(encodedId, cancellationToken: cancellationToken) - .ConfigureAwait(false); - }).ConfigureAwait(false); - } - catch (HttpOperationException e) when (e.StatusCode == System.Net.HttpStatusCode.NotFound) - { - // Index not found, no data to return - return null; - } - - if (result?.Value == null) - { - throw new SKException("Memory read returned null"); - } - - return result.Value.ToMemoryRecord(); - } - - /// - public async IAsyncEnumerable GetBatchAsync( - string collectionName, - IEnumerable keys, - bool withEmbeddings = false, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - foreach (var key in keys) - { - var record = await this.GetAsync(collectionName, key, withEmbeddings, cancellationToken).ConfigureAwait(false); - if (record != null) { yield return record; } - } - } - - /// - public async Task<(MemoryRecord, double)?> GetNearestMatchAsync( - string collectionName, - ReadOnlyMemory embedding, - double minRelevanceScore = 0, - bool withEmbedding = false, - CancellationToken cancellationToken = default) - { - return await this.GetNearestMatchesAsync(collectionName, embedding, 1, minRelevanceScore, withEmbedding, cancellationToken) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - } - - /// - public async IAsyncEnumerable<(MemoryRecord, double)> GetNearestMatchesAsync( - string collectionName, - ReadOnlyMemory embedding, - int limit, - double minRelevanceScore = 0, - bool withEmbeddings = false, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - // Cosine similarity range: -1 .. +1 - minRelevanceScore = Math.Max(-1, Math.Min(1, minRelevanceScore)); - - var normalizedIndexName = this.NormalizeIndexName(collectionName); - - var client = this.GetSearchClient(normalizedIndexName); - - RawVectorQuery vectorQuery = new() - { - KNearestNeighborsCount = limit, - Fields = { AzureCognitiveSearchMemoryRecord.EmbeddingField }, - Vector = MemoryMarshal.TryGetArray(embedding, out var array) && array.Count == embedding.Length ? array.Array! : embedding.ToArray(), - }; - - SearchOptions options = new() - { - VectorQueries = { vectorQuery } - }; - - Response>? searchResult = null; - try - { - searchResult = await RunMemoryStoreOperation(async () => - { - return await client - .SearchAsync(null, options, cancellationToken: cancellationToken) - .ConfigureAwait(false); - }).ConfigureAwait(false); - } - catch (HttpOperationException e) when (e.StatusCode == System.Net.HttpStatusCode.NotFound) - { - // Index not found, no data to return - } - - if (searchResult == null) { yield break; } - - var minAzureSearchScore = CosineSimilarityToScore(minRelevanceScore); - await foreach (SearchResult? doc in searchResult.Value.GetResultsAsync()) - { - if (doc == null || doc.Score < minAzureSearchScore) { continue; } - - MemoryRecord memoryRecord = doc.Document.ToMemoryRecord(withEmbeddings); - - yield return (memoryRecord, ScoreToCosineSimilarity(doc.Score ?? 
0)); - } - } - - /// - public Task RemoveAsync(string collectionName, string key, CancellationToken cancellationToken = default) - { - return this.RemoveBatchAsync(collectionName, new[] { key }, cancellationToken); - } - - /// - public async Task RemoveBatchAsync(string collectionName, IEnumerable keys, CancellationToken cancellationToken = default) - { - var normalizedIndexName = this.NormalizeIndexName(collectionName); - - var records = keys.Select(x => new AzureCognitiveSearchMemoryRecord(x)); - - var client = this.GetSearchClient(normalizedIndexName); - try - { - await RunMemoryStoreOperation(() => client.DeleteDocumentsAsync(records, cancellationToken: cancellationToken)).ConfigureAwait(false); - } - catch (HttpOperationException e) when (e.StatusCode == System.Net.HttpStatusCode.NotFound) - { - // Index not found, no data to delete - } - } - - #region private - - /// - /// Index names cannot contain special chars. We use this rule to replace a few common ones - /// with an underscore and reduce the chance of errors. If other special chars are used, we leave it - /// to the service to throw an error. - /// Note: - /// - replacing chars introduces a small chance of conflicts, e.g. "the-user" and "the_user". - /// - we should consider whether making this optional and leave it to the developer to handle. - /// - private static readonly Regex s_replaceIndexNameSymbolsRegex = new(@"[\s|\\|/|.|_|:]"); - - private readonly ConcurrentDictionary _clientsByIndex = new(); - - private readonly SearchIndexClient _adminClient; - - /// - /// Create a new search index. - /// - /// Index name - /// Size of the embedding vector - /// Task cancellation token - private Task> CreateIndexAsync( - string indexName, - int embeddingSize, - CancellationToken cancellationToken = default) - { - if (embeddingSize < 1) - { - throw new SKException("Invalid embedding size: the value must be greater than zero."); - } - - const string ProfileName = "searchProfile"; - const string AlgorithmName = "searchAlgorithm"; - - var newIndex = new SearchIndex(indexName) - { - Fields = new List - { - new SimpleField(AzureCognitiveSearchMemoryRecord.IdField, SearchFieldDataType.String) { IsKey = true }, - new SearchField(AzureCognitiveSearchMemoryRecord.EmbeddingField, SearchFieldDataType.Collection(SearchFieldDataType.Single)) - { - IsSearchable = true, - VectorSearchDimensions = embeddingSize, - VectorSearchProfile = ProfileName - }, - new SearchField(AzureCognitiveSearchMemoryRecord.TextField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, - new SimpleField(AzureCognitiveSearchMemoryRecord.DescriptionField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, - new SimpleField(AzureCognitiveSearchMemoryRecord.AdditionalMetadataField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, - new SimpleField(AzureCognitiveSearchMemoryRecord.ExternalSourceNameField, SearchFieldDataType.String) { IsFilterable = true, IsFacetable = true }, - new SimpleField(AzureCognitiveSearchMemoryRecord.IsReferenceField, SearchFieldDataType.Boolean) { IsFilterable = true, IsFacetable = true }, - }, - VectorSearch = new VectorSearch - { - Algorithms = - { - new HnswVectorSearchAlgorithmConfiguration(AlgorithmName) - { - Parameters = new HnswParameters { Metric = VectorSearchAlgorithmMetric.Cosine } - } - }, - Profiles = { new VectorSearchProfile(ProfileName, AlgorithmName) } - } - }; - - return this._adminClient.CreateIndexAsync(newIndex, cancellationToken); - } - - private async 
IAsyncEnumerable GetIndexesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var indexes = this._adminClient.GetIndexesAsync(cancellationToken).ConfigureAwait(false); - await foreach (SearchIndex? index in indexes) - { - yield return index.Name; - } - } - - private async Task UpsertRecordAsync( - string indexName, - AzureCognitiveSearchMemoryRecord record, - CancellationToken cancellationToken = default) - { - var list = await this.UpsertBatchAsync(indexName, new List { record }, cancellationToken).ConfigureAwait(false); - return list.First(); - } - - private async Task> UpsertBatchAsync( - string indexName, - IList records, - CancellationToken cancellationToken = default) - { - var keys = new List(); - - if (records.Count < 1) { return keys; } - - var embeddingSize = records[0].Embedding.Length; - - var client = this.GetSearchClient(indexName); - - Task> UpsertCode() - { - return client.IndexDocumentsAsync( - IndexDocumentsBatch.Upload(records), - new IndexDocumentsOptions { ThrowOnAnyError = true }, - cancellationToken: cancellationToken); - } - - Response? result; - try - { - result = await UpsertCode().ConfigureAwait(false); - } - catch (RequestFailedException e) when (e.Status == 404) - { - await this.CreateIndexAsync(indexName, embeddingSize, cancellationToken).ConfigureAwait(false); - result = await UpsertCode().ConfigureAwait(false); - } - - if (result == null || result.Value.Results.Count == 0) - { - throw new SKException("Memory write returned null or an empty set"); - } - - return result.Value.Results.Select(x => x.Key).ToList(); - } - - /// - /// Normalize index name to match ACS rules. - /// The method doesn't handle all the error scenarios, leaving it to the service - /// to throw an error for edge cases not handled locally. - /// - /// Value to normalize - /// Normalized name - private string NormalizeIndexName(string indexName) - { - if (indexName.Length > 128) - { - throw new SKException("The collection name is too long, it cannot exceed 128 chars."); - } - -#pragma warning disable CA1308 // The service expects a lowercase string - indexName = indexName.ToLowerInvariant(); -#pragma warning restore CA1308 - - return s_replaceIndexNameSymbolsRegex.Replace(indexName.Trim(), "-"); - } - - /// - /// Get a search client for the index specified. - /// Note: the index might not exist, but we avoid checking everytime and the extra latency. - /// - /// Index name - /// Search client ready to read/write - private SearchClient GetSearchClient(string indexName) - { - // Search an available client from the local cache - if (!this._clientsByIndex.TryGetValue(indexName, out SearchClient client)) - { - client = this._adminClient.GetSearchClient(indexName); - this._clientsByIndex[indexName] = client; - } - - return client; - } - - /// - /// Options used by the Azure Cognitive Search client, e.g. User Agent. - /// See also https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/src/DiagnosticsOptions.cs - /// - private static SearchClientOptions GetClientOptions() - { - return new SearchClientOptions - { - Diagnostics = - { - IsTelemetryEnabled = Telemetry.IsTelemetryEnabled, - ApplicationId = Telemetry.HttpUserAgent, - }, - }; - } - - /// - /// Executes a memory store operation by invoking the provided operation delegate. - /// - /// The return type of the operation. - /// The operation delegate to be executed. - /// The result of the memory store operation. 
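The removed store relies on two small string transforms: NormalizeIndexName lowercases the collection name and swaps common separator characters for dashes before using it as an index name, and EncodeId/DecodeId Base64-encode record keys, since the removed comments note that Azure Cognitive Search keys only allow letters, digits, underscores, dashes, and equal signs. A minimal standalone sketch of both transforms, mirroring the regex and Base64 calls in the deleted code; the class and method names here are illustrative only:

```csharp
using System;
using System.Text;
using System.Text.RegularExpressions;

internal static class AcsNamingSketch
{
    // Same character class as the removed s_replaceIndexNameSymbolsRegex: whitespace, \, /, ., _, :
    private static readonly Regex s_indexNameSymbols = new(@"[\s|\\|/|.|_|:]");

    public static string NormalizeIndexName(string indexName) =>
        s_indexNameSymbols.Replace(indexName.Trim().ToLowerInvariant(), "-");

    public static string EncodeId(string realId) =>
        Convert.ToBase64String(Encoding.UTF8.GetBytes(realId));

    public static string DecodeId(string encodedId) =>
        Encoding.UTF8.GetString(Convert.FromBase64String(encodedId));

    public static void Main()
    {
        Console.WriteLine(NormalizeIndexName("My Notes_v1")); // my-notes-v1
        Console.WriteLine(DecodeId(EncodeId("user/42")));     // user/42 (round-trips)
    }
}
```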
- private static T RunMemoryStoreOperation(Func operation) - { - try - { - return operation.Invoke(); - } - catch (RequestFailedException e) - { - throw e.ToHttpOperationException(); - } - } - - private static double ScoreToCosineSimilarity(double score) - { - // Azure Cognitive Search score formula. The min value is 0.333 for cosine similarity -1. - score = Math.Max(score, 1.0 / 3); - return 2 - 1 / score; - } - - private static double CosineSimilarityToScore(double similarity) - { - return 1 / (2 - similarity); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/Connectors.Memory.AzureCognitiveSearch.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/Connectors.Memory.AzureCognitiveSearch.csproj deleted file mode 100644 index 2c5fd3b10aab..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/Connectors.Memory.AzureCognitiveSearch.csproj +++ /dev/null @@ -1,31 +0,0 @@ - - - - Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch - Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch - netstandard2.0 - - - NU5104 - - - - - - - - - Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch - Semantic Kernel - Azure Cognitive Search Semantic Memory - Azure Cognitive Search Semantic Memory connector for Semantic Kernel - - - - - - - - - - - diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/RequestFailedExceptionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/RequestFailedExceptionExtensions.cs deleted file mode 100644 index 6b1d3d99d598..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCognitiveSearch/RequestFailedExceptionExtensions.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net; -using Azure; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch; - -/// -/// Provides extension methods for the class. -/// -public static class RequestFailedExceptionExtensions -{ - /// - /// Converts a to an . - /// - /// The original . - /// An instance. - [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1031:Do not catch general exception types", Justification = "By design. See comment below.")] - public static HttpOperationException ToHttpOperationException(this RequestFailedException exception) - { - const int NoResponseReceived = 0; - - string? responseContent = null; - - try - { - responseContent = exception.GetRawResponse()?.Content?.ToString(); - } - catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead. - - return new HttpOperationException( - exception.Status == NoResponseReceived ? null : (HttpStatusCode?)exception.Status, - responseContent, - exception.Message, - exception); - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/AssemblyInfo.cs new file mode 100644 index 000000000000..e0a4e38daa74 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
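The two conversion helpers deleted above define the mapping between the Azure Cognitive Search relevance score and cosine similarity: score = 1 / (2 - similarity) and, inversely, similarity = 2 - 1 / score. That is where the 0.333 floor mentioned in the removed comments comes from: a cosine similarity of -1 maps to 1 / (2 - (-1)) = 1/3. A small self-contained check of the round trip; names are illustrative:

```csharp
using System;

internal static class ScoreMappingSketch
{
    // similarity in [-1, 1] -> score in [1/3, 1]
    public static double CosineSimilarityToScore(double similarity) => 1 / (2 - similarity);

    // score -> similarity, clamping at the 1/3 floor like the removed code
    public static double ScoreToCosineSimilarity(double score) => 2 - 1 / Math.Max(score, 1.0 / 3);

    public static void Main()
    {
        foreach (var similarity in new[] { -1.0, 0.0, 0.5, 1.0 })
        {
            var score = CosineSimilarityToScore(similarity);
            Console.WriteLine($"similarity {similarity,4} -> score {score:F3} -> back {ScoreToCosineSimilarity(score):F3}");
        }
        // similarity   -1 -> score 0.333 -> back -1.000
        // similarity    0 -> score 0.500 -> back  0.000
        // similarity  0.5 -> score 0.667 -> back  0.500
        // similarity    1 -> score 1.000 -> back  1.000
    }
}
```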
+[assembly: Experimental("SKEXP0022")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaClient.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaClient.cs index 63fdd8ef0b8e..ec85b9b0771e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaClient.cs @@ -10,11 +10,9 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma; +namespace Microsoft.SemanticKernel.Connectors.Chroma; /// /// An implementation of a client for the Chroma Vector DB. This class is used to @@ -31,9 +29,11 @@ public class ChromaClient : IChromaClient /// The to use for logging. If null, no logging will be performed. public ChromaClient(string endpoint, ILoggerFactory? loggerFactory = null) { - this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); + Verify.NotNull(endpoint); + + this._httpClient = HttpClientProvider.GetHttpClient(); this._endpoint = endpoint; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(ChromaClient)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(ChromaClient)) ?? NullLogger.Instance; } /// @@ -42,17 +42,17 @@ public ChromaClient(string endpoint, ILoggerFactory? loggerFactory = null) /// The instance used for making HTTP requests. /// Chroma server endpoint URL. /// The to use for logging. If null, no logging will be performed. - /// Occurs when doesn't have base address and endpoint parameter is not provided. + /// Occurs when doesn't have base address and endpoint parameter is not provided. public ChromaClient(HttpClient httpClient, string? endpoint = null, ILoggerFactory? loggerFactory = null) { if (string.IsNullOrEmpty(httpClient.BaseAddress?.AbsoluteUri) && string.IsNullOrEmpty(endpoint)) { - throw new SKException("The HttpClient BaseAddress and endpoint are both null or empty. Please ensure at least one is provided."); + throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); } this._httpClient = httpClient; this._endpoint = endpoint; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(ChromaClient)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(ChromaClient)) ?? NullLogger.Instance; } /// @@ -166,10 +166,10 @@ public async Task QueryEmbeddingsAsync(string collection HttpRequestMessage request, CancellationToken cancellationToken = default) { - string endpoint = this._endpoint ?? this._httpClient.BaseAddress.ToString(); + string endpoint = this._endpoint ?? 
this._httpClient.BaseAddress!.ToString(); endpoint = this.SanitizeEndpoint(endpoint); - string operationName = request.RequestUri.ToString(); + string operationName = request.RequestUri!.ToString(); request.RequestUri = new Uri(new Uri(endpoint), operationName); diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaKernelBuilderExtensions.cs deleted file mode 100644 index 8366997c1ca0..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaKernelBuilderExtensions.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Net.Http; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma; - -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class to configure Chroma memory connector. -/// -[Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use ChromaMemoryBuilderExtensions instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class ChromaKernelBuilderExtensions -{ - /// - /// Registers Chroma memory connector. - /// - /// The instance. - /// Chroma server endpoint URL. - /// Self instance. - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use ChromaMemoryBuilderExtensions.WithChromaMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithChromaMemoryStore(this KernelBuilder builder, string endpoint) - { - builder.WithMemoryStorage((loggerFactory, httpHandlerFactory) => - { - return new ChromaMemoryStore( - HttpClientProvider.GetHttpClient(httpHandlerFactory, null, loggerFactory), - endpoint, - loggerFactory); - }); - - return builder; - } - - /// - /// Registers Chroma memory connector. - /// - /// The instance. - /// The instance used for making HTTP requests. - /// Chroma server endpoint URL. If not specified, the base address of the HTTP client is used. - /// Self instance. - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use ChromaMemoryBuilderExtensions.WithChromaMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithChromaMemoryStore(this KernelBuilder builder, - HttpClient httpClient, - string? endpoint = null) - { - builder.WithMemoryStorage((loggerFactory, httpHandlerFactory) => - { - return new ChromaMemoryStore( - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - endpoint, - loggerFactory); - }); - - return builder; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryBuilderExtensions.cs index cbe98e0a6748..67476dd72f19 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryBuilderExtensions.cs @@ -1,9 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. 
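For context on the ChromaClient constructor changes above: the client still supports either an explicit endpoint or a caller-supplied HttpClient whose BaseAddress provides it, but a missing endpoint now surfaces as an ArgumentException rather than an SK-specific exception. A rough usage sketch of the three cases; the localhost URL is a placeholder:

```csharp
using System;
using System.Net.Http;
using Microsoft.SemanticKernel.Connectors.Chroma;

// Case 1: endpoint string only; the connector supplies the HttpClient.
var client = new ChromaClient("http://localhost:8000");

// Case 2: caller-owned HttpClient with a BaseAddress; no endpoint argument needed.
using var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost:8000") };
var clientFromHttpClient = new ChromaClient(httpClient);

// Case 3: neither a BaseAddress nor an endpoint -> ArgumentException.
try
{
    var invalid = new ChromaClient(new HttpClient());
}
catch (ArgumentException e)
{
    Console.WriteLine(e.Message);
}
```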
using System.Net.Http; -using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma; +namespace Microsoft.SemanticKernel.Connectors.Chroma; /// /// Provides extension methods for the class to configure Chroma memory connector. @@ -18,10 +19,10 @@ public static class ChromaMemoryBuilderExtensions /// Updated Memory builder including Chroma memory connector. public static MemoryBuilder WithChromaMemoryStore(this MemoryBuilder builder, string endpoint) { - builder.WithMemoryStore((loggerFactory, httpHandlerFactory) => + builder.WithMemoryStore((loggerFactory, injectedClient) => { return new ChromaMemoryStore( - HttpClientProvider.GetHttpClient(httpHandlerFactory, null, loggerFactory), + HttpClientProvider.GetHttpClient(injectedClient), endpoint, loggerFactory); }); @@ -41,10 +42,10 @@ public static MemoryBuilder WithChromaMemoryStore( HttpClient httpClient, string? endpoint = null) { - builder.WithMemoryStore((loggerFactory, httpHandlerFactory) => + builder.WithMemoryStore((loggerFactory, injectedClient) => { return new ChromaMemoryStore( - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), + HttpClientProvider.GetHttpClient(httpClient ?? injectedClient), endpoint, loggerFactory); }); diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryStore.cs index 28fa6cc6a063..6dec81adbaec 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/ChromaMemoryStore.cs @@ -11,12 +11,10 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Memory; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma; +namespace Microsoft.SemanticKernel.Connectors.Chroma; /// /// An implementation of for Chroma. @@ -52,7 +50,7 @@ public ChromaMemoryStore(HttpClient httpClient, string? endpoint = null, ILogger public ChromaMemoryStore(IChromaClient client, ILoggerFactory? loggerFactory = null) { this._chromaClient = client; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(ChromaMemoryStore)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(ChromaMemoryStore)) ?? 
NullLogger.Instance; } /// @@ -75,7 +73,7 @@ public async Task DeleteCollectionAsync(string collectionName, CancellationToken catch (HttpOperationException e) when (VerifyCollectionDoesNotExistMessage(e.ResponseContent, collectionName)) { this._logger.LogError("Cannot delete non-existent collection {0}", collectionName); - throw new SKException($"Cannot delete non-existent collection {collectionName}", e); + throw new KernelException($"Cannot delete non-existent collection {collectionName}", e); } } @@ -232,16 +230,11 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE private readonly IChromaClient _chromaClient; private readonly List _defaultEmbeddingIncludeTypes = new() { IncludeMetadatas }; - private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() - { - Converters = { new ReadOnlyMemoryConverter() } - }; - private async Task GetCollectionOrThrowAsync(string collectionName, CancellationToken cancellationToken) { return await this.GetCollectionAsync(collectionName, cancellationToken).ConfigureAwait(false) ?? - throw new SKException($"Collection {collectionName} does not exist"); + throw new KernelException($"Collection {collectionName} does not exist"); } private async Task GetCollectionAsync(string collectionName, CancellationToken cancellationToken) @@ -307,11 +300,11 @@ private MemoryRecord GetMemoryRecordFromModel(List>? private MemoryRecordMetadata GetMetadataForMemoryRecord(List>? metadatas, int recordIndex) { - var serializedMetadata = metadatas != null ? JsonSerializer.Serialize(metadatas[recordIndex], s_jsonSerializerOptions) : string.Empty; + var serializedMetadata = metadatas != null ? JsonSerializer.Serialize(metadatas[recordIndex], JsonOptionsCache.Default) : string.Empty; return - JsonSerializer.Deserialize(serializedMetadata, ChromaMemoryStore.s_jsonSerializerOptions) ?? - throw new SKException("Unable to deserialize memory record metadata."); + JsonSerializer.Deserialize(serializedMetadata, JsonOptionsCache.Default) ?? + throw new KernelException("Unable to deserialize memory record metadata."); } private ReadOnlyMemory GetEmbeddingForMemoryRecord(List? embeddings, int recordIndex) diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Connectors.Memory.Chroma.csproj b/dotnet/src/Connectors/Connectors.Memory.Chroma/Connectors.Memory.Chroma.csproj index 4af82ec792a7..124a54fbbf8b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Connectors.Memory.Chroma.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Connectors.Memory.Chroma.csproj @@ -2,9 +2,10 @@ - Microsoft.SemanticKernel.Connectors.Memory.Chroma + Microsoft.SemanticKernel.Connectors.Chroma $(AssemblyName) netstandard2.0 + alpha @@ -22,7 +23,6 @@ - diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaCollectionModel.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaCollectionModel.cs index 1106154e937e..40a8b1f4c1e2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaCollectionModel.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaCollectionModel.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Chroma; /// /// Chroma collection model. 
diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaEmbeddingsModel.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaEmbeddingsModel.cs index 6b91281c4320..16232e8e5ed7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaEmbeddingsModel.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaEmbeddingsModel.cs @@ -3,7 +3,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Chroma; /// /// Chroma embeddings model. diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaQueryResultModel.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaQueryResultModel.cs index ef10885fbd1f..bdbf8d6b7906 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaQueryResultModel.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/ChromaQueryResultModel.cs @@ -3,7 +3,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Chroma; /// /// Chroma query result model. Contains result sets after search operation. diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/CreateCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/CreateCollectionRequest.cs index f3bb44acf110..40cf511ef629 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/CreateCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/CreateCollectionRequest.cs @@ -3,7 +3,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; +namespace Microsoft.SemanticKernel.Connectors.Chroma; internal sealed class CreateCollectionRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/DeleteCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/DeleteCollectionRequest.cs index fee2ee0d9cdd..4e8ed2066aaf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/DeleteCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/DeleteCollectionRequest.cs @@ -3,7 +3,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; +namespace Microsoft.SemanticKernel.Connectors.Chroma; internal sealed class DeleteCollectionRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/DeleteEmbeddingsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/DeleteEmbeddingsRequest.cs index 3d4796968349..4d2a20a1988d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/DeleteEmbeddingsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/DeleteEmbeddingsRequest.cs @@ -3,7 +3,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; +namespace Microsoft.SemanticKernel.Connectors.Chroma; internal sealed class 
DeleteEmbeddingsRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/GetCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/GetCollectionRequest.cs index 6d22969a880a..08f9bb6d9482 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/GetCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/GetCollectionRequest.cs @@ -3,7 +3,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; +namespace Microsoft.SemanticKernel.Connectors.Chroma; internal sealed class GetCollectionRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/GetEmbeddingsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/GetEmbeddingsRequest.cs index 2f83c0395be4..db1437ff5781 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/GetEmbeddingsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/GetEmbeddingsRequest.cs @@ -3,7 +3,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; +namespace Microsoft.SemanticKernel.Connectors.Chroma; internal sealed class GetEmbeddingsRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/ListCollectionsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/ListCollectionsRequest.cs index 302811df89c8..55275b910e8a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/ListCollectionsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/ListCollectionsRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; +namespace Microsoft.SemanticKernel.Connectors.Chroma; internal sealed class ListCollectionsRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/QueryEmbeddingsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/QueryEmbeddingsRequest.cs index d350d6a14a8f..64e0cec50056 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/QueryEmbeddingsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/QueryEmbeddingsRequest.cs @@ -4,7 +4,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; +namespace Microsoft.SemanticKernel.Connectors.Chroma; internal sealed class QueryEmbeddingsRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/UpsertEmbeddingsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/UpsertEmbeddingsRequest.cs index 354c808f7bd0..cba49003d7af 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/UpsertEmbeddingsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/Http/ApiSchema/Internal/UpsertEmbeddingsRequest.cs @@ -4,7 +4,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema.Internal; +namespace Microsoft.SemanticKernel.Connectors.Chroma; internal sealed class 
UpsertEmbeddingsRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/IChromaClient.cs b/dotnet/src/Connectors/Connectors.Memory.Chroma/IChromaClient.cs index e650c0b1388d..fbecb112acf2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/IChromaClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/IChromaClient.cs @@ -4,9 +4,8 @@ using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema; -namespace Microsoft.SemanticKernel.Connectors.Memory.Chroma; +namespace Microsoft.SemanticKernel.Connectors.Chroma; /// /// Interface for client to make requests to Chroma API. diff --git a/dotnet/src/Connectors/Connectors.Memory.Chroma/README.md b/dotnet/src/Connectors/Connectors.Memory.Chroma/README.md index a505f2001de8..2d6e09fbca90 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Chroma/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Chroma/README.md @@ -1,4 +1,4 @@ -# Microsoft.SemanticKernel.Connectors.Memory.Chroma +# Microsoft.SemanticKernel.Connectors.Chroma This assembly contains implementation of Semantic Kernel Memory Store using [Chroma](https://docs.trychroma.com/), open-source embedding database. @@ -20,15 +20,17 @@ docker-compose up -d --build ``` 3. Use Semantic Kernel with Chroma, using server local endpoint `http://localhost:8000`: + + > See [Example 14](../../../samples/KernelSyntaxExamples/Example14_SemanticMemory.cs) and [Example 15](../../../samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs) for more memory usage examples with the kernel. + ```csharp const string endpoint = "http://localhost:8000"; -ChromaMemoryStore memoryStore = new(endpoint); - -IKernel kernel = new KernelBuilder() - .WithLogger(logger) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", "OPENAI_API_KEY") - .WithMemoryStorage(memoryStore) - //.WithChromaMemoryStore(endpoint) // This method offers an alternative approach to registering Chroma memory store. +var memoryWithChroma = new MemoryBuilder() + .WithChromaMemoryStore(endpoint) + .WithLoggerFactory(loggerFactory) + .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", apiKey) .Build(); + +var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(memoryWithChroma)); ``` diff --git a/dotnet/src/Connectors/Connectors.Memory.DuckDB/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.DuckDB/AssemblyInfo.cs new file mode 100644 index 000000000000..aa5b79d50445 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.DuckDB/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
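As a follow-up to the Chroma README snippet above, the memory instance built there can also be used directly rather than only through TextMemoryPlugin. A hedged sketch that continues that snippet and assumes the `memoryWithChroma` variable from it; the collection name and text are made up, and the SaveInformationAsync/SearchAsync signatures may differ slightly between Semantic Kernel versions:

```csharp
using System;

const string collection = "docs";

// Store a fact in the Chroma-backed memory.
await memoryWithChroma.SaveInformationAsync(collection, text: "Chroma runs locally on port 8000", id: "chroma-port");

// Recall it with a semantic query.
await foreach (var result in memoryWithChroma.SearchAsync(collection, "Which port does Chroma listen on?", limit: 1))
{
    Console.WriteLine($"{result.Metadata.Id}: {result.Metadata.Text} (relevance {result.Relevance:F2})");
}
```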
+[assembly: Experimental("SKEXP0023")] diff --git a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj index 6b94fef2bb35..06f016cb01a6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Connectors.Memory.DuckDB.csproj @@ -2,9 +2,10 @@ - Microsoft.SemanticKernel.Connectors.Memory.DuckDB + Microsoft.SemanticKernel.Connectors.DuckDB $(AssemblyName) netstandard2.0 + alpha diff --git a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Database.cs b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Database.cs index 0883f4f2e3f9..f06a979d55c2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Database.cs +++ b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Database.cs @@ -9,7 +9,7 @@ using System.Threading.Tasks; using DuckDB.NET.Data; -namespace Microsoft.SemanticKernel.Connectors.Memory.DuckDB; +namespace Microsoft.SemanticKernel.Connectors.DuckDB; internal struct DatabaseEntry { @@ -17,7 +17,7 @@ internal struct DatabaseEntry public string MetadataString { get; set; } - public string EmbeddingString { get; set; } + public float[] Embedding { get; set; } public string? Timestamp { get; set; } @@ -28,19 +28,7 @@ internal sealed class Database { private const string TableName = "SKMemoryTable"; - public Task CreateFunctionsAsync(DuckDBConnection conn, CancellationToken cancellationToken) - { - using var cmd = conn.CreateCommand(); - cmd.CommandText = @" - CREATE OR REPLACE MACRO cosine_similarity(a,b) AS (select sum (xy) from (select x * y as xy from (select UNNEST(a) as x, UNNEST(b) as y))) / sqrt(list_aggregate(list_transform(a, x -> x * x), 'sum') * list_aggregate(list_transform(b, x -> x * x), 'sum')); - CREATE OR REPLACE MACRO split_string_of_numbers(t) AS regexp_extract_all(regexp_replace(t,'(\[|\])', '', 'g'), '([+-]?([0-9]*[.])?[0-9]+)(\s*;\s*)?',1); - CREATE OR REPLACE MACRO number_vector_decoder(t) AS list_transform(split_string_of_numbers(t), x -> cast(x AS double)); - CREATE OR REPLACE MACRO encode_number_vector(t) AS concat('[',list_aggregate(list_transform(t, x -> cast(x AS string)), 'string_agg', '; '),']'); - "; - return cmd.ExecuteNonQueryAsync(cancellationToken); - } - - public Task CreateTableAsync(DuckDBConnection conn, CancellationToken cancellationToken = default) + public async Task CreateTableAsync(DuckDBConnection conn, CancellationToken cancellationToken = default) { using var cmd = conn.CreateCommand(); cmd.CommandText = $@" @@ -51,7 +39,7 @@ public Task CreateTableAsync(DuckDBConnection conn, CancellationToken cancellati embedding FLOAT[], timestamp TEXT, PRIMARY KEY(collection, key))"; - return cmd.ExecuteNonQueryAsync(cancellationToken); + await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); } public async Task CreateCollectionAsync(DuckDBConnection conn, string collectionName, CancellationToken cancellationToken = default) @@ -64,11 +52,11 @@ public async Task CreateCollectionAsync(DuckDBConnection conn, string collection using var cmd = conn.CreateCommand(); cmd.CommandText = $@" - INSERT INTO {TableName} VALUES (?1,?2,?3, [], ?4 ); "; - cmd.Parameters.Add(new DuckDBParameter(collectionName)); - cmd.Parameters.Add(new DuckDBParameter(string.Empty)); - cmd.Parameters.Add(new DuckDBParameter(string.Empty)); - cmd.Parameters.Add(new DuckDBParameter(string.Empty)); + INSERT INTO {TableName} VALUES ($collectionName, $key, $metadata, [], 
$timestamp ); "; + cmd.Parameters.Add(new DuckDBParameter(nameof(collectionName), collectionName)); + cmd.Parameters.Add(new DuckDBParameter("key", string.Empty)); + cmd.Parameters.Add(new DuckDBParameter("metadata", string.Empty)); + cmd.Parameters.Add(new DuckDBParameter("timestamp", string.Empty)); await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); } @@ -81,16 +69,16 @@ private static string EncodeFloatArrayToString(float[]? data) [System.Diagnostics.CodeAnalysis.SuppressMessage("Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "Internal method serializing array of float and numbers")] public async Task UpdateOrInsertAsync(DuckDBConnection conn, - string collection, string key, string? metadata, float[]? embedding, string? timestamp, CancellationToken cancellationToken = default) + string collectionName, string key, string? metadata, float[]? embedding, string? timestamp, CancellationToken cancellationToken = default) { - await this.DeleteAsync(conn, collection, key, cancellationToken).ConfigureAwait(true); + await this.DeleteAsync(conn, collectionName, key, cancellationToken).ConfigureAwait(true); var embeddingArrayString = EncodeFloatArrayToString(embedding ?? Array.Empty()); using var cmd = conn.CreateCommand(); - cmd.CommandText = $"INSERT INTO {TableName} VALUES(?1, ?2, ?3, {embeddingArrayString}, ?4)"; - cmd.Parameters.Add(new DuckDBParameter(collection)); - cmd.Parameters.Add(new DuckDBParameter(key)); - cmd.Parameters.Add(new DuckDBParameter(metadata ?? string.Empty)); - cmd.Parameters.Add(new DuckDBParameter(timestamp ?? string.Empty)); + cmd.CommandText = $"INSERT INTO {TableName} VALUES(${nameof(collectionName)}, ${nameof(key)}, ${nameof(metadata)}, {embeddingArrayString}, ${nameof(timestamp)})"; + cmd.Parameters.Add(new DuckDBParameter(nameof(collectionName), collectionName)); + cmd.Parameters.Add(new DuckDBParameter(nameof(key), key)); + cmd.Parameters.Add(new DuckDBParameter(nameof(metadata), metadata ?? string.Empty)); + cmd.Parameters.Add(new DuckDBParameter(nameof(timestamp), timestamp ?? string.Empty)); await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); } @@ -113,7 +101,7 @@ SELECT DISTINCT collection using var dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) { - yield return dataReader.GetString("collection"); + yield return dataReader.GetFieldValue("collection"); } } @@ -121,39 +109,42 @@ SELECT DISTINCT collection public async IAsyncEnumerable GetNearestMatchesAsync( DuckDBConnection conn, string collectionName, - float[]? embedding, + float[] embedding, int limit, double minRelevanceScore = 0, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - var embeddingArrayString = EncodeFloatArrayToString(embedding ?? 
Array.Empty()); + var embeddingArrayString = EncodeFloatArrayToString(embedding); using var cmd = conn.CreateCommand(); cmd.CommandText = $@" - SELECT key, metadata, timestamp, cast(embedding as string) as embeddingAsString, cast(cosine_similarity(embedding,{embeddingArrayString}) as FLOAT) as score FROM {TableName} - WHERE collection=?1 AND score >= {minRelevanceScore.ToString("F12", CultureInfo.InvariantCulture)} + SELECT key, metadata, timestamp, embedding, (embedding <=> {embeddingArrayString}) as score FROM {TableName} + WHERE collection=${nameof(collectionName)} AND len(embedding) > 0 AND score >= {minRelevanceScore.ToString("F12", CultureInfo.InvariantCulture)} ORDER BY score DESC - LIMIT {limit};"; - cmd.Parameters.Add(new DuckDBParameter(collectionName)); + LIMIT ${nameof(limit)};"; + + cmd.Parameters.Add(new DuckDBParameter(nameof(collectionName), collectionName)); + cmd.Parameters.Add(new DuckDBParameter(nameof(limit), limit)); using var dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) { - string key = dataReader.GetString("key"); + string key = dataReader.GetFieldValue("key"); if (string.IsNullOrWhiteSpace(key)) { continue; } - string metadata = dataReader.GetString("metadata"); - string embeddingAsString = dataReader.GetString("embeddingAsString"); - string timestamp = dataReader.GetString("timestamp"); - float score = dataReader.GetFloat("score"); + string metadata = dataReader.GetFieldValue("metadata"); + float[] embeddingFromSearch = (dataReader.GetFieldValue>("embedding").ToArray()); + string timestamp = dataReader.GetFieldValue("timestamp"); + float score = dataReader.GetFieldValue("score"); + yield return new DatabaseEntry { Key = key, MetadataString = metadata, - EmbeddingString = embeddingAsString, + Embedding = embeddingFromSearch, Timestamp = timestamp, Score = score }; @@ -167,23 +158,24 @@ ORDER BY score DESC { using var cmd = conn.CreateCommand(); cmd.CommandText = $@" - SELECT metadata, timestamp, cast(embedding as string) as embeddingAsString FROM {TableName} - WHERE collection=?1 - AND key=?2; "; - cmd.Parameters.Add(new DuckDBParameter(collectionName)); - cmd.Parameters.Add(new DuckDBParameter(key)); + SELECT metadata, timestamp, embedding FROM {TableName} + WHERE collection=${nameof(collectionName)} + AND key=${nameof(key)}; "; + cmd.Parameters.Add(new DuckDBParameter(nameof(collectionName), collectionName)); + cmd.Parameters.Add(new DuckDBParameter(nameof(key), key)); using var dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); if (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) { - string metadata = dataReader.GetString(dataReader.GetOrdinal("metadata")); - string embeddingAsString = dataReader.GetString(dataReader.GetOrdinal("embeddingAsString")); - string timestamp = dataReader.GetString(dataReader.GetOrdinal("timestamp")); + string metadata = dataReader.GetFieldValue("metadata"); + float[] embeddingFromSearch = (dataReader.GetFieldValue>("embedding").ToArray()); + string timestamp = dataReader.GetFieldValue("timestamp"); + return new DatabaseEntry { Key = key, MetadataString = metadata, - EmbeddingString = embeddingAsString, + Embedding = embeddingFromSearch, Timestamp = timestamp }; } @@ -191,25 +183,25 @@ ORDER BY score DESC return null; } - public Task DeleteCollectionAsync(DuckDBConnection conn, string collectionName, CancellationToken cancellationToken = default) + public async Task 
DeleteCollectionAsync(DuckDBConnection conn, string collectionName, CancellationToken cancellationToken = default) { using var cmd = conn.CreateCommand(); cmd.CommandText = $@" DELETE FROM {TableName} - WHERE collection=?;"; - cmd.Parameters.Add(new DuckDBParameter(collectionName)); - return cmd.ExecuteNonQueryAsync(cancellationToken); + WHERE collection=${nameof(collectionName)};"; + cmd.Parameters.Add(new DuckDBParameter(nameof(collectionName), collectionName)); + await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); } - public Task DeleteAsync(DuckDBConnection conn, string collectionName, string key, CancellationToken cancellationToken = default) + public async Task DeleteAsync(DuckDBConnection conn, string collectionName, string key, CancellationToken cancellationToken = default) { using var cmd = conn.CreateCommand(); cmd.CommandText = $@" DELETE FROM {TableName} - WHERE collection=?1 - AND key=?2; "; - cmd.Parameters.Add(new DuckDBParameter(collectionName)); - cmd.Parameters.Add(new DuckDBParameter(key)); - return cmd.ExecuteNonQueryAsync(cancellationToken); + WHERE collection=${nameof(collectionName)} + AND key=${nameof(key)}; "; + cmd.Parameters.Add(new DuckDBParameter(nameof(collectionName), collectionName)); + cmd.Parameters.Add(new DuckDBParameter(nameof(key), key)); + await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.DuckDB/DuckDBExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.DuckDB/DuckDBExtensions.cs index ea7f25be415c..0c791b8760d5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.DuckDB/DuckDBExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.DuckDB/DuckDBExtensions.cs @@ -2,19 +2,13 @@ using System.Data.Common; -namespace Microsoft.SemanticKernel.Connectors.Memory.DuckDB; +namespace Microsoft.SemanticKernel.Connectors.DuckDB; internal static class DuckDBExtensions { - public static string GetString(this DbDataReader reader, string fieldName) + public static T GetFieldValue(this DbDataReader reader, string fieldName) { int ordinal = reader.GetOrdinal(fieldName); - return reader.GetString(ordinal); - } - - public static float GetFloat(this DbDataReader reader, string fieldName) - { - int ordinal = reader.GetOrdinal(fieldName); - return reader.GetFloat(ordinal); + return reader.GetFieldValue(ordinal); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.DuckDB/DuckDBMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.DuckDB/DuckDBMemoryStore.cs index b805129a4538..2e5debaad7dc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.DuckDB/DuckDBMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.DuckDB/DuckDBMemoryStore.cs @@ -5,14 +5,12 @@ using System.Globalization; using System.Linq; using System.Runtime.CompilerServices; -using System.Text.Json; using System.Threading; using System.Threading.Tasks; using DuckDB.NET.Data; using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.DuckDB; +namespace Microsoft.SemanticKernel.Connectors.DuckDB; /// /// An implementation of backed by a DuckDB database. 
@@ -149,14 +147,13 @@ public async Task RemoveBatchAsync(string collectionName, IEnumerable ke yield break; } - var collectionMemories = new List(); List<(MemoryRecord Record, double Score)> embeddings = new(); await foreach (var dbEntry in this._dbConnector.GetNearestMatchesAsync(this._dbConnection, collectionName, embedding.ToArray(), limit, minRelevanceScore, cancellationToken)) { var entry = MemoryRecord.FromJsonMetadata( json: dbEntry.MetadataString, - withEmbeddings ? JsonSerializer.Deserialize>(dbEntry.EmbeddingString, s_jsonSerializerOptions) : Array.Empty(), + withEmbeddings ? dbEntry.Embedding : Array.Empty(), dbEntry.Key, ParseTimestamp(dbEntry.Timestamp)); embeddings.Add(new(entry, dbEntry.Score)); @@ -220,7 +217,6 @@ private static async Task InitialiseMemoryStoreAsync(DuckDBMe { await memoryStore._dbConnection.OpenAsync(cancellationToken).ConfigureAwait(false); await memoryStore._dbConnector.CreateTableAsync(memoryStore._dbConnection, cancellationToken).ConfigureAwait(false); - await memoryStore._dbConnector.CreateFunctionsAsync(memoryStore._dbConnection, cancellationToken).ConfigureAwait(false); return memoryStore; } @@ -267,7 +263,7 @@ private async Task InternalUpsertAsync(DuckDBConnection connection, stri record.Key = record.Metadata.Id; await this._dbConnector.UpdateOrInsertAsync(conn: connection, - collection: collectionName, + collectionName: collectionName, key: record.Key, metadata: record.GetSerializedMetadata(), embedding: record.Embedding.ToArray(), @@ -291,7 +287,7 @@ await this._dbConnector.UpdateOrInsertAsync(conn: connection, { return MemoryRecord.FromJsonMetadata( json: entry.Value.MetadataString, - JsonSerializer.Deserialize>(entry.Value.EmbeddingString, s_jsonSerializerOptions), + entry.Value.Embedding, entry.Value.Key, ParseTimestamp(entry.Value.Timestamp)); } @@ -303,14 +299,5 @@ await this._dbConnector.UpdateOrInsertAsync(conn: connection, ParseTimestamp(entry.Value.Timestamp)); } - private static readonly JsonSerializerOptions s_jsonSerializerOptions = CreateSerializerOptions(); - - private static JsonSerializerOptions CreateSerializerOptions() - { - var jso = new JsonSerializerOptions(); - jso.Converters.Add(new ReadOnlyMemoryConverter()); - return jso; - } - #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Kusto/AssemblyInfo.cs new file mode 100644 index 000000000000..51c6d9cf77a2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
+[assembly: Experimental("SKEXP0024")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj b/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj index bb3ee4b14b04..66355aa0a9b2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/Connectors.Memory.Kusto.csproj @@ -1,10 +1,10 @@ - Microsoft.SemanticKernel.Connectors.Memory.Kusto - Microsoft.SemanticKernel.Connectors.Memory.Kusto + Microsoft.SemanticKernel.Connectors.Kusto + Microsoft.SemanticKernel.Connectors.Kusto netstandard2.0 - + alpha NU5104 @@ -15,7 +15,7 @@ - Microsoft.SemanticKernel.Connectors.Memory.Kusto + Microsoft.SemanticKernel.Connectors.Kusto Semantic Kernel - Azure Data Explorer (Kusto) Semantic Memory Azure Data Explorer (Kusto) Semantic Memory connector for Semantic Kernel diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryRecord.cs b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryRecord.cs index a7e8783f06f1..294544ea9e64 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryRecord.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryRecord.cs @@ -6,7 +6,7 @@ using Microsoft.SemanticKernel.Memory; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Kusto; +namespace Microsoft.SemanticKernel.Connectors.Kusto; /// /// Kusto memory record entity. @@ -19,7 +19,7 @@ public sealed class KustoMemoryRecord public string Key { get; set; } /// - /// Metadata associated with memory entity. + /// Attributes associated with memory entity. /// public MemoryRecordMetadata Metadata { get; set; } @@ -44,7 +44,7 @@ public KustoMemoryRecord(MemoryRecord record) : this(record.Key, record.Metadata /// Initializes a new instance of the class. /// /// Entity key. - /// Metadata associated with memory entity. + /// Attributes associated with memory entity. /// Source content embedding. /// Optional timestamp. public KustoMemoryRecord(string key, MemoryRecordMetadata metadata, ReadOnlyMemory embedding, DateTimeOffset? timestamp = null) diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryStore.cs index bab93a542d99..b07c05a9af23 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryStore.cs @@ -11,10 +11,10 @@ using Kusto.Data; using Kusto.Data.Common; using Kusto.Data.Net.Client; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Http; using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Kusto; +namespace Microsoft.SemanticKernel.Connectors.Kusto; /// /// An implementation of backed by a Kusto database. 
@@ -327,7 +327,7 @@ protected virtual void Dispose(bool disposing) private static ClientRequestProperties GetClientRequestProperties() => new() { - Application = Telemetry.HttpUserAgent, + Application = HttpHeaderValues.UserAgent, }; private bool _searchInitialized; diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoSerializer.cs b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoSerializer.cs index 30a21e61bc95..d5dbe866c8c2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoSerializer.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoSerializer.cs @@ -6,7 +6,7 @@ using Microsoft.SemanticKernel.Memory; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Kusto; +namespace Microsoft.SemanticKernel.Connectors.Kusto; /// /// Contains serialization/deserialization logic for memory record properties in Kusto. @@ -19,7 +19,7 @@ public static class KustoSerializer /// Instance of an embedding for serialization. public static string SerializeEmbedding(ReadOnlyMemory embedding) { - return JsonSerializer.Serialize(embedding, s_jsonSerializerOptions); + return JsonSerializer.Serialize(embedding, JsonOptionsCache.Default); } /// @@ -30,7 +30,7 @@ public static ReadOnlyMemory DeserializeEmbedding(string? embedding) { return string.IsNullOrEmpty(embedding) ? default : - JsonSerializer.Deserialize>(embedding!, s_jsonSerializerOptions); + JsonSerializer.Deserialize>(embedding!, JsonOptionsCache.Default); } /// @@ -93,14 +93,5 @@ public static string SerializeDateTimeOffset(DateTimeOffset? dateTimeOffset) private const string TimestampFormat = "yyyy-MM-ddTHH:mm:ssZ"; - private static readonly JsonSerializerOptions s_jsonSerializerOptions = CreateSerializerOptions(); - - private static JsonSerializerOptions CreateSerializerOptions() - { - var jso = new JsonSerializerOptions(); - jso.Converters.Add(new ReadOnlyMemoryConverter()); - return jso; - } - #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/README.md b/dotnet/src/Connectors/Connectors.Memory.Kusto/README.md index 6238639ceb07..e7685b1b0adb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/README.md @@ -1,4 +1,4 @@ -# Microsoft.SemanticKernel.Connectors.Memory.Kusto +# Microsoft.SemanticKernel.Connectors.Kusto This connector uses [Azure Data Explorer (Kusto)](https://learn.microsoft.com/en-us/azure/data-explorer/) to implement Semantic Memory. @@ -7,6 +7,7 @@ This connector uses [Azure Data Explorer (Kusto)](https://learn.microsoft.com/en 1. Create a cluster and database in Azure Data Explorer (Kusto) - see https://learn.microsoft.com/en-us/azure/data-explorer/create-cluster-and-database?tabs=free 2. To use Kusto as a semantic memory store, use the following code: + > See [Example 14](../../../samples/KernelSyntaxExamples/Example14_SemanticMemory.cs) and [Example 15](../../../samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs) for more memory usage examples with the kernel. 
```csharp using Kusto.Data; @@ -14,29 +15,31 @@ using Kusto.Data; var connectionString = new KustoConnectionStringBuilder("https://kvc123.eastus.kusto.windows.net").WithAadUserPromptAuthentication(); KustoMemoryStore memoryStore = new(connectionString, "MyDatabase"); -IKernel kernel = new KernelBuilder() - .WithLogger(ConsoleLogger.Log) - .WithOpenAITextCompletionService(modelId: TestConfiguration.OpenAI.ModelId, apiKey: TestConfiguration.OpenAI.ApiKey) - .WithOpenAITextEmbeddingGenerationService(modelId: TestConfiguration.OpenAI.EmbeddingModelId,apiKey: TestConfiguration.OpenAI.ApiKey) - .WithMemoryStorage(memoryStore) - .Build(); +var embeddingGenerator = new OpenAITextEmbeddingGenerationService("text-embedding-ada-002", apiKey); + +SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); + +var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); ``` ## Important Notes ### Cosine Similarity -As of now, cosine similarity is not built-in to Kusto. -A function to calculate cosine similarity is automatically added to the Kusto database during first search operation. -This function (`series_cosine_similarity_fl`) is not removed automatically. -You might want to delete it manually if you stop using the Kusto database as a semantic memory store. -If you want to delete the function, you can do it manually using the Kusto explorer. -The function is called `series_cosine_similarity_fl` and is located in the `Functions` folder of the database. + +As of now, cosine similarity is not built-in to Kusto. +A function to calculate cosine similarity is automatically added to the Kusto database during first search operation. +This function (`series_cosine_similarity_fl`) is not removed automatically. +You might want to delete it manually if you stop using the Kusto database as a semantic memory store. +If you want to delete the function, you can do it manually using the Kusto explorer. +The function is called `series_cosine_similarity_fl` and is located in the `Functions` folder of the database. ### Append-Only Store -Kusto is an append-only store. This means that when a fact is updated, the old fact is not deleted. -This isn't a problem for the semantic memory connector, as it always utilizes the most recent fact. + +Kusto is an append-only store. This means that when a fact is updated, the old fact is not deleted. +This isn't a problem for the semantic memory connector, as it always utilizes the most recent fact. This is made possible by using the [arg_max](https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/arg-max-aggfunction) aggregation function in conjunction with the [ingestion_time](https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/ingestiontimefunction) function. However, users manually querying the underlying table should be aware of this behavior. ### Authentication + Please note that the authentication used in the example above is not recommended for production use. You can find more details here: https://learn.microsoft.com/en-us/azure/data-explorer/kusto/api/connection-strings/kusto diff --git a/dotnet/src/Connectors/Connectors.Memory.Milvus/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Milvus/AssemblyInfo.cs new file mode 100644 index 000000000000..c2a0e46a1a70 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Milvus/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
+[assembly: Experimental("SKEXP0025")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Milvus/Connectors.Memory.Milvus.csproj b/dotnet/src/Connectors/Connectors.Memory.Milvus/Connectors.Memory.Milvus.csproj index 530c85ec6423..9270ff54490a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Milvus/Connectors.Memory.Milvus.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Milvus/Connectors.Memory.Milvus.csproj @@ -2,11 +2,11 @@ - Microsoft.SemanticKernel.Connectors.Memory.Milvus + Microsoft.SemanticKernel.Connectors.Milvus $(AssemblyName) net6.0;netstandard2.0 enable - + alpha NU5104 diff --git a/dotnet/src/Connectors/Connectors.Memory.Milvus/MilvusMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Milvus/MilvusMemoryStore.cs index 278b4b52688e..c6d4f7a42b70 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Milvus/MilvusMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Milvus/MilvusMemoryStore.cs @@ -12,7 +12,7 @@ using Microsoft.SemanticKernel.Memory; using Milvus.Client; -namespace Microsoft.SemanticKernel.Connectors.Memory.Milvus; +namespace Microsoft.SemanticKernel.Connectors.Milvus; /// /// An implementation of for the Milvus vector database. @@ -22,7 +22,9 @@ public class MilvusMemoryStore : IMemoryStore, IDisposable private readonly int _vectorSize; private readonly SimilarityMetricType _metricType; private readonly bool _ownsMilvusClient; + private readonly string _indexName; + private const string DefaultIndexName = "default"; private const string IsReferenceFieldName = "is_reference"; private const string ExternalSourceNameFieldName = "external_source_name"; private const string IdFieldName = "id"; @@ -62,12 +64,13 @@ public class MilvusMemoryStore : IMemoryStore, IDisposable /// /// Creates a new , connecting to the given hostname on the default Milvus port of 19530. /// For more advanced configuration opens, construct a instance and pass it to - /// . + /// . /// /// The hostname or IP address to connect to. /// The port to connect to. Defaults to 19530. /// Whether to use TLS/SSL. Defaults to false. /// The database to connect to. Defaults to the default Milvus database. + /// The name of the index to use. Defaults to . /// The size of the vectors used in Milvus. Defaults to 1536. /// The metric used to measure similarity between vectors. Defaults to . /// An optional logger factory through which the Milvus client will log. @@ -76,10 +79,11 @@ public MilvusMemoryStore( int port = DefaultMilvusPort, bool ssl = false, string? database = null, + string? indexName = null, int vectorSize = 1536, SimilarityMetricType metricType = SimilarityMetricType.Ip, ILoggerFactory? loggerFactory = null) - : this(new MilvusClient(host, port, ssl, database, callOptions: default, loggerFactory), vectorSize, metricType) + : this(new MilvusClient(host, port, ssl, database, callOptions: default, loggerFactory), indexName, vectorSize, metricType) { this._ownsMilvusClient = true; } @@ -87,7 +91,7 @@ public MilvusMemoryStore( /// /// Creates a new , connecting to the given hostname on the default Milvus port of 19530. /// For more advanced configuration opens, construct a instance and pass it to - /// . + /// . /// /// The hostname or IP address to connect to. /// The username to use for authentication. @@ -95,6 +99,7 @@ public MilvusMemoryStore( /// The port to connect to. Defaults to 19530. /// Whether to use TLS/SSL. Defaults to false. /// The database to connect to. Defaults to the default Milvus database. + /// The name of the index to use. Defaults to . 
/// The size of the vectors used in Milvus. Defaults to 1536. /// The metric used to measure similarity between vectors. Defaults to . /// An optional logger factory through which the Milvus client will log. @@ -105,10 +110,11 @@ public MilvusMemoryStore( int port = DefaultMilvusPort, bool ssl = false, string? database = null, + string? indexName = null, int vectorSize = 1536, SimilarityMetricType metricType = SimilarityMetricType.Ip, ILoggerFactory? loggerFactory = null) - : this(new MilvusClient(host, username, password, port, ssl, database, callOptions: default, loggerFactory), vectorSize, metricType) + : this(new MilvusClient(host, username, password, port, ssl, database, callOptions: default, loggerFactory), indexName, vectorSize, metricType) { this._ownsMilvusClient = true; } @@ -116,13 +122,14 @@ public MilvusMemoryStore( /// /// Creates a new , connecting to the given hostname on the default Milvus port of 19530. /// For more advanced configuration opens, construct a instance and pass it to - /// . + /// . /// /// The hostname or IP address to connect to. /// An API key to be used for authentication, instead of a username and password. /// The port to connect to. Defaults to 19530. /// Whether to use TLS/SSL. Defaults to false. /// The database to connect to. Defaults to the default Milvus database. + /// The name of the index to use. Defaults to . /// The size of the vectors used in Milvus. Defaults to 1536. /// The metric used to measure similarity between vectors. Defaults to . /// An optional logger factory through which the Milvus client will log. @@ -132,10 +139,11 @@ public MilvusMemoryStore( int port = DefaultMilvusPort, bool ssl = false, string? database = null, + string? indexName = null, int vectorSize = 1536, SimilarityMetricType metricType = SimilarityMetricType.Ip, ILoggerFactory? loggerFactory = null) - : this(new MilvusClient(host, apiKey, port, ssl, database, callOptions: default, loggerFactory), vectorSize, metricType) + : this(new MilvusClient(host, apiKey, port, ssl, database, callOptions: default, loggerFactory), indexName, vectorSize, metricType) { this._ownsMilvusClient = true; } @@ -144,23 +152,27 @@ public MilvusMemoryStore( /// Initializes a new instance of over the given . /// /// A configured with the necessary endpoint and authentication information. + /// The name of the index to use. Defaults to . /// The size of the vectors used in Milvus. Defaults to 1536. /// The metric used to measure similarity between vectors. Defaults to . public MilvusMemoryStore( MilvusClient client, + string? indexName = null, int vectorSize = 1536, SimilarityMetricType metricType = SimilarityMetricType.Ip) - : this(client, ownsMilvusClient: false, vectorSize, metricType) + : this(client, ownsMilvusClient: false, indexName, vectorSize, metricType) { } private MilvusMemoryStore( MilvusClient client, bool ownsMilvusClient, + string? indexName = null, int vectorSize = 1536, SimilarityMetricType metricType = SimilarityMetricType.Ip) { this.Client = client; + this._indexName = indexName ?? 
DefaultIndexName; this._vectorSize = vectorSize; this._metricType = metricType; this._ownsMilvusClient = ownsMilvusClient; @@ -186,8 +198,8 @@ public async Task CreateCollectionAsync(string collectionName, CancellationToken MilvusCollection collection = await this.Client.CreateCollectionAsync(collectionName, schema, DefaultConsistencyLevel, cancellationToken: cancellationToken).ConfigureAwait(false); - await collection.CreateIndexAsync(EmbeddingFieldName, metricType: this._metricType, cancellationToken: cancellationToken).ConfigureAwait(false); - await collection.WaitForIndexBuildAsync("float_vector", cancellationToken: cancellationToken).ConfigureAwait(false); + await collection.CreateIndexAsync(EmbeddingFieldName, metricType: this._metricType, indexName: this._indexName, cancellationToken: cancellationToken).ConfigureAwait(false); + await collection.WaitForIndexBuildAsync("float_vector", this._indexName, cancellationToken: cancellationToken).ConfigureAwait(false); await collection.LoadAsync(cancellationToken: cancellationToken).ConfigureAwait(false); await collection.WaitForCollectionLoadAsync(waitingInterval: TimeSpan.FromMilliseconds(100), timeout: TimeSpan.FromMinutes(1), cancellationToken: cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/Connectors/Connectors.Memory.Milvus/README.md b/dotnet/src/Connectors/Connectors.Memory.Milvus/README.md index d88a2aaffd5a..8619aa4dc5ea 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Milvus/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Milvus/README.md @@ -1,4 +1,4 @@ -# Microsoft.SemanticKernel.Connectors.Memory.Milvus +# Microsoft.SemanticKernel.Connectors.Milvus This is an implementation of the Semantic Kernel Memory Store abstraction for the [Milvus vector database](https://milvus.io). @@ -19,15 +19,16 @@ docker-compose up -d ``` 3. Use Semantic Kernel with Milvus, connecting to `localhost` with the default (gRPC) port of 1536: + > See [Example 14](../../../samples/KernelSyntaxExamples/Example14_SemanticMemory.cs) and [Example 15](../../../samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs) for more memory usage examples with the kernel. ```csharp using MilvusMemoryStore memoryStore = new("localhost"); -IKernel kernel = new KernelBuilder() - .WithLogger(logger) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", "OPENAI_API_KEY") - .WithMemoryStorage(memoryStore) - .Build(); +var embeddingGenerator = new OpenAITextEmbeddingGenerationService("text-embedding-ada-002", apiKey); + +SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); + +var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); ``` -More information on setting up Milvus can be found [here](https://milvus.io/docs/v2.2.x/install_standalone-docker.md). The `MilvusMemoryStore` constructor provides additional configuration options, such as the vector size, the similarity metric type, etc. \ No newline at end of file +More information on setting up Milvus can be found [here](https://milvus.io/docs/v2.2.x/install_standalone-docker.md). The `MilvusMemoryStore` constructor provides additional configuration options, such as the vector size, the similarity metric type, etc. diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/AssemblyInfo.cs new file mode 100644 index 000000000000..bc7cb87a849c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. 
All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0030")] diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj b/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj new file mode 100644 index 000000000000..a8dbee3cd46a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/Connectors.Memory.MongoDB.csproj @@ -0,0 +1,29 @@ + + + + + Microsoft.SemanticKernel.Connectors.MongoDB + $(AssemblyName) + netstandard2.0 + alpha + + + + + + + + + Semantic Kernel - MongoDB Connector + MongoDB connector for Semantic Kernel plugins and semantic memory + + + + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryEntry.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryEntry.cs new file mode 100644 index 000000000000..925a34064546 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryEntry.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.SemanticKernel.Memory; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace Microsoft.SemanticKernel.Connectors.MongoDB; + +/// +/// A MongoDB memory entry. +/// +public sealed class MongoDBMemoryEntry +{ + /// + /// Unique identifier of the memory entry. + /// + [BsonId] + public string Id { get; set; } + + /// + /// Metadata associated with memory entity. + /// + [BsonElement("metadata")] + public MongoDBMemoryRecordMetadata Metadata { get; set; } + + /// + /// Source content embedding. + /// +#pragma warning disable CA1819 // Properties should not return arrays + // MongoDBMemoryEntry class is not part of public API, and its usage correctness is ensured by MongoDBMemoryStore. + // This is an interim solution until ReadOnlyMemory serialization is supported natively by MongoDB Driver (https://jira.mongodb.org/browse/CSHARP-4807). + [BsonElement("embedding")] + public float[] Embedding { get; set; } +#pragma warning restore CA1819 // Properties should not return arrays + + /// + /// Optional timestamp. + /// + [BsonElement("timestamp")] + [BsonDateTimeOptions(Kind = DateTimeKind.Utc, Representation = BsonType.DateTime)] + public DateTime? Timestamp { get; set; } + + /// + /// Nearest match score. + /// + [BsonIgnoreIfDefault] + public double Score { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// Instance to copy values from. + public MongoDBMemoryEntry(MemoryRecord memoryRecord) + { + this.Id = memoryRecord.Key; + this.Metadata = new MongoDBMemoryRecordMetadata(memoryRecord.Metadata); + this.Embedding = memoryRecord.Embedding.ToArray(); + this.Timestamp = memoryRecord.Timestamp?.UtcDateTime; + } + + /// + /// Returns mapped . + /// + public MemoryRecord ToMemoryRecord() => + new(this.Metadata.ToMemoryRecordMetadata(), this.Embedding, this.Id, this.Timestamp?.ToLocalTime()); + + /// + /// Returns a pair of mapped and score. 
+ /// + public (MemoryRecord, double) ToMemoryRecordAndScore() => + (new(this.Metadata.ToMemoryRecordMetadata(), this.Embedding, this.Id, this.Timestamp?.ToLocalTime()), this.Score); +} diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryRecordMetadata.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryRecordMetadata.cs new file mode 100644 index 000000000000..55c67c54b8cd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryRecordMetadata.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.Memory; +using MongoDB.Bson.Serialization.Attributes; + +namespace Microsoft.SemanticKernel.Connectors.MongoDB; + +/// +/// A MongoDB record metadata. +/// +#pragma warning disable CA1815 // Override equals and operator equals on value types +public struct MongoDBMemoryRecordMetadata +#pragma warning restore CA1815 // Override equals and operator equals on value types +{ + /// + /// . + /// + [BsonElement("isReference")] + public bool IsReference { get; set; } + + /// + /// . + /// + [BsonElement("externalSourceName")] + [BsonIgnoreIfDefault] + public string ExternalSourceName { get; set; } + + /// + /// . + /// + [BsonId] + public string Id { get; set; } + + /// + /// . + /// + [BsonElement("description")] + [BsonIgnoreIfDefault] + public string Description { get; set; } + + /// + /// . + /// + [BsonElement("text")] + [BsonIgnoreIfDefault] + public string Text { get; set; } + + /// + /// . + /// + [BsonElement("additionalMetadata")] + [BsonIgnoreIfDefault] + public string AdditionalMetadata { get; set; } + + /// + /// Initializes a new instance of structure. + /// + public MongoDBMemoryRecordMetadata(MemoryRecordMetadata memoryRecordMetadata) + { + this.IsReference = memoryRecordMetadata.IsReference; + this.ExternalSourceName = memoryRecordMetadata.ExternalSourceName; + this.Id = memoryRecordMetadata.Id; + this.Description = memoryRecordMetadata.Description; + this.Text = memoryRecordMetadata.Text; + this.AdditionalMetadata = memoryRecordMetadata.AdditionalMetadata; + } + + /// + /// Returns mapped . + /// + public MemoryRecordMetadata ToMemoryRecordMetadata() => + new(this.IsReference, this.Id, this.Text, this.Description, this.ExternalSourceName, this.AdditionalMetadata); +} diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryStore.cs new file mode 100644 index 000000000000..c35abd32dd78 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/MongoDBMemoryStore.cs @@ -0,0 +1,255 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Memory; +using MongoDB.Driver; + +namespace Microsoft.SemanticKernel.Connectors.MongoDB; + +/// +/// An implementation of backed by a MongoDB database. +/// +public class MongoDBMemoryStore : IMemoryStore, IDisposable +{ + /// + /// Initializes a new instance of the class. + /// + /// MongoDB connection string. + /// Database name. + /// Name of the search index. If no value is provided default index will be used. + public MongoDBMemoryStore(string connectionString, string databaseName, string? indexName = default) : + this(new MongoClient(connectionString), databaseName, indexName) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// MongoDB client. 
+ /// Database name. + /// Name of the search index. If no value is provided default index will be used. + public MongoDBMemoryStore(IMongoClient mongoClient, string databaseName, string? indexName = default) + { + this._indexName = indexName; + this._mongoClient = mongoClient; + this._mongoDatabase = this._mongoClient.GetDatabase(databaseName); + } + + /// + public Task CreateCollectionAsync(string collectionName, CancellationToken cancellationToken = default) => + this._mongoDatabase.CreateCollectionAsync(collectionName, cancellationToken: cancellationToken); + + /// + public async IAsyncEnumerable GetCollectionsAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + using var cursor = await this._mongoDatabase.ListCollectionNamesAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var name in cursor.Current) + { + yield return name; + } + } + } + + /// + public async Task DoesCollectionExistAsync(string collectionName, CancellationToken cancellationToken = default) + { + await foreach (var existingCollectionName in this.GetCollectionsAsync(cancellationToken)) + { + if (existingCollectionName == collectionName) + { + return true; + } + } + + return false; + } + + /// + public Task DeleteCollectionAsync(string collectionName, CancellationToken cancellationToken = default) => + this._mongoDatabase.DropCollectionAsync(collectionName, cancellationToken); + + /// + public async Task UpsertAsync(string collectionName, MemoryRecord record, CancellationToken cancellationToken = default) + { + record.Key = record.Metadata.Id; + var filter = Builders.Filter.Eq(m => m.Id, record.Key); + + var replaceOptions = new ReplaceOptions() { IsUpsert = true }; + + var result = await this._mongoDatabase.GetCollection(collectionName) + .ReplaceOneAsync(filter, new MongoDBMemoryEntry(record), replaceOptions, cancellationToken) + .ConfigureAwait(false); + + return result.UpsertedId?.AsString ?? 
record.Key; + } + + /// + public async IAsyncEnumerable UpsertBatchAsync( + string collectionName, + IEnumerable records, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + foreach (var record in records) + { + yield return await this.UpsertAsync(collectionName, record, cancellationToken).ConfigureAwait(false); + } + } + + /// + public async Task GetAsync(string collectionName, string key, bool withEmbedding = false, CancellationToken cancellationToken = default) + { + using var cursor = await this.Find( + collectionName, + GetFilterById(key), + withEmbedding, + cancellationToken) + .ConfigureAwait(false); + + var mongoDBMemoryEntry = await cursor.SingleOrDefaultAsync(cancellationToken).ConfigureAwait(false); + var result = mongoDBMemoryEntry?.ToMemoryRecord(); + + return result; + } + + /// + public async IAsyncEnumerable GetBatchAsync(string collectionName, IEnumerable keys, bool withEmbeddings = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + using var cursor = await this.Find( + collectionName, + GetFilterByIds(keys), + withEmbeddings, + cancellationToken) + .ConfigureAwait(false); + + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var memoryRecord in cursor.Current) + { + yield return memoryRecord.ToMemoryRecord(); + } + } + } + + /// + public Task RemoveAsync(string collectionName, string key, CancellationToken cancellationToken = default) => + this.GetCollection(collectionName).DeleteOneAsync(GetFilterById(key), cancellationToken); + + /// + public Task RemoveBatchAsync(string collectionName, IEnumerable keys, CancellationToken cancellationToken = default) => + this.GetCollection(collectionName).DeleteManyAsync(GetFilterByIds(keys), cancellationToken); + + /// + public async IAsyncEnumerable<(MemoryRecord, double)> GetNearestMatchesAsync(string collectionName, ReadOnlyMemory embedding, int limit, double minRelevanceScore = 0, bool withEmbeddings = false, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + using var cursor = await this.VectorSearch(collectionName, embedding, limit, minRelevanceScore, withEmbeddings, cancellationToken).ConfigureAwait(false); + + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var memoryEntry in cursor.Current) + { + yield return memoryEntry.ToMemoryRecordAndScore(); + } + } + } + + /// + public async Task<(MemoryRecord, double)?> GetNearestMatchAsync(string collectionName, ReadOnlyMemory embedding, double minRelevanceScore = 0, bool withEmbedding = false, CancellationToken cancellationToken = default) + { + using var cursor = await this.VectorSearch(collectionName, embedding, 1, minRelevanceScore, withEmbedding, cancellationToken).ConfigureAwait(false); + + var result = await cursor.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + + return result?.ToMemoryRecordAndScore(); + } + + /// + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + #region protected ================================================================================ + + /// + /// Disposes the resources used by the instance. + /// + /// True to release both managed and unmanaged resources; false to release only unmanaged resources. 
+ protected virtual void Dispose(bool disposing) + { + if (disposing) + { + this._mongoClient.Cluster.Dispose(); + } + } + + #endregion + + #region private ================================================================================ + + private readonly string? _indexName; + private readonly IMongoClient _mongoClient; + private readonly IMongoDatabase _mongoDatabase; + + private IMongoCollection GetCollection(string collectionName) => + this._mongoDatabase.GetCollection(collectionName); + + private Task> Find( + string collectionName, + FilterDefinition filter, + bool withEmbeddings = false, + CancellationToken cancellationToken = default) + { + var collection = this._mongoDatabase.GetCollection(collectionName); + var findOptions = withEmbeddings ? null : new FindOptions() { Projection = Builders.Projection.Exclude(e => e.Embedding) }; + + return collection.FindAsync(filter, findOptions, cancellationToken); + } + + private static FilterDefinition GetFilterById(string id) => + Builders.Filter.Eq(m => m.Id, id); + + private static FilterDefinition GetFilterByIds(IEnumerable ids) => + Builders.Filter.In(m => m.Id, ids); + + private Task> VectorSearch( + string collectionName, + ReadOnlyMemory embedding, + int limit = 1, + double minRelevanceScore = 0, + bool withEmbedding = false, + CancellationToken cancellationToken = default) + { + var projectionDefinition = Builders + .Projection + .Meta(nameof(MongoDBMemoryEntry.Score), "vectorSearchScore") + .Include(e => e.Metadata) + .Include(e => e.Timestamp); + + if (withEmbedding) + { + projectionDefinition = projectionDefinition.Include(e => e.Embedding); + } + + var aggregationPipeline = this.GetCollection(collectionName) + .Aggregate() + .VectorSearch(e => e.Embedding, embedding, limit) + .Project(projectionDefinition); + + if (minRelevanceScore > 0) + { + aggregationPipeline = aggregationPipeline.Match(Builders.Filter.Gte(m => m.Score, minRelevanceScore)); + } + + return aggregationPipeline.ToCursorAsync(cancellationToken); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.MongoDB/README.md b/dotnet/src/Connectors/Connectors.Memory.MongoDB/README.md new file mode 100644 index 000000000000..74b3dc8c35c5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.MongoDB/README.md @@ -0,0 +1,49 @@ +# Microsoft.SemanticKernel.Connectors.MongoDB + +This connector uses [MongoDB Atlas Vector Search](https://www.mongodb.com/products/platform/atlas-vector-search) to implement Semantic Memory. + +## Quick Start + +1. Create [Atlas cluster](https://www.mongodb.com/docs/atlas/getting-started/) + +2. Create a [collection](https://www.mongodb.com/docs/atlas/atlas-ui/collections/) + +3. Create [Vector Search Index](https://www.mongodb.com/docs/atlas/atlas-vector-search/vector-search-overview/) for the collection. The index has to be defined on a field called `embedding`. For example: + +``` +{ + "type": "vectorSearch", + "fields": [ + { + "numDimensions": , + "path": "embedding", + "similarity": "euclidean | cosine | dotProduct", + "type": "vector" + } + ] +} +``` + +4. Create the MongoDB memory store + > See [Example 14](../../../samples/KernelSyntaxExamples/Example14_SemanticMemory.cs) and [Example 15](../../../samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs) for more memory usage examples with the kernel. 
+ +```csharp +var connectionString = "MONGODB ATLAS CONNECTION STRING" +MongoDBMemoryStore memoryStore = new(connectionString, "MyDatabase"); + +var embeddingGenerator = new OpenAITextEmbeddingGenerationService("text-embedding-ada-002", apiKey); + +SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); + +var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); +``` + +> Guide to find the connection string: https://www.mongodb.com/docs/manual/reference/connection-string/ + +## Important Notes + +### Vector search indexes + +In this version, vector search index management is outside of `MongoDBMemoryStore` scope. +Creation and maintenance of the indexes have to be done by the user. Please note that deleting a collection +(`memoryStore.DeleteCollectionAsync`) will delete the index as well. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/AssemblyInfo.cs new file mode 100644 index 000000000000..705141ce5568 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0031")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj index da502d5cae4f..9d065d9e779c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Connectors.Memory.Pinecone.csproj @@ -1,29 +1,29 @@  - - - Microsoft.SemanticKernel.Connectors.Memory.Pinecone - $(AssemblyName) - netstandard2.0 - + + + Microsoft.SemanticKernel.Connectors.Pinecone + $(AssemblyName) + netstandard2.0 + alpha + - - - + + + - - - Semantic Kernel - Pinecone Connector - Pinecone connector for Semantic Kernel plugins and semantic memory - + + + Semantic Kernel - Pinecone Connector + Pinecone connector for Semantic Kernel plugins and semantic memory + - - - + + + - - - - + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ConfigureIndexRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ConfigureIndexRequest.cs index 545500f640ad..bf566513f3b6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ConfigureIndexRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ConfigureIndexRequest.cs @@ -2,9 +2,8 @@ using System.Net.Http; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// This operation specifies the pod type and number of replicas for an index. 
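As a companion to the MongoDB README snippet above, here is a minimal sketch of how the resulting memory might be exercised end to end. The collection name, sample text, and key placeholders are illustrative assumptions rather than part of this change, and the call pattern assumes the `SemanticTextMemory` wiring shown in that README.

```csharp
using System;
using Microsoft.SemanticKernel.Connectors.MongoDB;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Memory;

// Illustrative placeholders; replace with real values.
var apiKey = "OPENAI API KEY";
var memoryStore = new MongoDBMemoryStore("MONGODB ATLAS CONNECTION STRING", "MyDatabase");
var embeddingGenerator = new OpenAITextEmbeddingGenerationService("text-embedding-ada-002", apiKey);
SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator);

// The collection must already have the "embedding" vector search index described above.
const string collection = "sk-documentation";

// Upsert a fact; the connector stores the text, metadata, and embedding in the collection.
await textMemory.SaveInformationAsync(collection, text: "Semantic Kernel integrates LLMs with conventional code.", id: "sk-overview");

// Query by meaning; results are ranked by the Atlas vectorSearchScore, surfaced here as Relevance.
await foreach (MemoryQueryResult result in textMemory.SearchAsync(collection, "What does Semantic Kernel do?", limit: 1))
{
    Console.WriteLine($"{result.Metadata.Text} (relevance {result.Relevance:F2})");
}
```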
diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteIndexRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteIndexRequest.cs index 5109776b5ca4..dcc7f12e552a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteIndexRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteIndexRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Deletes an index and all its data. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteRequest.cs index a7881d48da4b..f97bc27c9657 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DeleteRequest.cs @@ -6,7 +6,7 @@ using System.Text; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// DeleteRequest diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexRequest.cs index bede85c449f5..db44e6580734 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Get information about an index. 
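The Pinecone hunks around this point consolidate the old `Microsoft.SemanticKernel.Connectors.Memory.Pinecone` namespace, together with its `Http.ApiSchema` and `Model` sub-namespaces, into the single `Microsoft.SemanticKernel.Connectors.Pinecone` namespace. A rough sketch of what that means for calling code, using the client and store constructors that appear later in this diff; the environment and key strings are placeholders:

```csharp
// Before this change: types were split across sub-namespaces.
// using Microsoft.SemanticKernel.Connectors.Memory.Pinecone;
// using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model;

// After this change: one using directive covers the client, the memory store,
// and the request/model types (PineconeDocument, IndexDefinition, PodType, ...).
using Microsoft.SemanticKernel.Connectors.Pinecone;

var client = new PineconeClient("PINECONE ENVIRONMENT", "PINECONE API KEY");
var memoryStore = new PineconeMemoryStore(client);
```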
diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexStatsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexStatsRequest.cs index 5802b6ad74ca..d1a640dfc02e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexStatsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/DescribeIndexStatsRequest.cs @@ -4,7 +4,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// DescribeIndexStatsRequest diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchRequest.cs index 985702fde458..2496aafb3837 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchRequest.cs @@ -5,7 +5,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// FetchRequest diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchResponse.cs index 5d9116c9e701..694e4dc21a39 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/FetchResponse.cs @@ -4,7 +4,7 @@ using System.Linq; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; #pragma warning disable CA1812 // remove class never instantiated (used by System.Text.Json) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ListIndexesRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ListIndexesRequest.cs index 2636a3bbd5ee..7264dca0b2a2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ListIndexesRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/ListIndexesRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// ListIndexesRequest diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryRequest.cs index 446dac99d767..f460730fd3f6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryRequest.cs @@ -4,9 +4,8 @@ using System.Collections.Generic; using System.Net.Http; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// QueryRequest diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryResponse.cs index 185fb85869b4..71a75593cb10 100644 --- 
a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/QueryResponse.cs @@ -3,7 +3,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; #pragma warning disable CA1812 // remove class never instantiated (used by System.Text.Json) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpdateVectorRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpdateVectorRequest.cs index 008fb11594a6..5910f1d578cd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpdateVectorRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpdateVectorRequest.cs @@ -4,9 +4,8 @@ using System.Collections.Generic; using System.Net.Http; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// The Update operation updates vector in a namespace. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertRequest.cs index 4c945188016c..ae9c04e3d3d2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertRequest.cs @@ -4,7 +4,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// UpsertRequest diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertResponse.cs index 57fcdfb7f402..ad8f7a63d76b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Http/ApiSchema/UpsertResponse.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; #pragma warning disable CA1812 // remove class never instantiated (used by System.Text.Json) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeClient.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeClient.cs index 48fc3753bdaa..c563f902efe4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeClient.cs @@ -4,9 +4,8 @@ using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Interface for a Pinecone client diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs index dc079d9d9857..1e30d1b2bb00 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs +++ 
b/dotnet/src/Connectors/Connectors.Memory.Pinecone/IPineconeMemoryStore.cs @@ -6,7 +6,7 @@ using System.Threading.Tasks; using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Interface for Pinecone memory store that extends the memory store interface diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexDefinition.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexDefinition.cs index 37efe6a3979d..674ac3bf3f32 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexDefinition.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexDefinition.cs @@ -4,7 +4,7 @@ using System.Text; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Used to create a new index. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetadataConfig.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetadataConfig.cs index bfbc322c2cdd..e454625c544d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetadataConfig.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetadataConfig.cs @@ -4,7 +4,7 @@ using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Configuration for the behavior of Pinecone's internal metadata index. By default, all metadata is indexed; when metadata_config is present, only specified metadata fields are indexed. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetric.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetric.cs index 259c53d0904e..78e9f9bf9fc3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetric.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexMetric.cs @@ -3,7 +3,7 @@ using System.Runtime.Serialization; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// The vector similarity metric of the index diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexNamespaceStats.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexNamespaceStats.cs index e3c21012eeea..9d6ad3e38f15 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexNamespaceStats.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexNamespaceStats.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Index namespace parameters. 
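The `IndexMetadataConfig` summary above notes that Pinecone indexes all metadata unless a `metadata_config` selects specific fields. As a loose illustration of the JSON shape that setting corresponds to in Pinecone's index-creation API; the field names and the anonymous-object construction are assumptions for illustration, not this connector's types:

```csharp
using System;
using System.Text.Json;

// Only the listed metadata fields are indexed for filtering; other metadata is
// still stored with each vector but is not usable in filter expressions.
var createIndexBody = new
{
    name = "sk-index",
    dimension = 1536,
    metric = "cosine",
    metadata_config = new { indexed = new[] { "source", "author" } }
};

Console.WriteLine(JsonSerializer.Serialize(createIndexBody, new JsonSerializerOptions { WriteIndented = true }));
```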
diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexState.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexState.cs index 926595a8cd60..a909311bfe6c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexState.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexState.cs @@ -3,7 +3,7 @@ using System.Runtime.Serialization; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// The current status of a index. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStats.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStats.cs index b9d058a95533..9db602df9995 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStats.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStats.cs @@ -3,7 +3,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Index parameters. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStatus.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStatus.cs index 619961eb5f43..4a23163d37fc 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStatus.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/IndexStatus.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Status of the index. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/OperationType.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/OperationType.cs index 55bc5246f0b0..9c689a4fac92 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/OperationType.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/OperationType.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; internal enum OperationType { diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PineconeIndex.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PineconeIndex.cs index c95c6d211943..96ba81a1f5e4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PineconeIndex.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PineconeIndex.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Index entity. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PodType.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PodType.cs index 25c825048375..5821e78c0a81 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PodType.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/PodType.cs @@ -7,7 +7,7 @@ using System.Text.Json; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Pod type of the index, see https://docs.pinecone.io/docs/indexes#pods-pod-types-and-pod-sizes. 
@@ -96,10 +96,18 @@ public enum PodType /// Enum Starter for value: starter /// [EnumMember(Value = "starter")] - Starter = 13 + Starter = 13, + + /// + /// Enum Nano for value: nano + /// + [EnumMember(Value = "nano")] + Nano = 14 } +#pragma warning disable CA1812 // Avoid uninstantiated internal classes internal sealed class PodTypeJsonConverter : JsonConverter +#pragma warning restore CA1812 { public override PodType Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) { @@ -108,7 +116,7 @@ public override PodType Read(ref Utf8JsonReader reader, Type typeToConvert, Json object? enumValue = Enum .GetValues(typeToConvert) .Cast() - .FirstOrDefault(value => value != null && typeToConvert.GetMember(value.ToString())[0] + .FirstOrDefault(value => value != null && typeToConvert.GetMember(value.ToString()!)[0] .GetCustomAttribute(typeof(EnumMemberAttribute)) is EnumMemberAttribute enumMemberAttr && enumMemberAttr.Value == stringValue); if (enumValue != null) diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs index bd549502b180..dab4955787a3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs @@ -5,7 +5,7 @@ using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Query parameters for use in a query request. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs index b4b9bf2d3815..811ab0b58daf 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs @@ -5,7 +5,7 @@ using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Represents a sparse vector data, which is a list of indices and a list of corresponding values, both of the same length. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs index 27a5cbcbc57b..70beb3a424d1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeClient.cs @@ -12,11 +12,9 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// A client for the Pinecone API @@ -35,8 +33,8 @@ public PineconeClient(string pineconeEnvironment, string apiKey, ILoggerFactory? this._pineconeEnvironment = pineconeEnvironment; this._authHeader = new KeyValuePair("Api-Key", apiKey); this._jsonSerializerOptions = PineconeUtils.DefaultSerializerOptions; - this._logger = loggerFactory is not null ? 
loggerFactory.CreateLogger(typeof(PineconeClient)) : NullLogger.Instance; - this._httpClient = httpClient ?? new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); + this._logger = loggerFactory?.CreateLogger(typeof(PineconeClient)) ?? NullLogger.Instance; + this._httpClient = HttpClientProvider.GetHttpClient(httpClient); this._indexHostMapping = new ConcurrentDictionary(); } @@ -258,7 +256,7 @@ public async Task DeleteAsync( { if (ids == null && string.IsNullOrEmpty(indexNamespace) && filter == null && !deleteAll) { - throw new SKException("Must provide at least one of ids, filter, or deleteAll"); + throw new ArgumentException("Must provide at least one of ids, filter, or deleteAll"); } ids = ids?.ToList(); @@ -551,7 +549,7 @@ private string GetIndexOperationsApiBasePath() private async Task GetIndexHostAsync(string indexName, CancellationToken cancellationToken = default) { - if (this._indexHostMapping.TryGetValue(indexName, out string indexHost)) + if (this._indexHostMapping.TryGetValue(indexName, out string? indexHost)) { return indexHost; } @@ -562,12 +560,12 @@ private async Task GetIndexHostAsync(string indexName, CancellationToken if (pineconeIndex == null) { - throw new SKException("Index not found in Pinecone. Create index to perform operations with vectors."); + throw new KernelException("Index not found in Pinecone. Create index to perform operations with vectors."); } if (string.IsNullOrWhiteSpace(pineconeIndex.Status.Host)) { - throw new SKException($"Host of index {indexName} is unknown."); + throw new KernelException($"Host of index {indexName} is unknown."); } this._logger.LogDebug("Found host {0} for index {1}", pineconeIndex.Status.Host, indexName); diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs index 397c5389a926..f743b84062cd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs @@ -5,11 +5,9 @@ using System.Linq; using System.Text.Json; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Pinecone Document entity. 
@@ -154,20 +152,11 @@ public string GetSerializedMetadata() .Where(x => !propertiesToSkip.Contains(x.Key)) .ToDictionary(x => x.Key, x => x.Value); - return JsonSerializer.Serialize(distinctMetadata, s_jsonSerializerOptions); + return JsonSerializer.Serialize(distinctMetadata, JsonOptionsCache.Default); } internal UpdateVectorRequest ToUpdateRequest() { return UpdateVectorRequest.FromPineconeDocument(this); } - - private static readonly JsonSerializerOptions s_jsonSerializerOptions = CreateSerializerOptions(); - - private static JsonSerializerOptions CreateSerializerOptions() - { - var jso = new JsonSerializerOptions(); - jso.Converters.Add(new ReadOnlyMemoryConverter()); - return jso; - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocumentExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocumentExtensions.cs index 0962fd43ac44..e72a54b67c0c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocumentExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocumentExtensions.cs @@ -6,7 +6,7 @@ using System.Text.Json; using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Extensions for class. diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs deleted file mode 100644 index 2db0698f73d4..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeKernelBuilderExtensions.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Net.Http; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone; - -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class to configure Pinecone connectors. -/// -[Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use PineconeMemoryBuilderExtensions instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class PineconeKernelBuilderExtensions -{ - /// - /// Registers Pinecone Memory Store. - /// - /// The instance - /// The environment for Pinecone. - /// The API key for accessing Pinecone services. - /// An optional HttpClient instance for making HTTP requests. - /// Self instance - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use PineconeMemoryBuilderExtensions.WithPineconeMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithPineconeMemoryStore(this KernelBuilder builder, - string environment, - string apiKey, - HttpClient? 
httpClient = null) - { - builder.WithMemoryStorage((loggerFactory, httpHandlerFactory) => - { - var client = new PineconeClient( - environment, - apiKey, - loggerFactory, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory)); - - return new PineconeMemoryStore(client, loggerFactory); - }); - - return builder; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryBuilderExtensions.cs index 984fd97e42db..b61347d5f276 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryBuilderExtensions.cs @@ -1,9 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. using System.Net.Http; -using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Provides extension methods for the class to configure Pinecone connector. @@ -24,13 +25,13 @@ public static MemoryBuilder WithPineconeMemoryStore( string apiKey, HttpClient? httpClient = null) { - builder.WithMemoryStore((loggerFactory, httpHandlerFactory) => + builder.WithMemoryStore((loggerFactory, injectedClient) => { var client = new PineconeClient( environment, apiKey, loggerFactory, - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory)); + HttpClientProvider.GetHttpClient(httpClient ?? injectedClient)); return new PineconeMemoryStore(client, loggerFactory); }); diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryStore.cs index d9d8f5e4468d..eba221daabc5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeMemoryStore.cs @@ -8,11 +8,9 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// An implementation of for Pinecone Vector database. @@ -36,7 +34,7 @@ public PineconeMemoryStore( ILoggerFactory? loggerFactory = null) { this._pineconeClient = pineconeClient; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(PineconeMemoryStore)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(PineconeMemoryStore)) ?? NullLogger.Instance; } /// @@ -51,7 +49,7 @@ public PineconeMemoryStore( ILoggerFactory? loggerFactory = null) { this._pineconeClient = new PineconeClient(pineconeEnvironment, apiKey, loggerFactory); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(PineconeMemoryStore)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(PineconeMemoryStore)) ?? NullLogger.Instance; } /// @@ -65,7 +63,7 @@ public async Task CreateCollectionAsync(string collectionName, CancellationToken { if (!await this.DoesCollectionExistAsync(collectionName, cancellationToken).ConfigureAwait(false)) { - throw new SKException("Index creation is not supported within memory store. 
" + + throw new KernelException("Index creation is not supported within memory store. " + $"It should be created manually or using {nameof(IPineconeClient.CreateIndexAsync)}. " + $"Ensure index state is {IndexState.Ready}."); } @@ -308,7 +306,7 @@ public async IAsyncEnumerable GetBatchFromNamespaceAsync( /// If true, the embedding will be returned in the memory record. /// Cancellation token. /// - /// + /// public async IAsyncEnumerable GetWithDocumentIdAsync(string indexName, string documentId, int limit = 3, @@ -459,7 +457,7 @@ await this._pineconeClient.DeleteAsync( /// The name associated with a collection of embeddings. /// Cancellation token. /// - /// + /// public async Task RemoveWithDocumentIdAsync(string indexName, string documentId, string indexNamespace, CancellationToken cancellationToken = default) { try @@ -484,7 +482,7 @@ public async Task RemoveWithDocumentIdAsync(string indexName, string documentId, /// The name associated with a collection of embeddings. /// Cancellation token. /// - /// + /// public async Task RemoveWithDocumentIdBatchAsync( string indexName, IEnumerable documentIds, diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeUtils.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeUtils.cs index 445234273aa3..373badfb4ff4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeUtils.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeUtils.cs @@ -8,10 +8,8 @@ using System.Text.Encodings.Web; using System.Text.Json; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Http.ApiSchema; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; -namespace Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +namespace Microsoft.SemanticKernel.Connectors.Pinecone; /// /// Utils for Pinecone connector. @@ -44,7 +42,7 @@ public static class PineconeUtils /// public const PodType DefaultPodType = PodType.P1X1; - internal static JsonSerializerOptions DefaultSerializerOptions => new() + internal static JsonSerializerOptions DefaultSerializerOptions { get; } = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, WriteIndented = true, @@ -54,11 +52,7 @@ public static class PineconeUtils UnknownTypeHandling = JsonUnknownTypeHandling.JsonNode, NumberHandling = JsonNumberHandling.AllowReadingFromString, Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, - Converters = - { - new PodTypeJsonConverter(), - new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) - } + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, }; /// @@ -86,7 +80,7 @@ public static async IAsyncEnumerable EnsureValidMetadataAsync( continue; } - if (!document.Metadata.TryGetValue("text", out object value)) + if (!document.Metadata.TryGetValue("text", out object? value)) { yield return document; @@ -159,7 +153,7 @@ internal static async IAsyncEnumerable GetUpsertBatchesAsync( currentBatch = new List(batchSize); } - if (currentBatch.Count <= 0) + if (currentBatch.Count == 0) { yield break; } @@ -177,13 +171,6 @@ private static int GetMetadataSize(Dictionary metadata) return (int)stream.Length; } - private static int GetEntrySize(KeyValuePair entry) - { - Dictionary temp = new() { { entry.Key, entry.Value } }; - - return GetMetadataSize(temp); - } - /// /// Utility method to convert a dictionary of filters to the format expected by Pinecone. 
/// @@ -250,6 +237,7 @@ public static string PodTypeToString(PodType podType) PodType.S1X4 => "s1x4", PodType.S1X8 => "s1x8", PodType.Starter => "starter", + PodType.Nano => "nano", _ => string.Empty }; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/AssemblyInfo.cs new file mode 100644 index 000000000000..8d1d31266ab8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0032")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj b/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj index b8ce22772d9c..218b0d26174d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/Connectors.Memory.Postgres.csproj @@ -2,9 +2,10 @@ - Microsoft.SemanticKernel.Connectors.Memory.Postgres + Microsoft.SemanticKernel.Connectors.Postgres $(AssemblyName) netstandard2.0 + alpha @@ -20,13 +21,9 @@ - - - - diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs index 51064a4e3839..70747990e2fd 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/IPostgresDbClient.cs @@ -6,7 +6,7 @@ using System.Threading.Tasks; using Pgvector; -namespace Microsoft.SemanticKernel.Connectors.Memory.Postgres; +namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// Interface for a client managing postgres database operations. diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresDbClient.cs index 0ce375618414..1dc1ffef3c1d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresDbClient.cs @@ -10,7 +10,7 @@ using NpgsqlTypes; using Pgvector; -namespace Microsoft.SemanticKernel.Connectors.Memory.Postgres; +namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// An implementation of a client for Postgres. This class is used to manage postgres database operations. diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresKernelBuilderExtensions.cs deleted file mode 100644 index 5bbe95e28189..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresKernelBuilderExtensions.cs +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using Microsoft.SemanticKernel.Connectors.Memory.Postgres; -using Npgsql; - -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class to configure Postgres connectors. -/// -[Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. 
Use PostgresMemoryBuilderExtensions instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class PostgresKernelBuilderExtensions -{ - /// - /// Registers Postgres Memory Store. - /// - /// The instance - /// Postgres database connection string. - /// Embedding vector size. - /// Schema of collection tables. - /// Self instance - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use PostgresMemoryBuilderExtensions.WithPostgresMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithPostgresMemoryStore(this KernelBuilder builder, - string connectionString, - int vectorSize, - string schema = PostgresMemoryStore.DefaultSchema) - { - builder.WithMemoryStorage((loggerFactory) => - { - return new PostgresMemoryStore(connectionString, vectorSize, schema); - }); - - return builder; - } - - /// - /// Registers Postgres Memory Store. - /// - /// The instance - /// Postgres data source. - /// Embedding vector size. - /// Schema of collection tables. - /// Self instance - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use PostgresMemoryBuilderExtensions.WithPostgresMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithPostgresMemoryStore(this KernelBuilder builder, - NpgsqlDataSource dataSource, - int vectorSize, - string schema = PostgresMemoryStore.DefaultSchema) - { - builder.WithMemoryStorage((loggerFactory) => - { - return new PostgresMemoryStore(dataSource, vectorSize, schema); - }); - - return builder; - } - - /// - /// Registers Postgres Memory Store. - /// - /// The instance - /// Postgres database client. - /// Self instance - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use PostgresMemoryBuilderExtensions.WithPostgresMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithPostgresMemoryStore(this KernelBuilder builder, IPostgresDbClient postgresDbClient) - { - builder.WithMemoryStorage((loggerFactory) => - { - return new PostgresMemoryStore(postgresDbClient); - }); - - return builder; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryBuilderExtensions.cs index 7037c3c74d6b..00590e14e5aa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryBuilderExtensions.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Memory; using Npgsql; -namespace Microsoft.SemanticKernel.Connectors.Memory.Postgres; +namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// Provides extension methods for the class to configure Postgres connector. 
diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryEntry.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryEntry.cs index a7429b44c157..c18b3dd69686 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryEntry.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryEntry.cs @@ -3,7 +3,7 @@ using System; using Pgvector; -namespace Microsoft.SemanticKernel.Connectors.Memory.Postgres; +namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// A postgres memory entry. @@ -16,7 +16,7 @@ public record struct PostgresMemoryEntry public string Key { get; set; } /// - /// Metadata as a string. + /// Attributes as a string. /// public string MetadataString { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryStore.cs index 18b5bbbb4035..e303a8f13421 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresMemoryStore.cs @@ -7,13 +7,11 @@ using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Memory; using Npgsql; using Pgvector; -using Pgvector.Npgsql; -namespace Microsoft.SemanticKernel.Connectors.Memory.Postgres; +namespace Microsoft.SemanticKernel.Connectors.Postgres; /// /// An implementation of backed by a Postgres database with pgvector extension. @@ -210,7 +208,9 @@ protected virtual void Dispose(bool disposing) { if (disposing) { - this._dataSource?.Dispose(); + // Avoid error when running in .Net 7 where it throws + // Could not load type 'System.Data.Common.DbDataSource' from assembly 'Npgsql, Version=7.* + (this._dataSource as IDisposable)?.Dispose(); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Postgres/README.md b/dotnet/src/Connectors/Connectors.Memory.Postgres/README.md index 937e03a039ef..4941821a3fe1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Postgres/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Postgres/README.md @@ -1,4 +1,4 @@ -# Microsoft.SemanticKernel.Connectors.Memory.Postgres +# Microsoft.SemanticKernel.Connectors.Postgres This connector uses Postgres to implement Semantic Memory. It requires the [pgvector](https://github.com/pgvector/pgvector) extension to be installed on Postgres to implement vector similarity search. @@ -9,6 +9,7 @@ This connector uses Postgres to implement Semantic Memory. It requires the [pgve For instructions on installing the pgvector extension, please refer to its [documentation](https://github.com/pgvector/pgvector#installation). This extension is also available for **Azure Database for PostgreSQL - Flexible Server** and **Azure Cosmos DB for PostgreSQL**. + - [Azure Database for Postgres](https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/how-to-use-pgvector) - [Azure Cosmos DB for PostgreSQL](https://learn.microsoft.com/en-us/azure/cosmos-db/postgresql/howto-use-pgvector) @@ -33,20 +34,20 @@ sk_demo=# CREATE EXTENSION vector; > Note, "Azure Cosmos DB for PostgreSQL" uses `SELECT CREATE_EXTENSION('vector');` to enable the extension. 3. To use Postgres as a semantic memory store: + > See [Example 14](../../../samples/KernelSyntaxExamples/Example14_SemanticMemory.cs) and [Example 15](../../../samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs) for more memory usage examples with the kernel. 
```csharp NpgsqlDataSourceBuilder dataSourceBuilder = new NpgsqlDataSourceBuilder("Host=localhost;Port=5432;Database=sk_demo;User Id=postgres;Password=mysecretpassword"); dataSourceBuilder.UseVector(); NpgsqlDataSource dataSource = dataSourceBuilder.Build(); -PostgresMemoryStore memoryStore = new PostgresMemoryStore(dataSource, vectorSize: 1536/*, schema: "public" */); - -IKernel kernel = new KernelBuilder() - .WithLogger(ConsoleLogger.Logger) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) - .WithMemoryStorage(memoryStore) - //.WithPostgresMemoryStore(dataSource, vectorSize: 1536, schema: "public") // This method offers an alternative approach to registering Postgres memory store. +var memoryWithPostgres = new MemoryBuilder() + .WithPostgresMemoryStore(dataSource, vectorSize: 1536/*, schema: "public" */) + .WithLoggerFactory(loggerFactory) + .WithOpenAITextEmbeddingGeneration("text-embedding-ada-002", apiKey) .Build(); + +var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(memoryWithPostgres)); ``` ### Create Index @@ -56,6 +57,7 @@ IKernel kernel = new KernelBuilder() > You can add an index to use approximate nearest neighbor search, which trades some recall for performance. Unlike typical indexes, you will see different results for queries after adding an approximate index. > Three keys to achieving good recall are: +> > - Create the index after the table has some data > - Choose an appropriate number of lists - a good place to start is rows / 1000 for up to 1M rows and sqrt(rows) for over 1M rows > - When querying, specify an appropriate number of probes (higher is better for recall, lower is better for speed) - a good place to start is sqrt(lists) @@ -87,6 +89,7 @@ END $$; ``` ## Migration from older versions + Since Postgres Memory connector has been re-implemented, the new implementation uses a separate table to store each Collection. We provide the following migration script to help you migrate to the new structure. However, please note that due to the use of collections as table names, you need to make sure that all Collections conform to the [Postgres naming convention](https://www.postgresql.org/docs/15/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS) before migrating. @@ -140,7 +143,7 @@ DECLARE BEGIN FOR r IN SELECT DISTINCT collection FROM sk_memory_table LOOP EXECUTE format('INSERT INTO public.%I (key, metadata, embedding, timestamp) - SELECT key, metadata::JSONB, embedding, to_timestamp(timestamp / 1000.0) AT TIME ZONE ''UTC'' + SELECT key, metadata::JSONB, embedding, to_timestamp(timestamp / 1000.0) AT TIME ZONE ''UTC'' FROM sk_memory_table WHERE collection = %L AND key <> '''';', r.collection, r.collection); END LOOP; END $$; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/AssemblyInfo.cs new file mode 100644 index 000000000000..1589a36f76f4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
+[assembly: Experimental("SKEXP0026")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj index 361115c0f621..474916e5ac88 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj @@ -2,9 +2,10 @@ - Microsoft.SemanticKernel.Connectors.Memory.Qdrant + Microsoft.SemanticKernel.Connectors.Qdrant $(AssemblyName) netstandard2.0 + alpha @@ -22,7 +23,6 @@ - diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/IValidatable.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/IValidatable.cs deleted file mode 100644 index f871ac8705cd..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/IValidatable.cs +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics; - -internal interface IValidatable -{ - void Validate(); -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/ValidateExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/ValidateExtensions.cs deleted file mode 100644 index ece58cd86d18..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/ValidateExtensions.cs +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; - -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics; - -internal static class ValidateExtensions -{ - public static void Validate(IValidatable target) - { - target.Validate(); - } - - public static void Validate(params IValidatable[] targets) - { - foreach (var t in targets ?? Enumerable.Empty()) - { - Validate(t); - } - } - - public static void ValidateRequired(this IValidatable item, string arg) - { - Verify.NotNull(item, arg); - item.Validate(); - } - - public static void ValidateRequired(this object item, string arg) - { - if (item is IValidatable v) - { - v.ValidateRequired(arg); - } - else - { - Verify.NotNull(item, arg); - } - } - - public static void ValidateRequired(this string item, string arg) - { - Verify.NotNullOrEmpty(item, arg); - } - - public static void ValidateOptional(this IValidatable item, string arg) - { - if (item == null) - { - return; - } - - item.ValidateRequired(arg); - } - - [SuppressMessage("Design", "CA1031:Modify to catch a more specific allowed exception type, or rethrow exception", - Justification = "Does not throw an exception by design.")] - public static bool IsValid(this IValidatable target) - { - try - { - target.ValidateRequired("target"); - return true; - } - catch - { - } - - return false; - } - - public static void ValidateRequired(this IEnumerable list, string arg) - { - Verify.NotNull(list, nameof(list)); - foreach (T item in list) - { - item?.ValidateRequired(arg); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/Verify.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/Verify.cs deleted file mode 100644 index 7fb4003facc0..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Diagnostics/Verify.cs +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Runtime.CompilerServices; - -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics; - -internal static class Verify -{ - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static void True(bool value, string message) - { - if (!value) - { - throw new ArgumentException(message); - } - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal static void NotNull([NotNull] object? obj, string message) - { - if (obj != null) { return; } - - throw new ArgumentNullException(null, message); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal static void NotNullOrEmpty([NotNull] string? str, string message) - { - NotNull(str, message); - if (!string.IsNullOrWhiteSpace(str)) { return; } - - throw new ArgumentOutOfRangeException(message); - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal static void ArgNotNullOrEmpty([NotNull] string? str, string paramName, [CallerMemberName] string? caller = default) - { - NotNull(str, paramName); - if (!string.IsNullOrWhiteSpace(str)) { return; } - - throw new ArgumentException(paramName, $"Parameter {paramName} cannot be empty." + (!string.IsNullOrEmpty(caller) ? $"({caller})" : string.Empty)); - } - - internal static void NotNullOrEmpty(IList list, string message) - { - if (list == null || list.Count == 0) - { - throw new ArgumentOutOfRangeException(message); - } - } - - public static void IsValidUrl(string name, string url, bool requireHttps, bool allowReservedIp, bool allowQuery) - { - static bool IsReservedIpAddress(string host) - { - return host.StartsWith("0.", StringComparison.Ordinal) || - host.StartsWith("10.", StringComparison.Ordinal) || - host.StartsWith("127.", StringComparison.Ordinal) || - host.StartsWith("169.254.", StringComparison.Ordinal) || - host.StartsWith("192.0.0.", StringComparison.Ordinal) || - host.StartsWith("192.88.99.", StringComparison.Ordinal) || - host.StartsWith("192.168.", StringComparison.Ordinal) || - host.StartsWith("255.255.255.255", StringComparison.Ordinal); - } - - if (string.IsNullOrEmpty(url)) - { - throw new ArgumentException($"The {name} is empty", name); - } - - if (requireHttps && url.StartsWith("http://", StringComparison.OrdinalIgnoreCase)) - { - throw new ArgumentException($"The {name} `{url}` is not safe, it must start with https://", name); - } - - if (requireHttps && !url.StartsWith("https://", StringComparison.OrdinalIgnoreCase)) - { - throw new ArgumentException($"The {name} `{url}` is incomplete, enter a valid URL starting with 'https://", name); - } - - bool result = Uri.TryCreate(url, UriKind.Absolute, out var uri); - if (!result || string.IsNullOrEmpty(uri.Host)) - { - throw new ArgumentException($"The {name} `{url}` is not valid", name); - } - - if (requireHttps && uri.Scheme != Uri.UriSchemeHttps) - { - throw new ArgumentException($"The {name} `{url}` is not safe, it must start with https://", name); - } - - if (!allowReservedIp && (uri.IsLoopback || IsReservedIpAddress(uri.Host))) - { - throw new ArgumentException($"The {name} `{url}` is not safe, it cannot point to a reserved network address", name); - } - - if (!allowQuery && !string.IsNullOrEmpty(uri.Query)) - { - throw new ArgumentException($"The {name} `{url}` is not valid, it cannot contain query parameters", name); - } - - if (!string.IsNullOrEmpty(uri.Fragment)) - { - throw new ArgumentException($"The {name} `{url}` is not valid, it cannot contain URL fragments", 
name); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/CreateCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/CreateCollectionRequest.cs index 22d39e7c1807..ae724f176af3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/CreateCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/CreateCollectionRequest.cs @@ -3,9 +3,8 @@ using System; using System.Net.Http; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal sealed class CreateCollectionRequest { @@ -33,7 +32,7 @@ public HttpRequestMessage Build() payload: this); } - internal sealed class VectorSettings : IValidatable + internal sealed class VectorSettings { [JsonPropertyName("size")] public int? Size { get; set; } @@ -47,15 +46,6 @@ public string? DistanceAsString [JsonIgnore] private QdrantDistanceType DistanceType { get; set; } - public void Validate() - { - Verify.True(this.Size > 0, "The vector size must be greater than zero"); - Verify.NotNull(this.DistanceType, "The distance type has not been defined"); - Verify.True( - this.DistanceType is QdrantDistanceType.Cosine or QdrantDistanceType.DotProduct or QdrantDistanceType.Euclidean or QdrantDistanceType.Manhattan, - $"Distance type {this.DistanceType:G} not supported."); - } - public VectorSettings(int vectorSize, QdrantDistanceType distanceType) { this.Size = vectorSize; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteCollectionRequest.cs index cd670231514d..0c8bb4a3366e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteCollectionRequest.cs @@ -1,11 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. 
using System.Net.Http; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; -internal sealed class DeleteCollectionRequest : IValidatable +internal sealed class DeleteCollectionRequest { public static DeleteCollectionRequest Create(string collectionName) { @@ -14,12 +13,11 @@ public static DeleteCollectionRequest Create(string collectionName) public void Validate() { - Verify.NotNullOrEmpty(this._collectionName, "The collection name is empty"); } public HttpRequestMessage Build() { - this.Validate(); + Verify.NotNullOrWhiteSpace(this._collectionName, "collectionName"); return HttpRequest.CreateDeleteRequest($"collections/{this._collectionName}?timeout=30"); } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsRequest.cs index c8da48a49049..712db7750fa1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsRequest.cs @@ -3,11 +3,10 @@ using System.Collections.Generic; using System.Net.Http; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; -internal sealed class DeleteVectorsRequest : IValidatable +internal sealed class DeleteVectorsRequest { [JsonPropertyName("points")] public List Ids { get; set; } @@ -17,12 +16,6 @@ public static DeleteVectorsRequest DeleteFrom(string collectionName) return new DeleteVectorsRequest(collectionName); } - public void Validate() - { - Verify.NotNullOrEmpty(this._collectionName, "The collection name is empty"); - Verify.NotNullOrEmpty(this.Ids, "The list of vectors to delete is NULL or empty"); - } - public DeleteVectorsRequest DeleteVector(string qdrantPointId) { Verify.NotNull(qdrantPointId, "The point ID is NULL"); @@ -39,7 +32,9 @@ public DeleteVectorsRequest DeleteRange(IEnumerable qdrantPointIds) public HttpRequestMessage Build() { - this.Validate(); + Verify.NotNullOrWhiteSpace(this._collectionName, "collectionName"); + Verify.NotNullOrEmpty(this.Ids, "The list of vectors to delete is NULL or empty"); + return HttpRequest.CreatePostRequest( $"collections/{this._collectionName}/points/delete", payload: this); diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsResponse.cs new file mode 100644 index 000000000000..8144aa458eaa --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/DeleteVectorsResponse.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.Qdrant; + +/// +/// Empty qdrant response for requests that return nothing but status / error. +/// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes. 
Justification: deserialized by QdrantVectorDbClient.DeleteVectorsByIdAsync & QdrantVectorDbClient.DeleteVectorByPayloadIdAsync +internal sealed class DeleteVectorsResponse : QdrantResponse +#pragma warning restore CA1812 // Avoid uninstantiated internal classes +{ +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetCollectionRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetCollectionRequest.cs index a0f939b07afb..044b93e5c181 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetCollectionRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetCollectionRequest.cs @@ -3,7 +3,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal sealed class GetCollectionsRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs index 6abb67ec42a5..9ed68b78f85c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsRequest.cs @@ -5,7 +5,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal sealed class GetVectorsRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsResponse.cs index 49813b86e1f6..da23a88e1124 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/GetVectorsResponse.cs @@ -4,7 +4,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization internal sealed class GetVectorsResponse : QdrantResponse diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsRequest.cs index 2464956fb763..d98c4fc84871 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal sealed class ListCollectionsRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsResponse.cs index 1296126661e9..34e28f1153e8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/ListCollectionsResponse.cs @@ -3,7 +3,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -namespace 
Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization internal sealed class ListCollectionsResponse : QdrantResponse diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/NumberToStringConverter.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/NumberToStringConverter.cs index ac31332a0e1c..c3c4d1c94b0d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/NumberToStringConverter.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/NumberToStringConverter.cs @@ -5,7 +5,8 @@ using System.Text.Json; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; + #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization internal sealed class NumberToStringConverter : JsonConverter { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/QdrantResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/QdrantResponse.cs index 0ede71413017..f3c1e4a45eb5 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/QdrantResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/QdrantResponse.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// Base class for Qdrant response schema. diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsRequest.cs index 1800c3aca6c9..8fbe76352de9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsRequest.cs @@ -4,11 +4,10 @@ using System.Collections.Generic; using System.Net.Http; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; -internal sealed class SearchVectorsRequest : IValidatable +internal sealed class SearchVectorsRequest { [JsonPropertyName("vector")] public ReadOnlyMemory StartingVector { get; set; } @@ -104,25 +103,21 @@ public SearchVectorsRequest TakeFirst() return this.FromPosition(0).Take(1); } - public void Validate() + public HttpRequestMessage Build() { - Verify.NotNull(this.StartingVector, "Missing target, either provide a vector or a vector size"); - Verify.NotNullOrEmpty(this._collectionName, "The collection name is empty"); + Verify.NotNull(this.StartingVector); + Verify.NotNullOrWhiteSpace(this._collectionName); Verify.True(this.Limit > 0, "The number of vectors must be greater than zero"); this.Filters.Validate(); - } - public HttpRequestMessage Build() - { - this.Validate(); return HttpRequest.CreatePostRequest( $"collections/{this._collectionName}/points/search", payload: this); } - internal sealed class Filter : IValidatable + internal sealed class Filter { - internal sealed class Match : IValidatable + internal sealed class Match { [JsonPropertyName("value")] public object Value { get; set; } @@ 
-131,13 +126,9 @@ public Match() { this.Value = string.Empty; } - - public void Validate() - { - } } - internal sealed class Must : IValidatable + internal sealed class Must { [JsonPropertyName("key")] public string Key { get; set; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs index 4d2b7bd9ef30..19797b6a9613 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs @@ -5,7 +5,7 @@ using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization internal sealed class SearchVectorsResponse : QdrantResponse diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorRequest.cs index b258caee498e..641a081af116 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorRequest.cs @@ -5,7 +5,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal sealed class UpsertVectorRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorResponse.cs index 47b66a26e899..5fcc38d52f4d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/UpsertVectorResponse.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; #pragma warning disable CA1812 // Avoid uninstantiated internal classes: Used for Json Deserialization internal sealed class UpsertVectorResponse : QdrantResponse diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/SecureHttpHandler.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/SecureHttpHandler.cs index 0adef5cb1319..f5ec0cf02ee1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/SecureHttpHandler.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/SecureHttpHandler.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal static class HttpHandlers { diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs index 8bd6e89275e0..ae7500974e9f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/IQdrantVectorDbClient.cs @@ -5,7 +5,7 @@ using System.Threading; using System.Threading.Tasks; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; 
/// /// Interface for a Qdrant vector database client. diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantDistanceType.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantDistanceType.cs index f72160421be6..007ae102ed1d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantDistanceType.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantDistanceType.cs @@ -2,7 +2,7 @@ using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// The vector distance type used by Qdrant. diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs deleted file mode 100644 index 0ed45e671fbb..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantKernelBuilderExtensions.cs +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Net.Http; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant; - -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class to configure Qdrant memory connector. -/// -[Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use QdrantMemoryBuilderExtensions instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class QdrantKernelBuilderExtensions -{ - /// - /// Registers Qdrant memory connector. - /// - /// The instance. - /// The Qdrant Vector Database endpoint. - /// The size of the vectors. - /// Self instance - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use QdrantMemoryBuilderExtensions.WithQdrantMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithQdrantMemoryStore(this KernelBuilder builder, - string endpoint, - int vectorSize) - { - builder.WithMemoryStorage((loggerFactory, httpHandlerFactory) => - { - var client = new QdrantVectorDbClient( - HttpClientProvider.GetHttpClient(httpHandlerFactory, null, loggerFactory), - vectorSize, - endpoint, - loggerFactory); - - return new QdrantMemoryStore(client, loggerFactory); - }); - - return builder; - } - - /// - /// Registers Qdrant memory connector. - /// - /// The instance - /// The optional instance used for making HTTP requests. - /// The size of the vectors. - /// The Qdrant Vector Database endpoint. If not specified, the base address of the HTTP client is used. - /// Self instance - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use QdrantMemoryBuilderExtensions.WithQdrantMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithQdrantMemoryStore(this KernelBuilder builder, - HttpClient httpClient, - int vectorSize, - string? 
endpoint = null) - { - builder.WithMemoryStorage((loggerFactory, httpHandlerFactory) => - { - var client = new QdrantVectorDbClient( - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - vectorSize, - endpoint, - loggerFactory); - - return new QdrantMemoryStore(client, loggerFactory); - }); - - return builder; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryBuilderExtensions.cs index 8f18fbe4f850..2712fcc72e58 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryBuilderExtensions.cs @@ -1,9 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. using System.Net.Http; -using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// Provides extension methods for the class to configure Qdrant connector. @@ -22,10 +23,10 @@ public static MemoryBuilder WithQdrantMemoryStore( string endpoint, int vectorSize) { - builder.WithMemoryStore((loggerFactory, httpHandlerFactory) => + builder.WithMemoryStore((loggerFactory, injectedClient) => { var client = new QdrantVectorDbClient( - HttpClientProvider.GetHttpClient(httpHandlerFactory, null, loggerFactory), + HttpClientProvider.GetHttpClient(injectedClient), vectorSize, endpoint, loggerFactory); @@ -50,10 +51,10 @@ public static MemoryBuilder WithQdrantMemoryStore( int vectorSize, string? endpoint = null) { - builder.WithMemoryStore((loggerFactory, httpHandlerFactory) => + builder.WithMemoryStore((loggerFactory, injectedClient) => { var client = new QdrantVectorDbClient( - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), + HttpClientProvider.GetHttpClient(httpClient ?? injectedClient), vectorSize, endpoint, loggerFactory); diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs index 99809f4e7759..738eba7dfc12 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantMemoryStore.cs @@ -9,10 +9,9 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// An implementation of for Qdrant Vector Database. @@ -36,7 +35,7 @@ public class QdrantMemoryStore : IMemoryStore public QdrantMemoryStore(string endpoint, int vectorSize, ILoggerFactory? loggerFactory = null) { this._qdrantClient = new QdrantVectorDbClient(endpoint, vectorSize, loggerFactory); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(QdrantMemoryStore)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(QdrantMemoryStore)) ?? NullLogger.Instance; } /// @@ -49,7 +48,7 @@ public QdrantMemoryStore(string endpoint, int vectorSize, ILoggerFactory? logger public QdrantMemoryStore(HttpClient httpClient, int vectorSize, string? endpoint = null, ILoggerFactory? 
loggerFactory = null) { this._qdrantClient = new QdrantVectorDbClient(httpClient, vectorSize, endpoint, loggerFactory); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(QdrantMemoryStore)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(QdrantMemoryStore)) ?? NullLogger.Instance; } /// @@ -60,7 +59,7 @@ public QdrantMemoryStore(HttpClient httpClient, int vectorSize, string? endpoint public QdrantMemoryStore(IQdrantVectorDbClient client, ILoggerFactory? loggerFactory = null) { this._qdrantClient = client; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(QdrantMemoryStore)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(QdrantMemoryStore)) ?? NullLogger.Instance; } /// @@ -100,7 +99,7 @@ public async Task UpsertAsync(string collectionName, MemoryRecord record if (vectorData == null) { - throw new SKException("Failed to convert memory record to Qdrant vector record"); + throw new KernelException("Failed to convert memory record to Qdrant vector record"); } try @@ -186,7 +185,7 @@ public async IAsyncEnumerable GetBatchAsync(string collectionName, /// If true, the embedding will be returned in the memory record. /// The to monitor for cancellation requests. The default is . /// Memory record - /// + /// public async Task GetWithPointIdAsync(string collectionName, string pointId, bool withEmbedding = false, CancellationToken cancellationToken = default) { @@ -262,7 +261,7 @@ public async Task RemoveBatchAsync(string collectionName, IEnumerable ke /// The name associated with a collection of embeddings. /// The unique indexed ID associated with the Qdrant vector record to remove. /// The to monitor for cancellation requests. The default is . - /// + /// public async Task RemoveWithPointIdAsync(string collectionName, string pointId, CancellationToken cancellationToken = default) { try @@ -282,7 +281,7 @@ public async Task RemoveWithPointIdAsync(string collectionName, string pointId, /// The name associated with a collection of embeddings. /// The unique indexed IDs associated with the Qdrant vector records to remove. /// The to monitor for cancellation requests. The default is . 
- /// + /// public async Task RemoveWithPointIdBatchAsync(string collectionName, IEnumerable pointIds, CancellationToken cancellationToken = default) { try @@ -419,7 +418,7 @@ private async Task ConvertFromMemoryRecordAsync( if (vectorData == null) { - throw new SKException("Failed to convert memory record to Qdrant vector record"); + throw new KernelException("Failed to convert memory record to Qdrant vector record"); } return vectorData; diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs index 7803aad8b16b..23906615a360 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorDbClient.cs @@ -11,11 +11,9 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Http.ApiSchema; -using Microsoft.SemanticKernel.Diagnostics; -using Verify = Microsoft.SemanticKernel.Connectors.Memory.Qdrant.Diagnostics.Verify; +using Microsoft.SemanticKernel.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// An implementation of a client for the Qdrant Vector Database. This class is used to @@ -37,9 +35,9 @@ public QdrantVectorDbClient( ILoggerFactory? loggerFactory = null) { this._vectorSize = vectorSize; - this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); + this._httpClient = HttpClientProvider.GetHttpClient(); this._httpClient.BaseAddress = SanitizeEndpoint(endpoint); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(QdrantVectorDbClient)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(QdrantVectorDbClient)) ?? NullLogger.Instance; } /// @@ -57,13 +55,13 @@ public QdrantVectorDbClient( { if (string.IsNullOrEmpty(httpClient.BaseAddress?.AbsoluteUri) && string.IsNullOrEmpty(endpoint)) { - throw new SKException("The HttpClient BaseAddress and endpoint are both null or empty. Please ensure at least one is provided."); + throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); } this._httpClient = httpClient; this._vectorSize = vectorSize; this._endpointOverride = string.IsNullOrEmpty(endpoint) ? null : SanitizeEndpoint(endpoint!); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(QdrantVectorDbClient)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(QdrantVectorDbClient)) ?? 
NullLogger.Instance; } /// @@ -176,8 +174,8 @@ public async Task DeleteVectorsByIdAsync(string collectionName, IEnumerable(responseContent); + var result = JsonSerializer.Deserialize(responseContent); if (result?.Status == "ok") { this._logger.LogDebug("Vector being deleted"); @@ -236,7 +234,7 @@ public async Task DeleteVectorByPayloadIdAsync(string collectionName, string met throw; } - var result = JsonSerializer.Deserialize(responseContent); + var result = JsonSerializer.Deserialize(responseContent); if (result?.Status == "ok") { this._logger.LogDebug("Vector being deleted"); @@ -251,8 +249,8 @@ public async Task DeleteVectorByPayloadIdAsync(string collectionName, string met public async Task UpsertVectorsAsync(string collectionName, IEnumerable vectorData, CancellationToken cancellationToken = default) { this._logger.LogDebug("Upserting vectors"); - Verify.NotNull(vectorData, "The vector data entries are NULL"); - Verify.NotNullOrEmpty(collectionName, "Collection name is empty"); + Verify.NotNull(vectorData); + Verify.NotNullOrWhiteSpace(collectionName); using var request = UpsertVectorRequest.Create(collectionName) .UpsertRange(vectorData) @@ -465,7 +463,7 @@ public async IAsyncEnumerable ListCollectionsAsync([EnumeratorCancellati private static Uri SanitizeEndpoint(string endpoint, int? port = null) { - Verify.IsValidUrl(nameof(endpoint), endpoint, false, true, false); + Verify.ValidateUrl(endpoint); UriBuilder builder = new(endpoint); if (port.HasValue) { builder.Port = port.Value; } @@ -480,7 +478,7 @@ private static Uri SanitizeEndpoint(string endpoint, int? port = null) //Apply endpoint override if it's specified. if (this._endpointOverride != null) { - request.RequestUri = new Uri(this._endpointOverride, request.RequestUri); + request.RequestUri = new Uri(this._endpointOverride, request.RequestUri!); } HttpResponseMessage response = await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs index 5dd1ad3a43eb..ea3affd94693 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs @@ -4,10 +4,9 @@ using System.Collections.Generic; using System.Text.Json; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +namespace Microsoft.SemanticKernel.Connectors.Qdrant; /// /// A record structure used by Qdrant that contains an embedding and metadata. @@ -71,7 +70,7 @@ public string GetSerializedPayload() /// /// /// Vector record - /// Qdrant exception + /// Qdrant exception public static QdrantVectorRecord FromJsonMetadata(string pointId, ReadOnlyMemory embedding, string json, List? 
tags = null) { var payload = JsonSerializer.Deserialize>(json); @@ -80,6 +79,6 @@ public static QdrantVectorRecord FromJsonMetadata(string pointId, ReadOnlyMemory return new QdrantVectorRecord(pointId, embedding, payload, tags); } - throw new SKException("Unable to deserialize record payload"); + throw new KernelException("Unable to deserialize record payload"); } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/AssemblyInfo.cs new file mode 100644 index 000000000000..363ae3657d56 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0027")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj b/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj index 554afa9e0538..9faa763e46aa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj @@ -2,9 +2,10 @@ - Microsoft.SemanticKernel.Connectors.Memory.Redis + Microsoft.SemanticKernel.Connectors.Redis $(AssemblyName) netstandard2.0 + alpha @@ -20,9 +21,6 @@ - - - diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/README.md b/dotnet/src/Connectors/Connectors.Memory.Redis/README.md index 77ff2e855913..f2f735daee5f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/README.md +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/README.md @@ -1,4 +1,4 @@ -# Microsoft.SemanticKernel.Connectors.Memory.Redis +# Microsoft.SemanticKernel.Connectors.Redis This connector uses Redis to implement Semantic Memory. It requires the [RediSearch](https://redis.io/docs/interact/search-and-query/) module to be enabled on Redis to implement vector similarity search. @@ -7,6 +7,7 @@ This connector uses Redis to implement Semantic Memory. It requires the [RediSea [RediSearch](https://redis.io/docs/interact/search-and-query/) is a source-available Redis module that enables querying, secondary indexing, and full-text search for Redis. These features enable multi-field queries, aggregation, exact phrase matching, numeric filtering, geo filtering and vector similarity semantic search on top of text queries. Ways to get RediSearch: + 1. You can create an [Azure Cache for Redis Enterprise instance](https://learn.microsoft.com/azure/azure-cache-for-redis/quickstart-create-redis-enterprise) and [enable RediSearch module](https://learn.microsoft.com/azure/azure-cache-for-redis/cache-redis-modules). 1. Set up RediSearch on your self-managed Redis; refer to its [documentation](https://redis.io/docs/interact/search-and-query/quickstart/). @@ -22,6 +23,7 @@ docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:la ``` 2. To use Redis as a semantic memory store: + > See [Example 14](../../../samples/KernelSyntaxExamples/Example14_SemanticMemory.cs) and [Example 15](../../../samples/KernelSyntaxExamples/Example15_TextMemoryPlugin.cs) for more memory usage examples with the kernel. ```csharp // ConnectionMultiplexer should be a singleton instance in your application; consider disposing of it when your application shuts down.
@@ -30,9 +32,9 @@ ConnectionMultiplexer connectionMultiplexer = await ConnectionMultiplexer.Connec IDatabase database = connectionMultiplexer.GetDatabase(); RedisMemoryStore memoryStore = new RedisMemoryStore(database, vectorSize: 1536); -IKernel kernel = new KernelBuilder() - .WithLogger(ConsoleLogger.Logger) - .WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", Env.Var("OPENAI_API_KEY")) - .WithMemoryStorage(memoryStore) - .Build(); +var embeddingGenerator = new OpenAITextEmbeddingGenerationService("text-embedding-ada-002", apiKey); + +SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator); + +var memoryPlugin = kernel.ImportPluginFromObject(new TextMemoryPlugin(textMemory)); ``` diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisMemoryStore.cs index b275660260c5..33d2188df310 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisMemoryStore.cs @@ -7,7 +7,6 @@ using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Memory; using NRedisStack; using NRedisStack.RedisStackCommands; @@ -16,7 +15,7 @@ using StackExchange.Redis; using static NRedisStack.Search.Schema.VectorField; -namespace Microsoft.SemanticKernel.Connectors.Memory.Redis; +namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// An implementation of for Redis. @@ -364,7 +363,7 @@ private double GetSimilarity(Document document) if (vectorScoreValue.IsNullOrEmpty || !vectorScoreValue.TryParse(out double vectorScore)) { - throw new SKException("Invalid or missing vector score value."); + throw new KernelException("Invalid or missing vector score value."); } return 1 - vectorScore; diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorDistanceMetric.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorDistanceMetric.cs index 2d5ff71c900c..b211a050393b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorDistanceMetric.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorDistanceMetric.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Connectors.Memory.Redis; +namespace Microsoft.SemanticKernel.Connectors.Redis; /// /// Supported distance metrics are {L2, IP, COSINE}. The default value is "COSINE". diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/AssemblyInfo.cs new file mode 100644 index 000000000000..130c3cb6fdfc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
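To complement the updated Redis README snippet above, here is a slightly fuller, hedged sketch of the new wiring. Assumptions: Semantic Kernel 1.x packages, a local Redis Stack instance, an OpenAI key available in `apiKey`, and an illustrative collection name; experimental-API (SKEXP) suppressions are omitted here and discussed after the AssemblyInfo hunks below.

```csharp
using System;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Connectors.Redis;
using Microsoft.SemanticKernel.Memory;
using StackExchange.Redis;

// apiKey: your OpenAI API key (assumed to be defined elsewhere).
// ConnectionMultiplexer should remain a singleton for the lifetime of the application.
ConnectionMultiplexer connectionMultiplexer = await ConnectionMultiplexer.ConnectAsync("localhost:6379");
IDatabase database = connectionMultiplexer.GetDatabase();

RedisMemoryStore memoryStore = new(database, vectorSize: 1536);
var embeddingGenerator = new OpenAITextEmbeddingGenerationService("text-embedding-ada-002", apiKey);
SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator);

// Store a fact and recall it by similarity search (collection name "docs" is illustrative).
await textMemory.SaveInformationAsync("docs", text: "RediSearch must be enabled for vector search.", id: "doc-1");
await foreach (var hit in textMemory.SearchAsync("docs", "Which Redis module is required?", limit: 1))
{
    Console.WriteLine($"{hit.Relevance:F2}: {hit.Metadata.Text}");
}
```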
+[assembly: Experimental("SKEXP0028")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj index 8625483cf0ab..5d1db02079fa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Connectors.Memory.Sqlite.csproj @@ -2,9 +2,10 @@ - Microsoft.SemanticKernel.Connectors.Memory.Sqlite + Microsoft.SemanticKernel.Connectors.Sqlite $(AssemblyName) netstandard2.0 + alpha diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs index 210f75af659b..84e844800e84 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs @@ -7,7 +7,7 @@ using System.Threading.Tasks; using Microsoft.Data.Sqlite; -namespace Microsoft.SemanticKernel.Connectors.Memory.Sqlite; +namespace Microsoft.SemanticKernel.Connectors.Sqlite; internal struct DatabaseEntry { diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteExtensions.cs index ea6e590ea96e..70c4c0e8d6e8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteExtensions.cs @@ -2,7 +2,7 @@ using Microsoft.Data.Sqlite; -namespace Microsoft.SemanticKernel.Connectors.Memory.Sqlite; +namespace Microsoft.SemanticKernel.Connectors.Sqlite; internal static class SqliteExtensions { diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs index d15c47bf5ab7..ae88f2b2e9e1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs @@ -13,7 +13,7 @@ using Microsoft.SemanticKernel.Memory; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Sqlite; +namespace Microsoft.SemanticKernel.Connectors.Sqlite; /// /// An implementation of backed by a SQLite database. 
@@ -234,7 +234,7 @@ private async IAsyncEnumerable GetAllAsync(string collectionName, await foreach (DatabaseEntry dbEntry in this._dbConnector.ReadAllAsync(this._dbConnection, collectionName, cancellationToken)) { - ReadOnlyMemory vector = JsonSerializer.Deserialize>(dbEntry.EmbeddingString, s_jsonSerializerOptions); + ReadOnlyMemory vector = JsonSerializer.Deserialize>(dbEntry.EmbeddingString, JsonOptionsCache.Default); var record = MemoryRecord.FromJsonMetadata(dbEntry.MetadataString, vector, dbEntry.Key, ParseTimestamp(dbEntry.Timestamp)); @@ -252,7 +252,7 @@ await this._dbConnector.UpdateAsync( collection: collectionName, key: record.Key, metadata: record.GetSerializedMetadata(), - embedding: JsonSerializer.Serialize(record.Embedding, s_jsonSerializerOptions), + embedding: JsonSerializer.Serialize(record.Embedding, JsonOptionsCache.Default), timestamp: ToTimestampString(record.Timestamp), cancellationToken: cancellationToken).ConfigureAwait(false); @@ -262,7 +262,7 @@ await this._dbConnector.InsertOrIgnoreAsync( collection: collectionName, key: record.Key, metadata: record.GetSerializedMetadata(), - embedding: JsonSerializer.Serialize(record.Embedding, s_jsonSerializerOptions), + embedding: JsonSerializer.Serialize(record.Embedding, JsonOptionsCache.Default), timestamp: ToTimestampString(record.Timestamp), cancellationToken: cancellationToken).ConfigureAwait(false); @@ -283,7 +283,7 @@ await this._dbConnector.InsertOrIgnoreAsync( { return MemoryRecord.FromJsonMetadata( json: entry.Value.MetadataString, - JsonSerializer.Deserialize>(entry.Value.EmbeddingString, s_jsonSerializerOptions), + JsonSerializer.Deserialize>(entry.Value.EmbeddingString, JsonOptionsCache.Default), entry.Value.Key, ParseTimestamp(entry.Value.Timestamp)); } @@ -295,14 +295,5 @@ await this._dbConnector.InsertOrIgnoreAsync( ParseTimestamp(entry.Value.Timestamp)); } - private static readonly JsonSerializerOptions s_jsonSerializerOptions = CreateSerializerOptions(); - - private static JsonSerializerOptions CreateSerializerOptions() - { - var jso = new JsonSerializerOptions(); - jso.Converters.Add(new ReadOnlyMemoryConverter()); - return jso; - } - #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/AssemblyInfo.cs new file mode 100644 index 000000000000..03bce96ee563 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
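The Sqlite hunks above swap the store's private `s_jsonSerializerOptions` for the shared `JsonOptionsCache.Default` when (de)serializing embeddings. As a rough, illustrative equivalent (the type and member names below are invented for the sketch, not the connector's internal API), the cached options simply carry a `ReadOnlyMemory<float>` converter and get reused across calls instead of being rebuilt per store:

```csharp
using System;
using System.Text.Json;
using System.Text.Json.Serialization;

// Round-trips ReadOnlyMemory<float> embeddings as plain JSON float arrays.
internal sealed class ReadOnlyFloatMemoryConverter : JsonConverter<ReadOnlyMemory<float>>
{
    public override ReadOnlyMemory<float> Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
        => JsonSerializer.Deserialize<float[]>(ref reader, options) ?? Array.Empty<float>();

    public override void Write(Utf8JsonWriter writer, ReadOnlyMemory<float> value, JsonSerializerOptions options)
        => JsonSerializer.Serialize(writer, value.ToArray(), options);
}

internal static class EmbeddingJson
{
    // One cached options instance, reused by every serialize/deserialize call.
    public static readonly JsonSerializerOptions Options = new()
    {
        Converters = { new ReadOnlyFloatMemoryConverter() }
    };
}
```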
+[assembly: Experimental("SKEXP0029")] diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj index b142a7b5ba2c..ba985c11f536 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj @@ -2,9 +2,10 @@ - Microsoft.SemanticKernel.Connectors.Memory.Weaviate + Microsoft.SemanticKernel.Connectors.Weaviate $(AssemblyName) netstandard2.0 + alpha @@ -22,7 +23,6 @@ - diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchRequest.cs index 0d2c45943bdb..ce2f4d9f4aa3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchRequest.cs @@ -2,10 +2,9 @@ using System.Collections.Generic; using System.Net.Http; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class BatchRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchResponse.cs index ff99d058f8ca..1dd522095052 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/BatchResponse.cs @@ -2,10 +2,8 @@ using System; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.JsonConverter; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; // ReSharper disable once ClassNeverInstantiated.Global #pragma warning disable CA1812 // 'BatchResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaRequest.cs index 4d6926a0b04b..8513099f7b15 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaRequest.cs @@ -1,9 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. 
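The AssemblyInfo files added above mark the Redis, Sqlite, and Weaviate memory connectors as experimental (SKEXP0027, SKEXP0028, and SKEXP0029 respectively), and their packages gain an `alpha` version suffix. A hedged consumer-side sketch of what that implies, using the Sqlite connector as the example; the diagnostic can equally be suppressed project-wide via a `NoWarn` item in the csproj:

```csharp
#pragma warning disable SKEXP0028 // Sqlite memory connector is experimental (ID from the AssemblyInfo above)
using Microsoft.SemanticKernel.Connectors.Sqlite;

// Open (or create) a local SQLite-backed memory store and a collection in it.
var store = await SqliteMemoryStore.ConnectAsync("memories.sqlite");
await store.CreateCollectionAsync("docs");
#pragma warning restore SKEXP0028
```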
using System.Net.Http; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class CreateClassSchemaRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaResponse.cs index 9383d5c8047d..7d3148ce8d1f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateClassSchemaResponse.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'CreateClassSchemaResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). internal sealed class CreateClassSchemaResponse diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateGraphRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateGraphRequest.cs index d937eb78019d..75c6f2224d14 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateGraphRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/CreateGraphRequest.cs @@ -4,7 +4,7 @@ using System.Net.Http; using System.Runtime.InteropServices; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; // ReSharper disable once ClassCannotBeInstantiated internal sealed class CreateGraphRequest diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteObjectRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteObjectRequest.cs index fb63456e4b28..2578860a6062 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteObjectRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteObjectRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class DeleteObjectRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteSchemaRequest.cs index 39f92a7116ce..720fbb8ab67d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteSchemaRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/DeleteSchemaRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class DeleteSchemaRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassRequest.cs index d32d27bc549b..ed1335f00de3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassRequest.cs +++ 
b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassRequest.cs @@ -3,7 +3,7 @@ using System.Net.Http; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class GetClassRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassResponse.cs index fe5a527b8a6b..569558c5916a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetClassResponse.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GetClassResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). internal sealed class GetClassResponse diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetObjectRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetObjectRequest.cs index 6ef723a3c3b8..64f7924209e3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetObjectRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetObjectRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class GetObjectRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaRequest.cs index 4afe69a9351d..1f4abf4d80f4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaRequest.cs @@ -2,7 +2,7 @@ using System.Net.Http; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class GetSchemaRequest { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaResponse.cs index eef0bf1b5c02..2653cb45262d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GetSchemaResponse.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GetSchemaResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). 
internal sealed class GetSchemaResponse diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GraphResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GraphResponse.cs index 679d9b77bb97..4a09d9ce47ae 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GraphResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/GraphResponse.cs @@ -1,9 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. using System.Text.Json.Nodes; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GraphResponse' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). internal sealed class GraphResponse diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/ObjectResponseResult.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/ObjectResponseResult.cs index 231b88455c6f..608715c4a6fb 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/ObjectResponseResult.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/ApiSchema/ObjectResponseResult.cs @@ -2,7 +2,7 @@ using System.Text.Json.Nodes; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; // ReSharper disable once ClassNeverInstantiated.Global #pragma warning disable CA1812 // 'ObjectResponseResult' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/HttpRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/HttpRequest.cs index 60a51cf482e1..21b5a4c43cd1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/HttpRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/HttpRequest.cs @@ -6,11 +6,16 @@ using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal static class HttpRequest { - private static readonly JsonSerializerOptions s_jsonSerializerOptions = CreateSerializerOptions(); + private static readonly JsonSerializerOptions s_jsonOptionsCache = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = { JsonOptionsCache.ReadOnlyMemoryConverter }, + }; public static HttpRequestMessage CreateGetRequest(string url, object? payload = null) { @@ -40,18 +45,7 @@ public static HttpRequestMessage CreateDeleteRequest(string url) return null; } - string strPayload = payload as string ?? JsonSerializer.Serialize(payload, s_jsonSerializerOptions); + string strPayload = payload as string ?? 
JsonSerializer.Serialize(payload, s_jsonOptionsCache); return new(strPayload, Encoding.UTF8, "application/json"); } - - private static JsonSerializerOptions CreateSerializerOptions() - { - var jso = new JsonSerializerOptions() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - jso.Converters.Add(new ReadOnlyMemoryConverter()); - return jso; - } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/JsonConverter/UnixSecondsDateTimeJsonConverter.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/JsonConverter/UnixSecondsDateTimeJsonConverter.cs index caa81bb96779..acf672b251a8 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/JsonConverter/UnixSecondsDateTimeJsonConverter.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/JsonConverter/UnixSecondsDateTimeJsonConverter.cs @@ -4,7 +4,7 @@ using System.Text.Json; using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.JsonConverter; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'UnixSecondsDateTimeJsonConverter' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). internal sealed class UnixSecondsDateTimeJsonConverter : JsonConverter diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Deprecation.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Deprecation.cs index 9b6938632ba6..95c88fa9929e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Deprecation.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Deprecation.cs @@ -2,7 +2,7 @@ using System; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'Deprecation' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). internal sealed class Deprecation diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphError.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphError.cs index f0826015bc0c..88bc4077442d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphError.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphError.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GraphError' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). internal sealed class GraphError diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphErrorLocationsItems.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphErrorLocationsItems.cs index 29401d725a79..1931dc46bd2b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphErrorLocationsItems.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/GraphErrorLocationsItems.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
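The Weaviate `HttpRequest` hunk above folds the old `CreateSerializerOptions()` helper into a single cached `JsonSerializerOptions` (camelCase names, nulls ignored) used when building request bodies. A minimal stand-alone sketch of that pattern follows; the helper and type names are illustrative rather than the connector's internal API:

```csharp
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

internal static class JsonHttp
{
    // One cached options instance shared by every request, mirroring the connector's approach.
    private static readonly JsonSerializerOptions s_options = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    public static HttpRequestMessage CreatePostRequest(string url, object payload) =>
        new(HttpMethod.Post, url)
        {
            Content = new StringContent(JsonSerializer.Serialize(payload, s_options), Encoding.UTF8, "application/json")
        };
}
```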
-namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; #pragma warning disable CA1812 // 'GraphErrorLocationsItems' is an internal class that is apparently never instantiated. If so, remove the code from the assembly. If this class is intended to contain only static members, make it 'static' (Module in Visual Basic). internal sealed class GraphErrorLocationsItems diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Property.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Property.cs index 94c223691914..67f8e7f0f3c9 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Property.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/Property.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal sealed class Property { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/WeaviateObject.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/WeaviateObject.cs index f3c60fb3a461..34de5449651b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/WeaviateObject.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Model/WeaviateObject.cs @@ -3,7 +3,7 @@ using System; using System.Collections.Generic; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; internal class WeaviateObject { diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs deleted file mode 100644 index e11047eb2288..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Net.Http; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate; - -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class to configure Weaviate memory connector. -/// -[Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use WeaviateMemoryBuilderExtensions instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class WeaviateKernelBuilderExtensions -{ - /// - /// Registers Weaviate memory connector. - /// - /// The instance. - /// The Weaviate server endpoint URL. - /// The API key for accessing Weaviate server. - /// The API version to use. - /// Self instance - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use WeaviateMemoryBuilderExtensions.WithWeaviateMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithWeaviateMemoryStore( - this KernelBuilder builder, - string endpoint, - string? apiKey, - string? 
apiVersion = null) - { - builder.WithMemoryStorage((loggerFactory, httpHandlerFactory) => - { - return new WeaviateMemoryStore( - HttpClientProvider.GetHttpClient(httpHandlerFactory, null, loggerFactory), - apiKey, - endpoint, - apiVersion, - loggerFactory); - }); - - return builder; - } - - /// - /// Registers Weaviate memory connector. - /// - /// The instance - /// The optional instance used for making HTTP requests. - /// The Weaviate server endpoint URL. If not specified, the base address of the HTTP client is used. - /// The API key for accessing Weaviate server. - /// The API version to use. - /// Self instance - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. Use WeaviateMemoryBuilderExtensions.WithWeaviateMemoryStore instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static KernelBuilder WithWeaviateMemoryStore(this KernelBuilder builder, - HttpClient httpClient, - string? endpoint = null, - string? apiKey = null, - string? apiVersion = null) - { - builder.WithMemoryStorage((loggerFactory, httpHandlerFactory) => - { - return new WeaviateMemoryStore( - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), - apiKey, - endpoint, - apiVersion, - loggerFactory); - }); - - return builder; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryBuilderExtensions.cs index b1dbd2686707..fc437c6688f7 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryBuilderExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryBuilderExtensions.cs @@ -1,9 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. using System.Net.Http; -using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Memory; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// Provides extension methods for the class to configure Weaviate connector. @@ -24,10 +25,10 @@ public static MemoryBuilder WithWeaviateMemoryStore( string? apiKey, string? apiVersion = null) { - builder.WithMemoryStore((loggerFactory, httpHandlerFactory) => + builder.WithMemoryStore((loggerFactory, injectedClient) => { return new WeaviateMemoryStore( - HttpClientProvider.GetHttpClient(httpHandlerFactory, null, loggerFactory), + HttpClientProvider.GetHttpClient(injectedClient), apiKey, endpoint, apiVersion, @@ -53,10 +54,10 @@ public static MemoryBuilder WithWeaviateMemoryStore( string? apiKey = null, string? apiVersion = null) { - builder.WithMemoryStore((loggerFactory, httpHandlerFactory) => + builder.WithMemoryStore((loggerFactory, injectedClient) => { return new WeaviateMemoryStore( - HttpClientProvider.GetHttpClient(httpHandlerFactory, httpClient, loggerFactory), + HttpClientProvider.GetHttpClient(httpClient ?? 
injectedClient), apiKey, endpoint, apiVersion, diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs index 821200b61968..4e76651a5f29 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs @@ -15,13 +15,11 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Http.ApiSchema; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate.Model; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Http; using Microsoft.SemanticKernel.Memory; using Microsoft.SemanticKernel.Text; -namespace Microsoft.SemanticKernel.Connectors.Memory.Weaviate; +namespace Microsoft.SemanticKernel.Connectors.Weaviate; /// /// An implementation of for Weaviate. @@ -45,11 +43,11 @@ public class WeaviateMemoryStore : IMemoryStore private const string DefaultApiVersion = "v1"; - private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() + private static readonly JsonSerializerOptions s_jsonOptionsCache = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - Converters = { new ReadOnlyMemoryConverter() } + Converters = { JsonOptionsCache.ReadOnlyMemoryConverter } }; private readonly HttpClient _httpClient; @@ -57,6 +55,7 @@ public class WeaviateMemoryStore : IMemoryStore private readonly Uri? _endpoint = null; private readonly string? _apiVersion; private readonly string? _apiKey; + private static readonly string[] s_stringArray = { "vector" }; /// /// Initializes a new instance of the class. @@ -76,8 +75,8 @@ public WeaviateMemoryStore( this._endpoint = new Uri(endpoint); this._apiKey = apiKey; this._apiVersion = apiVersion; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(WeaviateMemoryStore)) : NullLogger.Instance; - this._httpClient = new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false); + this._logger = loggerFactory?.CreateLogger(typeof(WeaviateMemoryStore)) ?? NullLogger.Instance; + this._httpClient = HttpClientProvider.GetHttpClient(); } /// @@ -99,13 +98,13 @@ public WeaviateMemoryStore( if (string.IsNullOrEmpty(httpClient.BaseAddress?.AbsoluteUri) && string.IsNullOrEmpty(endpoint)) { - throw new SKException("The HttpClient BaseAddress and endpoint are both null or empty. Please ensure at least one is provided."); + throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); } this._apiKey = apiKey; this._apiVersion = apiVersion; this._endpoint = string.IsNullOrEmpty(endpoint) ? null : new Uri(endpoint); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(WeaviateMemoryStore)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(WeaviateMemoryStore)) ?? NullLogger.Instance; this._httpClient = httpClient; } @@ -125,11 +124,11 @@ public async Task CreateCollectionAsync(string collectionName, CancellationToken { (HttpResponseMessage response, string responseContent) = await this.ExecuteHttpRequestAsync(request, cancellationToken).ConfigureAwait(false); - CreateClassSchemaResponse? 
result = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); + CreateClassSchemaResponse? result = JsonSerializer.Deserialize(responseContent, s_jsonOptionsCache); if (result == null || result.Description != description) { - throw new SKException($"Name conflict for collection: {collectionName} with class name: {className}"); + throw new KernelException($"Name conflict for collection: {collectionName} with class name: {className}"); } this._logger.LogDebug("Created collection: {0}, with class name: {1}", collectionName, className); @@ -156,7 +155,7 @@ public async Task DoesCollectionExistAsync(string collectionName, Cancella { (_, string responseContent) = await this.ExecuteHttpRequestAsync(request, cancellationToken).ConfigureAwait(false); - GetClassResponse? existing = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); + GetClassResponse? existing = JsonSerializer.Deserialize(responseContent, s_jsonOptionsCache); if (existing != null && existing.Description != ToWeaviateFriendlyClassDescription(collectionName)) { @@ -165,7 +164,7 @@ public async Task DoesCollectionExistAsync(string collectionName, Cancella // For example a collectionName of '__this_collection' and 'this_collection' are // both transformed to the class name of thiscollection - even though the external // system could consider them as unique collection names. - throw new SKException($"Unable to verify existing collection: {collectionName} with class name: {className}"); + throw new KernelException($"Unable to verify existing collection: {collectionName} with class name: {className}"); } return true; @@ -201,10 +200,10 @@ public async IAsyncEnumerable GetCollectionsAsync([EnumeratorCancellatio throw; } - GetSchemaResponse? getSchemaResponse = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); + GetSchemaResponse? getSchemaResponse = JsonSerializer.Deserialize(responseContent, s_jsonOptionsCache); if (getSchemaResponse == null) { - throw new SKException("Unable to deserialize list collections response"); + throw new KernelException("Unable to deserialize list collections response"); } foreach (GetClassResponse? @class in getSchemaResponse.Classes!) @@ -275,11 +274,11 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE throw; } - BatchResponse[]? result = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); + BatchResponse[]? result = JsonSerializer.Deserialize(responseContent, s_jsonOptionsCache); if (result == null) { - throw new SKException("Unable to deserialize batch response"); + throw new KernelException("Unable to deserialize batch response"); } foreach (BatchResponse batchResponse in result) @@ -297,7 +296,7 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE using HttpRequestMessage request = new GetObjectRequest { Id = key, - Additional = withEmbedding ? new[] { "vector" } : null + Additional = withEmbedding ? s_stringArray : null }.Build(); string responseContent; @@ -312,7 +311,7 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE return null; } - WeaviateObject? weaviateObject = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); + WeaviateObject? weaviateObject = JsonSerializer.Deserialize(responseContent, s_jsonOptionsCache); if (weaviateObject == null) { this._logger.LogError("Unable to deserialize response to WeaviateObject"); @@ -321,12 +320,12 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE DateTimeOffset? 
timestamp = weaviateObject.Properties == null ? null - : weaviateObject.Properties.TryGetValue("sk_timestamp", out object value) + : weaviateObject.Properties.TryGetValue("sk_timestamp", out object? value) ? Convert.ToDateTime(value.ToString(), CultureInfo.InvariantCulture) : null; MemoryRecord record = new( - key: weaviateObject.Id!, + key: weaviateObject.Id, timestamp: timestamp, embedding: weaviateObject.Vector, metadata: ToMetadata(weaviateObject)); @@ -420,7 +419,7 @@ public async Task RemoveBatchAsync(string collectionName, IEnumerable ke { (_, string responseContent) = await this.ExecuteHttpRequestAsync(request, cancellationToken).ConfigureAwait(false); - GraphResponse? data = JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); + GraphResponse? data = JsonSerializer.Deserialize(responseContent, s_jsonOptionsCache); if (data == null) { @@ -502,7 +501,7 @@ private static MemoryRecord DeserializeToMemoryRecord(JsonNode? json) // Get a class description, useful for checking name collisions private static string ToWeaviateFriendlyClassDescription(string collectionName) { - return $"{"Semantic Kernel memory store for collection:"} {collectionName}"; + return $"Semantic Kernel memory store for collection: {collectionName}"; } // Convert a collectionName to a valid Weaviate class name @@ -528,7 +527,7 @@ private static string ToWeaviateFriendlyClassName(string collectionName) var apiVersion = !string.IsNullOrWhiteSpace(this._apiVersion) ? this._apiVersion : DefaultApiVersion; var baseAddress = this._endpoint ?? this._httpClient.BaseAddress; - request.RequestUri = new Uri(baseAddress, $"{apiVersion}/{request.RequestUri}"); + request.RequestUri = new Uri(baseAddress!, $"{apiVersion}/{request.RequestUri}"); if (!string.IsNullOrEmpty(this._apiKey)) { @@ -554,20 +553,15 @@ private static string ToWeaviateFriendlyClassName(string collectionName) private static MemoryRecordMetadata ToMetadata(WeaviateObject weaviateObject) { - if (weaviateObject.Properties == null) - { -#pragma warning disable CA2208 - throw new ArgumentNullException(nameof(weaviateObject.Properties)); -#pragma warning restore CA2208 - } + Verify.NotNull(weaviateObject.Properties, "weaviateObject.Properties"); return new( false, string.Empty, - weaviateObject.Properties["sk_id"].ToString(), - weaviateObject.Properties["sk_description"].ToString(), - weaviateObject.Properties["sk_text"].ToString(), - weaviateObject.Properties["sk_additional_metadata"].ToString() + weaviateObject.Properties["sk_id"].ToString() ?? string.Empty, + weaviateObject.Properties["sk_description"].ToString() ?? string.Empty, + weaviateObject.Properties["sk_text"].ToString() ?? string.Empty, + weaviateObject.Properties["sk_additional_metadata"].ToString() ?? 
string.Empty ); } } diff --git a/dotnet/src/Connectors/Connectors.AI.Oobabooga/README.md b/dotnet/src/Connectors/Connectors.Oobabooga/README.md similarity index 100% rename from dotnet/src/Connectors/Connectors.AI.Oobabooga/README.md rename to dotnet/src/Connectors/Connectors.Oobabooga/README.md diff --git a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs similarity index 91% rename from dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs rename to dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs index a8aa6ca7f38f..33f155b9eeec 100644 --- a/dotnet/src/Connectors/Connectors.AI.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs @@ -3,7 +3,7 @@ using Azure.Core; using Azure.Core.Pipeline; -namespace Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; +namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// /// Helper class to inject headers into Azure SDK HTTP pipeline diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs new file mode 100644 index 000000000000..91550505182f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs @@ -0,0 +1,100 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using Azure; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Core implementation for Azure OpenAI clients, providing common functionality and properties. +/// +internal sealed class AzureOpenAIClientCore : ClientCore +{ + /// + /// Gets the key used to store the deployment name in the dictionary. + /// + public static string DeploymentNameKey => "DeploymentName"; + + /// + /// OpenAI / Azure OpenAI Client + /// + internal override OpenAIClient Client { get; } + + /// + /// Initializes a new instance of the class using API Key authentication. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal AzureOpenAIClientCore( + string deploymentName, + string endpoint, + string apiKey, + HttpClient? httpClient = null, + ILogger? logger = null) : base(logger) + { + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); + Verify.NotNullOrWhiteSpace(apiKey); + + var options = GetOpenAIClientOptions(httpClient); + + this.DeploymentOrModelName = deploymentName; + this.Client = new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), options); + } + + /// + /// Initializes a new instance of the class supporting AAD authentication. 
+ /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal AzureOpenAIClientCore( + string deploymentName, + string endpoint, + TokenCredential credential, + HttpClient? httpClient = null, + ILogger? logger = null) : base(logger) + { + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); + + var options = GetOpenAIClientOptions(httpClient); + + this.DeploymentOrModelName = deploymentName; + this.Client = new OpenAIClient(new Uri(endpoint), credential, options); + } + + /// + /// Initializes a new instance of the class using the specified OpenAIClient. + /// Note: instances created this way might not have the default diagnostics settings, + /// it's up to the caller to configure the client. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// The to use for logging. If null, no logging will be performed. + internal AzureOpenAIClientCore( + string deploymentName, + OpenAIClient openAIClient, + ILogger? logger = null) : base(logger) + { + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNull(openAIClient); + + this.DeploymentOrModelName = deploymentName; + this.Client = openAIClient; + + this.AddAttribute(DeploymentNameKey, deploymentName); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs new file mode 100644 index 000000000000..04b0e7a2c00a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// OpenAI specialized with data chat message content +/// +[Experimental("SKEXP0010")] +public sealed class AzureOpenAIWithDataChatMessageContent : ChatMessageContent +{ + /// + /// Content from data source, including citations. + /// For more information see . + /// + public string? ToolContent { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// Azure Chat With Data Choice + /// The model ID used to generate the content + /// Additional metadata + internal AzureOpenAIWithDataChatMessageContent(ChatWithDataChoice chatChoice, string? modelId, IReadOnlyDictionary? metadata = null) + : base(default, string.Empty, modelId, chatChoice, System.Text.Encoding.UTF8, CreateMetadataDictionary(metadata)) + { + // An assistant message content must be present, otherwise the chat is not valid. + var chatMessage = chatChoice.Messages.FirstOrDefault(m => string.Equals(m.Role, AuthorRole.Assistant.Label, StringComparison.OrdinalIgnoreCase)) ?? + throw new ArgumentException("Chat is not valid. 
Chat message does not contain any messages with 'assistant' role."); + + this.Content = chatMessage.Content; + this.Role = new AuthorRole(chatMessage.Role); + + this.ToolContent = chatChoice.Messages.FirstOrDefault(message => message.Role.Equals(AuthorRole.Tool.Label, StringComparison.OrdinalIgnoreCase))?.Content; + ((Dictionary)this.Metadata!).Add(nameof(this.ToolContent), this.ToolContent); + } + + private static Dictionary CreateMetadataDictionary(IReadOnlyDictionary? metadata) + { + Dictionary newDictionary; + if (metadata is null) + { + // There's no existing metadata to clone; just allocate a new dictionary. + newDictionary = new Dictionary(1); + } + else if (metadata is IDictionary origMutable) + { + // Efficiently clone the old dictionary to a new one. + newDictionary = new Dictionary(origMutable); + } + else + { + // There's metadata to clone but we have to do so one item at a time. + newDictionary = new Dictionary(metadata.Count + 1); + foreach (var kvp in metadata) + { + newDictionary[kvp.Key] = kvp.Value; + } + } + + return newDictionary; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs new file mode 100644 index 000000000000..457c2f0babf3 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Text; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Azure Open AI WithData Specialized streaming chat message content. +/// +/// +/// Represents a chat message content chunk that was streamed from the remote model. +/// +[Experimental("SKEXP0010")] +public sealed class AzureOpenAIWithDataStreamingChatMessageContent : StreamingChatMessageContent +{ + /// + public string? FunctionName { get; set; } + + /// + public string? FunctionArgument { get; set; } + + /// + /// Create a new instance of the class. + /// + /// Azure message update representation from WithData apis + /// Index of the choice + /// The model ID used to generate the content + /// Additional metadata + internal AzureOpenAIWithDataStreamingChatMessageContent(ChatWithDataStreamingChoice choice, int choiceIndex, string modelId, IReadOnlyDictionary? metadata = null) : + base(AuthorRole.Assistant, null, choice, choiceIndex, modelId, Encoding.UTF8, metadata) + { + var message = choice.Messages.FirstOrDefault(this.IsValidMessage); + var messageContent = message?.Delta?.Content; + + this.Content = messageContent; + } + + private bool IsValidMessage(ChatWithDataStreamingMessage message) + { + return !message.EndTurn && + (message.Delta.Role is null || !message.Delta.Role.Equals(AuthorRole.Tool.Label, StringComparison.Ordinal)); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs new file mode 100644 index 000000000000..fdda7217125b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace Microsoft.SemanticKernel; + +/// +/// Chat history extensions. +/// +public static class ChatHistoryExtensions +{ + /// + /// Add a message to the chat history at the end of the streamed message + /// + /// Target chat history + /// list of streaming message contents + /// Returns the original streaming results with some message processing + [Experimental("SKEXP0014")] + public static async IAsyncEnumerable AddStreamingMessageAsync(this ChatHistory chatHistory, IAsyncEnumerable streamingMessageContents) + { + List messageContents = new(); + + // Stream the response. + StringBuilder? contentBuilder = null; + Dictionary? toolCallIdsByIndex = null; + Dictionary? functionNamesByIndex = null; + Dictionary? functionArgumentBuildersByIndex = null; + Dictionary? metadata = null; + AuthorRole? streamedRole = default; + await foreach (var chatMessage in streamingMessageContents.ConfigureAwait(false)) + { + metadata ??= (Dictionary?)chatMessage.Metadata; + + if (chatMessage.Content is { Length: > 0 } contentUpdate) + { + (contentBuilder ??= new()).Append(contentUpdate); + } + + OpenAIFunctionToolCall.TrackStreamingToolingUpdate(chatMessage.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + + // Is always expected to have at least one chunk with the role provided from a streaming message + streamedRole ??= chatMessage.Role; + + messageContents.Add(chatMessage); + yield return chatMessage; + } + + if (messageContents.Count != 0) + { + chatHistory.Add(new OpenAIChatMessageContent( + streamedRole ?? AuthorRole.Assistant, + contentBuilder?.ToString() ?? string.Empty, + messageContents[0].ModelId!, + OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex), + metadata)); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs new file mode 100644 index 000000000000..bb7278e11d60 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs @@ -0,0 +1,1024 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Linq; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.AI.OpenAI; +using Azure.Core.Pipeline; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Http; + +#pragma warning disable CA2208 // Instantiate argument exceptions correctly + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal abstract class ClientCore +{ + private const int MaxResultsPerPrompt = 128; + + /// + /// The maximum number of auto-invokes that can be in-flight at any given time as part of the current + /// asynchronous chain of execution. + /// + /// + /// This is a fail-safe mechanism. 
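The new `ChatHistoryExtensions.AddStreamingMessageAsync` above re-yields streamed chunks while accumulating their content and tool-call updates, then appends the aggregated assistant message to the history once the stream ends. A hedged usage sketch follows; it assumes Semantic Kernel 1.x, an OpenAI chat completion service available as `chatService`, and that the extension's element type is the connector's OpenAI-specific streaming content (the generic parameters are not visible in this rendering):

```csharp
#pragma warning disable SKEXP0014 // the extension above is marked experimental
using System;
using System.Collections.Generic;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;

ChatHistory history = new("You are a helpful assistant.");
history.AddUserMessage("Summarize the connector changes in this PR.");

// Re-yield the connector's updates as the concrete streaming type the extension expects.
async IAsyncEnumerable<OpenAIStreamingChatMessageContent> StreamAsync()
{
    await foreach (var update in chatService.GetStreamingChatMessageContentsAsync(history))
    {
        yield return (OpenAIStreamingChatMessageContent)update;
    }
}

await foreach (var chunk in history.AddStreamingMessageAsync(StreamAsync()))
{
    Console.Write(chunk.Content); // print deltas as they arrive
}
// When the loop completes, the aggregated assistant message has been appended to `history`.
#pragma warning restore SKEXP0014
```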
If someone accidentally manages to set up execution settings in such a way that + /// auto-invocation is invoked recursively, and in particular where a prompt function is able to auto-invoke itself, + /// we could end up in an infinite loop. This const is a backstop against that happening. We should never come close + /// to this limit, but if we do, auto-invoke will be disabled for the current flow in order to prevent runaway execution. + /// With the current setup, the way this could possibly happen is if a prompt function is configured with built-in + /// execution settings that opt-in to auto-invocation of everything in the kernel, in which case the invocation of that + /// prompt function could advertize itself as a candidate for auto-invocation. We don't want to outright block that, + /// if that's something a developer has asked to do (e.g. it might be invoked with different arguments than its parent + /// was invoked with), but we do want to limit it. This limit is arbitrary and can be tweaked in the future and/or made + /// configurable should need arise. + /// + private const int MaxInflightAutoInvokes = 5; + + /// Tracking for . + private static readonly AsyncLocal s_inflightAutoInvokes = new(); + + internal ClientCore(ILogger? logger = null) + { + this.Logger = logger ?? NullLogger.Instance; + } + + /// + /// Model Id or Deployment Name + /// + internal string DeploymentOrModelName { get; set; } = string.Empty; + + /// + /// OpenAI / Azure OpenAI Client + /// + internal abstract OpenAIClient Client { get; } + + /// + /// Logger instance + /// + internal ILogger Logger { get; set; } + + /// + /// Storage for AI service attributes. + /// + internal Dictionary Attributes { get; } = new(); + + /// + /// Instance of for metrics. + /// + private static readonly Meter s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI"); + + /// + /// Instance of to keep track of the number of prompt tokens used. + /// + private static readonly Counter s_promptTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.prompt", + unit: "{token}", + description: "Number of prompt tokens used"); + + /// + /// Instance of to keep track of the number of completion tokens used. + /// + private static readonly Counter s_completionTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.completion", + unit: "{token}", + description: "Number of completion tokens used"); + + /// + /// Instance of to keep track of the total number of tokens used. + /// + private static readonly Counter s_totalTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.total", + unit: "{token}", + description: "Number of tokens used"); + + /// + /// Creates completions for the prompt and settings. + /// + /// The prompt to complete. + /// Execution settings for the completion API. + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Completions generated by the remote model + internal async Task> GetTextResultsAsync( + string text, + PromptExecutionSettings? executionSettings, + Kernel? 
kernel, + CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings textExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); + + ValidateMaxTokens(textExecutionSettings.MaxTokens); + + var options = CreateCompletionsOptions(text, textExecutionSettings, this.DeploymentOrModelName); + + var responseData = (await RunRequestAsync(() => this.Client.GetCompletionsAsync(options, cancellationToken)).ConfigureAwait(false)).Value; + if (responseData.Choices.Count == 0) + { + throw new KernelException("Text completions not found"); + } + + this.CaptureUsageDetails(responseData.Usage); + IReadOnlyDictionary metadata = GetResponseMetadata(responseData); + return responseData.Choices.Select(choice => new TextContent(choice.Text, this.DeploymentOrModelName, choice, Encoding.UTF8, metadata)).ToList(); + } + + internal async IAsyncEnumerable GetStreamingTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings textExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); + + ValidateMaxTokens(textExecutionSettings.MaxTokens); + + var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName); + + StreamingResponse? response = await RunRequestAsync(() => this.Client.GetCompletionsStreamingAsync(options, cancellationToken)).ConfigureAwait(false); + + IReadOnlyDictionary? metadata = null; + await foreach (Completions completions in response) + { + metadata ??= GetResponseMetadata(completions); + foreach (Choice choice in completions.Choices) + { + yield return new OpenAIStreamingTextContent(choice.Text, choice.Index, this.DeploymentOrModelName, choice, metadata); + } + } + } + + private static Dictionary GetResponseMetadata(Completions completions) + { + return new Dictionary(4) + { + { nameof(completions.Id), completions.Id }, + { nameof(completions.Created), completions.Created }, + { nameof(completions.PromptFilterResults), completions.PromptFilterResults }, + { nameof(completions.Usage), completions.Usage }, + }; + } + + private static Dictionary GetResponseMetadata(ChatCompletions completions) + { + return new Dictionary(5) + { + { nameof(completions.Id), completions.Id }, + { nameof(completions.Created), completions.Created }, + { nameof(completions.PromptFilterResults), completions.PromptFilterResults }, + { nameof(completions.SystemFingerprint), completions.SystemFingerprint }, + { nameof(completions.Usage), completions.Usage }, + }; + } + + private static Dictionary GetResponseMetadata(StreamingChatCompletionsUpdate completions) + { + return new Dictionary(3) + { + { nameof(completions.Id), completions.Id }, + { nameof(completions.Created), completions.Created }, + { nameof(completions.SystemFingerprint), completions.SystemFingerprint }, + }; + } + + /// + /// Generates an embedding from the given . + /// + /// List of strings to generate embeddings for + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// List of embeddings + internal async Task>> GetEmbeddingsAsync( + IList data, + Kernel? 
kernel, + CancellationToken cancellationToken) + { + var result = new List>(data.Count); + + if (data.Count > 0) + { + var response = await RunRequestAsync(() => this.Client.GetEmbeddingsAsync(new(this.DeploymentOrModelName, data), cancellationToken)).ConfigureAwait(false); + var embeddings = response.Value.Data; + + if (embeddings.Count != data.Count) + { + throw new KernelException($"Expected {data.Count} text embedding(s), but received {embeddings.Count}"); + } + + for (var i = 0; i < embeddings.Count; i++) + { + result.Add(embeddings[i].Embedding); + } + } + + return result; + } + + /// + /// Generate a new chat message + /// + /// Chat history + /// Execution settings for the completion API. + /// The containing services, plugins, and other state for use throughout the operation. + /// Async cancellation token + /// Generated chat message in string format + internal async Task> GetChatMessageContentsAsync( + ChatHistory chat, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + CancellationToken cancellationToken = default) + { + Verify.NotNull(chat); + + // Convert the incoming execution settings to OpenAI settings. + OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes; + ValidateMaxTokens(chatExecutionSettings.MaxTokens); + ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt); + + // Create the Azure SDK ChatCompletionOptions instance from all available information. + var chatOptions = CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName); + + for (int iteration = 1; ; iteration++) + { + // Make the request. + var responseData = (await RunRequestAsync(() => this.Client.GetChatCompletionsAsync(chatOptions, cancellationToken)).ConfigureAwait(false)).Value; + this.CaptureUsageDetails(responseData.Usage); + if (responseData.Choices.Count == 0) + { + throw new KernelException("Chat completions not found"); + } + + IReadOnlyDictionary metadata = GetResponseMetadata(responseData); + + // If we don't want to attempt to invoke any functions, just return the result. + // Or if we are auto-invoking but we somehow end up with other than 1 choice even though only 1 was requested, similarly bail. + if (!autoInvoke || responseData.Choices.Count != 1) + { + return responseData.Choices.Select(chatChoice => new OpenAIChatMessageContent(chatChoice.Message, this.DeploymentOrModelName, metadata)).ToList(); + } + + Debug.Assert(kernel is not null); + + // Get our single result and extract the function call information. If this isn't a function call, or if it is + // but we're unable to find the function or extract the relevant information, just return the single result. + // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service + // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool + // is specified. 
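+            // Editorial illustration (not part of the original change): from the caller's side this branch is
+            // reached when function calling is enabled on the execution settings, for example:
+            //
+            //     var settings = new OpenAIPromptExecutionSettings
+            //     {
+            //         ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
+            //     };
+            //     var replies = await chatService.GetChatMessageContentsAsync(history, settings, kernel);
+            //
+            // "chatService" and "history" are assumed names; the behavior value shown is the one that makes
+            // MaximumAutoInvokeAttempts positive so the tool calls handled below are resolved and invoked here.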
+ ChatChoice resultChoice = responseData.Choices[0]; + OpenAIChatMessageContent result = new(resultChoice.Message, this.DeploymentOrModelName, metadata); + if (result.ToolCalls.Count == 0) + { + return new[] { result }; + } + + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Tool requests: {Requests}", result.ToolCalls.Count); + } + if (this.Logger.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", result.ToolCalls.OfType().Select(ftc => $"{ftc.Name}({ftc.Arguments})"))); + } + + // Add the original assistant message to the chatOptions; this is required for the service + // to understand the tool call responses. Also add the result message to the caller's chat + // history: if they don't want it, they can remove it, but this makes the data available, + // including metadata like usage. + chatOptions.Messages.Add(GetRequestMessage(resultChoice.Message)); + chat.Add(result); + + // We must send back a response for every tool call, regardless of whether we successfully executed it or not. + // If we successfully execute it, we'll add the result. If we don't, we'll add an error. + for (int i = 0; i < result.ToolCalls.Count; i++) + { + ChatCompletionsToolCall toolCall = result.ToolCalls[i]; + + // We currently only know about function tool calls. If it's anything else, we'll respond with an error. + if (toolCall is not ChatCompletionsFunctionToolCall functionToolCall) + { + AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall.Id, this.Logger); + continue; + } + + // Parse the function call arguments. + OpenAIFunctionToolCall? openAIFunctionToolCall; + try + { + openAIFunctionToolCall = new(functionToolCall); + } + catch (JsonException) + { + AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall.Id, this.Logger); + continue; + } + + // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked, + // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able + // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check. + if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true && + !IsRequestableTool(chatOptions, openAIFunctionToolCall)) + { + AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall.Id, this.Logger); + continue; + } + + // Find the function in the kernel and populate the arguments. + if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs)) + { + AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall.Id, this.Logger); + continue; + } + + // Now, invoke the function, and add the resulting tool call message to the chat options. + s_inflightAutoInvokes.Value++; + object? functionResult; + try + { + // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any + // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, + // as the called function could in turn telling the model about itself as a possible candidate for invocation. 
+ functionResult = (await function.InvokeAsync(kernel, functionArgs, cancellationToken: cancellationToken).ConfigureAwait(false)).GetValue() ?? string.Empty; + } +#pragma warning disable CA1031 // Do not catch general exception types + catch (Exception e) +#pragma warning restore CA1031 + { + AddResponseMessage(chatOptions, chat, null, $"Error: Exception while invoking function. {e.Message}", toolCall.Id, this.Logger); + continue; + } + finally + { + s_inflightAutoInvokes.Value--; + } + AddResponseMessage(chatOptions, chat, functionResult as string ?? JsonSerializer.Serialize(functionResult), errorMessage: null, toolCall.Id, this.Logger); + + static void AddResponseMessage(ChatCompletionsOptions chatOptions, ChatHistory chat, string? result, string? errorMessage, string toolId, ILogger logger) + { + // Log any error + if (errorMessage is not null && logger.IsEnabled(LogLevel.Debug)) + { + Debug.Assert(result is null); + logger.LogDebug("Failed to handle tool request ({ToolId}). {Error}", toolId, errorMessage); + } + + // Add the tool response message to both the chat options and to the chat history. + result ??= errorMessage ?? string.Empty; + chatOptions.Messages.Add(new ChatRequestToolMessage(result, toolId)); + chat.AddMessage(AuthorRole.Tool, result, metadata: new Dictionary { { OpenAIChatMessageContent.ToolIdProperty, toolId } }); + } + } + + // Respect the tool's maximum use attempts and maximum auto-invoke attempts. + Debug.Assert(chatExecutionSettings.ToolCallBehavior is not null); + + if (iteration >= chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts) + { + // Set the tool choice to none. We'd also like to clear the tools, but doing so can make the service unhappy ("[] is too short - 'tools'"). + chatOptions.ToolChoice = ChatCompletionsToolChoice.None; + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts); + } + } + + if (iteration >= chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts) + { + autoInvoke = false; + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts); + } + } + } + } + + internal async IAsyncEnumerable GetStreamingChatMessageContentsAsync( + ChatHistory chat, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(chat); + + OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + + ValidateMaxTokens(chatExecutionSettings.MaxTokens); + + bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes; + ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt); + + var chatOptions = CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName); + + StringBuilder? contentBuilder = null; + Dictionary? toolCallIdsByIndex = null; + Dictionary? functionNamesByIndex = null; + Dictionary? functionArgumentBuildersByIndex = null; + for (int iteration = 1; ; iteration++) + { + // Make the request. 
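+            // Editorial illustration (assumption, not part of the original change): a typical caller of the
+            // streaming path simply enumerates the async stream and prints content as it arrives:
+            //
+            //     await foreach (var update in chatService.GetStreamingChatMessageContentsAsync(history, settings, kernel))
+            //     {
+            //         Console.Write(update.Content);
+            //     }
+            //
+            // The request/accumulate/auto-invoke cycle below is invisible to that caller; extra iterations
+            // of this loop only occur while the service keeps returning tool calls.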
+ var response = await RunRequestAsync(() => this.Client.GetChatCompletionsStreamingAsync(chatOptions, cancellationToken)).ConfigureAwait(false); + + // Reset state + contentBuilder?.Clear(); + toolCallIdsByIndex?.Clear(); + functionNamesByIndex?.Clear(); + functionArgumentBuildersByIndex?.Clear(); + + // Stream the response. + IReadOnlyDictionary? metadata = null; + ChatRole? streamedRole = default; + CompletionsFinishReason finishReason = default; + await foreach (StreamingChatCompletionsUpdate update in response.ConfigureAwait(false)) + { + metadata ??= GetResponseMetadata(update); + streamedRole ??= update.Role; + finishReason = update.FinishReason ?? default; + + // If we're intending to invoke function calls, we need to consume that function call information. + if (autoInvoke) + { + if (update.ContentUpdate is { Length: > 0 } contentUpdate) + { + (contentBuilder ??= new()).Append(contentUpdate); + } + + OpenAIFunctionToolCall.TrackStreamingToolingUpdate(update.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + } + + yield return new OpenAIStreamingChatMessageContent(update, update.ChoiceIndex ?? 0, this.DeploymentOrModelName, metadata); + } + + // If we don't have a function to invoke, we're done. + // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service + // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool + // is specified. + if (!autoInvoke || + toolCallIdsByIndex is not { Count: > 0 }) + { + yield break; + } + + // Get any response content that was streamed. + string content = contentBuilder?.ToString() ?? string.Empty; + + // Translate all entries into ChatCompletionsFunctionToolCall instances. + ChatCompletionsFunctionToolCall[] toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + + // Log the requests + if (this.Logger.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", toolCalls.Select(fcr => $"{fcr.Name}({fcr.Arguments})"))); + } + else if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Function call requests: {Requests}", toolCalls.Length); + } + + // Add the original assistant message to the chatOptions; this is required for the service + // to understand the tool call responses. + chatOptions.Messages.Add(GetRequestMessage(streamedRole ?? default, content, toolCalls)); + chat.Add(new OpenAIChatMessageContent(streamedRole ?? default, content, this.DeploymentOrModelName, toolCalls, metadata)); + + // Respond to each tooling request. + foreach (ChatCompletionsFunctionToolCall toolCall in toolCalls) + { + // We currently only know about function tool calls. If it's anything else, we'll respond with an error. + if (string.IsNullOrEmpty(toolCall.Name)) + { + AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, "Error: Tool call was not a function call.", this.Logger); + continue; + } + + // Parse the function call arguments. + OpenAIFunctionToolCall? 
openAIFunctionToolCall; + try + { + openAIFunctionToolCall = new(toolCall); + } + catch (JsonException) + { + AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, "Error: Function call arguments were invalid JSON.", this.Logger); + continue; + } + + // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked, + // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able + // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check. + if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true && + !IsRequestableTool(chatOptions, openAIFunctionToolCall)) + { + AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, "Error: Function call request for a function that wasn't defined.", this.Logger); + continue; + } + + // Find the function in the kernel and populate the arguments. + if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs)) + { + AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, "Error: Requested function could not be found.", this.Logger); + continue; + } + + // Now, invoke the function, and add the resulting tool call message to the chat options. + s_inflightAutoInvokes.Value++; + object? functionResult; + try + { + // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any + // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, + // as the called function could in turn telling the model about itself as a possible candidate for invocation. + functionResult = (await function.InvokeAsync(kernel, functionArgs, cancellationToken: cancellationToken).ConfigureAwait(false)).GetValue() ?? string.Empty; + } +#pragma warning disable CA1031 // Do not catch general exception types + catch (Exception e) +#pragma warning restore CA1031 + { + AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, $"Error: Exception while invoking function. {e.Message}", this.Logger); + continue; + } + finally + { + s_inflightAutoInvokes.Value--; + } + AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, functionResult as string ?? JsonSerializer.Serialize(functionResult), errorMessage: null, this.Logger); + + static void AddResponseMessage( + ChatCompletionsOptions chatOptions, ChatHistory chat, ChatRole? streamedRole, ChatCompletionsToolCall tool, IReadOnlyDictionary? metadata, + string? result, string? errorMessage, ILogger logger) + { + if (errorMessage is not null && logger.IsEnabled(LogLevel.Debug)) + { + Debug.Assert(result is null); + logger.LogDebug("Failed to handle tool request ({ToolId}). {Error}", tool.Id, errorMessage); + } + + // Add the tool response message to both the chat options and to the chat history. + result ??= errorMessage ?? string.Empty; + chatOptions.Messages.Add(new ChatRequestToolMessage(result, tool.Id)); + chat.AddMessage(AuthorRole.Tool, result, metadata: new Dictionary { { OpenAIChatMessageContent.ToolIdProperty, tool.Id } }); + } + } + + // Respect the tool's maximum use attempts and maximum auto-invoke attempts. 
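+            // Editorial illustration (assumption, not part of the original change): the two limits enforced
+            // below originate from the ToolCallBehavior the caller configured, e.g.
+            //
+            //     // auto-invoke until the behavior's MaximumAutoInvokeAttempts budget is exhausted
+            //     settings.ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions;
+            //
+            //     // only advertise functions; the caller handles tool calls itself (no auto-invoke budget)
+            //     settings.ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions;
+            //
+            // Once either budget is exceeded, the request continues but tools are withdrawn or
+            // auto-invocation stops, as implemented below.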
+ Debug.Assert(chatExecutionSettings.ToolCallBehavior is not null); + + if (iteration >= chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts) + { + // Set the tool choice to none. We'd also like to clear the tools, but doing so can make the service unhappy ("[] is too short - 'tools'"). + chatOptions.ToolChoice = ChatCompletionsToolChoice.None; + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts); + } + } + + if (iteration >= chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts) + { + autoInvoke = false; + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts); + } + } + } + } + + /// Checks if a tool call is for a function that was defined. + private static bool IsRequestableTool(ChatCompletionsOptions options, OpenAIFunctionToolCall ftc) + { + IList tools = options.Tools; + for (int i = 0; i < tools.Count; i++) + { + if (tools[i] is ChatCompletionsFunctionToolDefinition def && + string.Equals(def.Name, ftc.FullyQualifiedName, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } + + internal async IAsyncEnumerable GetChatAsTextStreamingContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings chatSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + ChatHistory chat = CreateNewChat(prompt, chatSettings); + + await foreach (var chatUpdate in this.GetStreamingChatMessageContentsAsync(chat, executionSettings, kernel, cancellationToken)) + { + yield return new StreamingTextContent(chatUpdate.Content, chatUpdate.ChoiceIndex, chatUpdate.ModelId, chatUpdate, Encoding.UTF8, chatUpdate.Metadata); + } + } + + internal async Task> GetChatAsTextContentsAsync( + string text, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings chatSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + + ChatHistory chat = CreateNewChat(text, chatSettings); + return (await this.GetChatMessageContentsAsync(chat, chatSettings, kernel, cancellationToken).ConfigureAwait(false)) + .Select(chat => new TextContent(chat.Content, chat.ModelId, chat.Content, Encoding.UTF8, chat.Metadata)) + .ToList(); + } + + internal void AddAttribute(string key, string? value) + { + if (!string.IsNullOrEmpty(value)) + { + this.Attributes.Add(key, value); + } + } + + /// Gets options to use for an OpenAIClient + /// Custom for HTTP requests. + /// An instance of . + internal static OpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient) + { + OpenAIClientOptions options = new() + { + Diagnostics = { ApplicationId = HttpHeaderValues.UserAgent } + }; + + if (httpClient is not null) + { + options.Transport = new HttpClientTransport(httpClient); + options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided. + } + + return options; + } + + /// + /// Create a new empty chat instance + /// + /// Optional chat instructions for the AI service + /// Execution settings + /// Chat object + internal static ChatHistory CreateNewChat(string? 
text = null, OpenAIPromptExecutionSettings? executionSettings = null) + { + var chat = new ChatHistory(); + + // If settings is not provided, create a new chat with the text as the system prompt + AuthorRole textRole = AuthorRole.System; + + if (!string.IsNullOrWhiteSpace(executionSettings?.ChatSystemPrompt)) + { + chat.AddSystemMessage(executionSettings!.ChatSystemPrompt); + textRole = AuthorRole.User; + } + + if (!string.IsNullOrWhiteSpace(text)) + { + chat.AddMessage(textRole, text!); + } + + return chat; + } + + private static CompletionsOptions CreateCompletionsOptions(string text, OpenAIPromptExecutionSettings executionSettings, string deploymentOrModelName) + { + if (executionSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt) + { + throw new ArgumentOutOfRangeException($"{nameof(executionSettings)}.{nameof(executionSettings.ResultsPerPrompt)}", executionSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive."); + } + + var options = new CompletionsOptions + { + Prompts = { text.Replace("\r\n", "\n") }, // normalize line endings + MaxTokens = executionSettings.MaxTokens, + Temperature = (float?)executionSettings.Temperature, + NucleusSamplingFactor = (float?)executionSettings.TopP, + FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, + PresencePenalty = (float?)executionSettings.PresencePenalty, + Echo = false, + ChoicesPerPrompt = executionSettings.ResultsPerPrompt, + GenerationSampleCount = executionSettings.ResultsPerPrompt, + LogProbabilityCount = null, + User = executionSettings.User, + DeploymentName = deploymentOrModelName + }; + + if (executionSettings.TokenSelectionBiases is not null) + { + foreach (var keyValue in executionSettings.TokenSelectionBiases) + { + options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value); + } + } + + if (executionSettings.StopSequences is { Count: > 0 }) + { + foreach (var s in executionSettings.StopSequences) + { + options.StopSequences.Add(s); + } + } + + return options; + } + + private static ChatCompletionsOptions CreateChatCompletionsOptions( + OpenAIPromptExecutionSettings executionSettings, + ChatHistory chatHistory, + Kernel? kernel, + string deploymentOrModelName) + { + if (executionSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt) + { + throw new ArgumentOutOfRangeException($"{nameof(executionSettings)}.{nameof(executionSettings.ResultsPerPrompt)}", executionSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive."); + } + + var options = new ChatCompletionsOptions + { + MaxTokens = executionSettings.MaxTokens, + Temperature = (float?)executionSettings.Temperature, + NucleusSamplingFactor = (float?)executionSettings.TopP, + FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, + PresencePenalty = (float?)executionSettings.PresencePenalty, + ChoiceCount = executionSettings.ResultsPerPrompt, + DeploymentName = deploymentOrModelName, + Seed = executionSettings.Seed, + User = executionSettings.User + }; + + switch (executionSettings.ResponseFormat) + { + case ChatCompletionsResponseFormat formatObject: + // If the response format is an Azure SDK ChatCompletionsResponseFormat, just pass it along. + options.ResponseFormat = formatObject; + break; + + case string formatString: + // If the response format is a string, map the ones we know about, and ignore the rest. 
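+                // Editorial illustration (not part of the original change): callers typically set this as a
+                // plain string on the execution settings, e.g.
+                //
+                //     var settings = new OpenAIPromptExecutionSettings { ResponseFormat = "json_object" };
+                //
+                // which the mapping below translates to ChatCompletionsResponseFormat.JsonObject; unrecognized
+                // strings are deliberately ignored so the service default applies.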
+ switch (formatString) + { + case "json_object": + options.ResponseFormat = ChatCompletionsResponseFormat.JsonObject; + break; + + case "text": + options.ResponseFormat = ChatCompletionsResponseFormat.Text; + break; + } + break; + + case JsonElement formatElement: + // This is a workaround for a type mismatch when deserializing a JSON into an object? type property. + // Handling only string formatElement. + if (formatElement.ValueKind == JsonValueKind.String) + { + string formatString = formatElement.GetString() ?? ""; + switch (formatString) + { + case "json_object": + options.ResponseFormat = ChatCompletionsResponseFormat.JsonObject; + break; + + case "text": + options.ResponseFormat = ChatCompletionsResponseFormat.Text; + break; + } + } + break; + } + + executionSettings.ToolCallBehavior?.ConfigureOptions(kernel, options); + if (executionSettings.TokenSelectionBiases is not null) + { + foreach (var keyValue in executionSettings.TokenSelectionBiases) + { + options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value); + } + } + + if (executionSettings.StopSequences is { Count: > 0 }) + { + foreach (var s in executionSettings.StopSequences) + { + options.StopSequences.Add(s); + } + } + + if (!string.IsNullOrWhiteSpace(executionSettings?.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System)) + { + options.Messages.Add(GetRequestMessage(new ChatMessageContent(AuthorRole.System, executionSettings!.ChatSystemPrompt))); + } + + foreach (var message in chatHistory) + { + options.Messages.Add(GetRequestMessage(message)); + } + + return options; + } + + private static ChatRequestMessage GetRequestMessage(ChatRole chatRole, string contents, ChatCompletionsFunctionToolCall[]? tools) + { + if (chatRole == ChatRole.User) + { + return new ChatRequestUserMessage(contents); + } + + if (chatRole == ChatRole.System) + { + return new ChatRequestSystemMessage(contents); + } + + if (chatRole == ChatRole.Assistant) + { + var msg = new ChatRequestAssistantMessage(contents); + if (tools is not null) + { + foreach (ChatCompletionsFunctionToolCall tool in tools) + { + msg.ToolCalls.Add(tool); + } + } + return msg; + } + + throw new NotImplementedException($"Role {chatRole} is not implemented"); + } + + private static ChatRequestMessage GetRequestMessage(ChatMessageContent message) + { + if (message.Role == AuthorRole.System) + { + return new ChatRequestSystemMessage(message.Content); + } + + if (message.Role == AuthorRole.User || message.Role == AuthorRole.Tool) + { + if (message.Metadata?.TryGetValue(OpenAIChatMessageContent.ToolIdProperty, out object? toolId) is true && + toolId?.ToString() is string toolIdString) + { + return new ChatRequestToolMessage(message.Content, toolIdString); + } + + if (message.Items is { Count: > 0 }) + { + return new ChatRequestUserMessage(message.Items.Select(static (KernelContent item) => (ChatMessageContentItem)(item switch + { + TextContent textContent => new ChatMessageTextContentItem(textContent.Text), + ImageContent imageContent => new ChatMessageImageContentItem(imageContent.Uri), + _ => throw new NotSupportedException($"Unsupported chat message content type '{item.GetType()}'.") + }))); + } + + return new ChatRequestUserMessage(message.Content); + } + + if (message.Role == AuthorRole.Assistant) + { + var asstMessage = new ChatRequestAssistantMessage(message.Content); + + IEnumerable? 
tools = (message as OpenAIChatMessageContent)?.ToolCalls; + if (tools is null && message.Metadata?.TryGetValue(OpenAIChatMessageContent.FunctionToolCallsProperty, out object? toolCallsObject) is true) + { + tools = toolCallsObject as IEnumerable; + if (tools is null && toolCallsObject is JsonElement { ValueKind: JsonValueKind.Array } array) + { + int length = array.GetArrayLength(); + var ftcs = new List(length); + for (int i = 0; i < length; i++) + { + JsonElement e = array[i]; + if (e.TryGetProperty("Id", out JsonElement id) && + e.TryGetProperty("Name", out JsonElement name) && + e.TryGetProperty("Arguments", out JsonElement arguments) && + id.ValueKind == JsonValueKind.String && + name.ValueKind == JsonValueKind.String && + arguments.ValueKind == JsonValueKind.String) + { + ftcs.Add(new ChatCompletionsFunctionToolCall(id.GetString()!, name.GetString()!, arguments.GetString()!)); + } + } + tools = ftcs; + } + } + + if (tools is not null) + { + foreach (ChatCompletionsToolCall tool in tools) + { + asstMessage.ToolCalls.Add(tool); + } + } + + return asstMessage; + } + + throw new NotSupportedException($"Role {message.Role} is not supported."); + } + + private static ChatRequestMessage GetRequestMessage(ChatResponseMessage message) + { + if (message.Role == ChatRole.System) + { + return new ChatRequestSystemMessage(message.Content); + } + + if (message.Role == ChatRole.Assistant) + { + var msg = new ChatRequestAssistantMessage(message.Content); + if (message.ToolCalls is { Count: > 0 } tools) + { + foreach (ChatCompletionsToolCall tool in tools) + { + msg.ToolCalls.Add(tool); + } + } + + return msg; + } + + if (message.Role == ChatRole.User) + { + return new ChatRequestUserMessage(message.Content); + } + + throw new NotSupportedException($"Role {message.Role} is not supported."); + } + + private static void ValidateMaxTokens(int? maxTokens) + { + if (maxTokens.HasValue && maxTokens < 1) + { + throw new ArgumentException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); + } + } + + private static void ValidateAutoInvoke(bool autoInvoke, int resultsPerPrompt) + { + if (autoInvoke && resultsPerPrompt != 1) + { + // We can remove this restriction in the future if valuable. However, multiple results per prompt is rare, + // and limiting this significantly curtails the complexity of the implementation. + throw new ArgumentException($"Auto-invocation of tool calls may only be used with a {nameof(OpenAIPromptExecutionSettings.ResultsPerPrompt)} of 1."); + } + } + + private static async Task RunRequestAsync(Func> request) + { + try + { + return await request.Invoke().ConfigureAwait(false); + } + catch (RequestFailedException e) + { + throw e.ToHttpOperationException(); + } + } + + /// + /// Captures usage details, including token information. + /// + /// Instance of with usage details. + private void CaptureUsageDetails(CompletionsUsage usage) + { + if (usage is null) + { + if (this.Logger.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Usage information is not available."); + } + + return; + } + + if (this.Logger.IsEnabled(LogLevel.Information)) + { + this.Logger.LogInformation( + "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. 
Total tokens: {TotalTokens}.", + usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens); + } + + s_promptTokensCounter.Add(usage.PromptTokens); + s_completionTokensCounter.Add(usage.CompletionTokens); + s_totalTokensCounter.Add(usage.TotalTokens); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs new file mode 100644 index 000000000000..2edb2c9baae4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs @@ -0,0 +1,121 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// OpenAI specialized chat message content +/// +public sealed class OpenAIChatMessageContent : ChatMessageContent +{ + /// + /// Gets the metadata key for the name property. + /// + public static string ToolIdProperty => $"{nameof(ChatCompletionsToolCall)}.{nameof(ChatCompletionsToolCall.Id)}"; + + /// + /// Gets the metadata key for the list of . + /// + internal static string FunctionToolCallsProperty => $"{nameof(ChatResponseMessage)}.FunctionToolCalls"; + + /// + /// Initializes a new instance of the class. + /// + /// Azure SDK chat message + /// The model ID used to generate the content + /// Additional metadata + internal OpenAIChatMessageContent(ChatResponseMessage chatMessage, string modelId, IReadOnlyDictionary? metadata = null) + : base(new AuthorRole(chatMessage.Role.ToString()), chatMessage.Content, modelId, chatMessage, System.Text.Encoding.UTF8, CreateMetadataDictionary(chatMessage.ToolCalls, metadata)) + { + this.ToolCalls = chatMessage.ToolCalls; + } + + /// + /// Initializes a new instance of the class. + /// + internal OpenAIChatMessageContent(ChatRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) + : base(new AuthorRole(role.ToString()), content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata)) + { + this.ToolCalls = toolCalls; + } + + /// + /// Initializes a new instance of the class. + /// + internal OpenAIChatMessageContent(AuthorRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) + : base(role, content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata)) + { + this.ToolCalls = toolCalls; + } + + /// + /// A list of the tools called by the model. + /// + public IReadOnlyList ToolCalls { get; } + + /// + /// Retrieve the resulting function from the chat result. + /// + /// The , or null if no function was returned by the model. + public IReadOnlyList GetOpenAIFunctionToolCalls() + { + List? functionToolCallList = null; + + foreach (var toolCall in this.ToolCalls) + { + if (toolCall is ChatCompletionsFunctionToolCall functionToolCall) + { + (functionToolCallList ??= new List()).Add(new OpenAIFunctionToolCall(functionToolCall)); + } + } + + if (functionToolCallList is not null) + { + return functionToolCallList; + } + + return Array.Empty(); + } + + private static IReadOnlyDictionary? CreateMetadataDictionary( + IReadOnlyList toolCalls, + IReadOnlyDictionary? original) + { + // We only need to augment the metadata if there are any tool calls. 
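+        // Editorial illustration (assumption, not part of the original change): consumers can read the tool
+        // calls back either through the strongly typed property or through this metadata entry, e.g.
+        //
+        //     var message = (OpenAIChatMessageContent)await chatService.GetChatMessageContentAsync(history, settings, kernel);
+        //     foreach (OpenAIFunctionToolCall call in message.GetOpenAIFunctionToolCalls())
+        //     {
+        //         Console.WriteLine($"{call.FullyQualifiedName}: {call.Arguments?.Count ?? 0} argument(s)");
+        //     }
+        //
+        // The FunctionToolCallsProperty entry added below covers scenarios where the original
+        // ChatCompletionsToolCall instances are no longer available, such as serialized chat history.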
+ if (toolCalls.Count > 0) + { + Dictionary newDictionary; + if (original is null) + { + // There's no existing metadata to clone; just allocate a new dictionary. + newDictionary = new Dictionary(1); + } + else if (original is IDictionary origIDictionary) + { + // Efficiently clone the old dictionary to a new one. + newDictionary = new Dictionary(origIDictionary); + } + else + { + // There's metadata to clone but we have to do so one item at a time. + newDictionary = new Dictionary(original.Count + 1); + foreach (var kvp in original) + { + newDictionary[kvp.Key] = kvp.Value; + } + } + + // Add the additional entry. + newDictionary.Add(FunctionToolCallsProperty, toolCalls.OfType().ToList()); + + return newDictionary; + } + + return original; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs new file mode 100644 index 000000000000..78a58337fc62 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using System.Runtime.CompilerServices; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Core implementation for OpenAI clients, providing common functionality and properties. +/// +internal sealed class OpenAIClientCore : ClientCore +{ + /// + /// Gets the attribute name used to store the organization in the dictionary. + /// + public static string OrganizationKey => "Organization"; + + /// + /// OpenAI / Azure OpenAI Client + /// + internal override OpenAIClient Client { get; } + + /// + /// Initializes a new instance of the class. + /// + /// Model name. + /// OpenAI API Key. + /// OpenAI Organization Id (usually optional). + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal OpenAIClientCore( + string modelId, + string apiKey, + string? organization = null, + HttpClient? httpClient = null, + ILogger? logger = null) : base(logger) + { + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + this.DeploymentOrModelName = modelId; + + var options = GetOpenAIClientOptions(httpClient); + + if (!string.IsNullOrWhiteSpace(organization)) + { + options.AddPolicy(new AddHeaderRequestPolicy("OpenAI-Organization", organization!), HttpPipelinePosition.PerCall); + } + + this.Client = new OpenAIClient(apiKey, options); + } + + /// + /// Initializes a new instance of the class using the specified OpenAIClient. + /// Note: instances created this way might not have the default diagnostics settings, + /// it's up to the caller to configure the client. + /// + /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// The to use for logging. If null, no logging will be performed. + internal OpenAIClientCore( + string modelId, + OpenAIClient openAIClient, + ILogger? logger = null) : base(logger) + { + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNull(openAIClient); + + this.DeploymentOrModelName = modelId; + this.Client = openAIClient; + } + + /// + /// Logs OpenAI action details. + /// + /// Caller member name. Populated automatically by runtime. + internal void LogActionDetails([CallerMemberName] string? 
callerMemberName = default) + { + if (this.Logger.IsEnabled(LogLevel.Information)) + { + this.Logger.LogInformation("Action: {Action}. OpenAI Model ID: {ModelId}.", callerMemberName, this.DeploymentOrModelName); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs new file mode 100644 index 000000000000..a17abb4abbb9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Azure.AI.OpenAI; +using Json.Schema; +using Json.Schema.Generation; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +// NOTE: Since this space is evolving rapidly, in order to reduce the risk of needing to take breaking +// changes as OpenAI's APIs evolve, these types are not externally constructible. In the future, once +// things stabilize, and if need demonstrates, we could choose to expose those constructors. + +/// +/// Represents a function parameter that can be passed to an OpenAI function tool call. +/// +public sealed class OpenAIFunctionParameter +{ + internal OpenAIFunctionParameter(string? name, string? description, bool isRequired, Type? parameterType, KernelJsonSchema? schema) + { + this.Name = name ?? string.Empty; + this.Description = description ?? string.Empty; + this.IsRequired = isRequired; + this.ParameterType = parameterType; + this.Schema = schema; + } + + /// Gets the name of the parameter. + public string Name { get; } + + /// Gets a description of the parameter. + public string Description { get; } + + /// Gets whether the parameter is required vs optional. + public bool IsRequired { get; } + + /// Gets the of the parameter, if known. + public Type? ParameterType { get; } + + /// Gets a JSON schema for the parameter, if known. + public KernelJsonSchema? Schema { get; } +} + +/// +/// Represents a function return parameter that can be returned by a tool call to OpenAI. +/// +public sealed class OpenAIFunctionReturnParameter +{ + internal OpenAIFunctionReturnParameter(string? description, Type? parameterType, KernelJsonSchema? schema) + { + this.Description = description ?? string.Empty; + this.Schema = schema; + this.ParameterType = parameterType; + } + + /// Gets a description of the return parameter. + public string Description { get; } + + /// Gets the of the return parameter, if known. + public Type? ParameterType { get; } + + /// Gets a JSON schema for the return parameter, if known. + public KernelJsonSchema? Schema { get; } +} + +/// +/// Represents a function that can be passed to the OpenAI API +/// +public sealed class OpenAIFunction +{ + /// + /// Cached storing the JSON for a function with no parameters. + /// + /// + /// This is an optimization to avoid serializing the same JSON Schema over and over again + /// for this relatively common case. + /// + private static readonly BinaryData s_zeroFunctionParametersSchema = new("{\"type\":\"object\",\"required\":[],\"properties\":{}}"); + /// + /// Cached schema for a descriptionless string. + /// + private static readonly KernelJsonSchema s_stringNoDescriptionSchema = KernelJsonSchema.Parse("{\"type\":\"string\"}"); + + /// Initializes the OpenAIFunction. + internal OpenAIFunction( + string? pluginName, + string functionName, + string? description, + IReadOnlyList? parameters, + OpenAIFunctionReturnParameter? 
returnParameter) + { + Verify.NotNullOrWhiteSpace(functionName); + + this.PluginName = pluginName; + this.FunctionName = functionName; + this.Description = description; + this.Parameters = parameters; + this.ReturnParameter = returnParameter; + } + + /// Gets the separator used between the plugin name and the function name, if a plugin name is present. + /// This separator was previously _, but has been changed to - to better align to the behavior elsewhere in SK and in response + /// to developers who want to use underscores in their function or plugin names. We plan to make this setting configurable in the future. + public static string NameSeparator { get; set; } = "-"; + + /// Gets the name of the plugin with which the function is associated, if any. + public string? PluginName { get; } + + /// Gets the name of the function. + public string FunctionName { get; } + + /// Gets the fully-qualified name of the function. + /// + /// This is the concatenation of the and the , + /// separated by . If there is no , this is + /// the same as . + /// + public string FullyQualifiedName => + string.IsNullOrEmpty(this.PluginName) ? this.FunctionName : $"{this.PluginName}{NameSeparator}{this.FunctionName}"; + + /// Gets a description of the function. + public string? Description { get; } + + /// Gets a list of parameters to the function, if any. + public IReadOnlyList? Parameters { get; } + + /// Gets the return parameter of the function, if any. + public OpenAIFunctionReturnParameter? ReturnParameter { get; } + + /// + /// Converts the representation to the Azure SDK's + /// representation. + /// + /// A containing all the function information. + public FunctionDefinition ToFunctionDefinition() + { + BinaryData resultParameters = s_zeroFunctionParametersSchema; + + IReadOnlyList? parameters = this.Parameters; + if (parameters is { Count: > 0 }) + { + var properties = new Dictionary(); + var required = new List(); + + for (int i = 0; i < parameters.Count; i++) + { + var parameter = parameters[i]; + properties.Add(parameter.Name, parameter.Schema ?? GetDefaultSchemaForTypelessParameter(parameter.Description)); + if (parameter.IsRequired) + { + required.Add(parameter.Name); + } + } + + resultParameters = BinaryData.FromObjectAsJson(new + { + type = "object", + required, + properties, + }); + } + + return new FunctionDefinition + { + Name = this.FullyQualifiedName, + Description = this.Description, + Parameters = resultParameters, + }; + } + + /// Gets a for a typeless parameter with the specified description, defaulting to typeof(string) + private static KernelJsonSchema GetDefaultSchemaForTypelessParameter(string? description) + { + // If there's a description, incorporate it. + if (!string.IsNullOrWhiteSpace(description)) + { + return KernelJsonSchema.Parse(JsonSerializer.Serialize( + new JsonSchemaBuilder() + .FromType(typeof(string)) + .Description(description!) + .Build())); + } + + // Otherwise, we can use a cached schema for a string with no description. + return s_stringNoDescriptionSchema; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs new file mode 100644 index 000000000000..f6ef3b489dfc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft. All rights reserved. 
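+
+// Editorial illustration (not part of the original change): the parsing below splits a fully-qualified
+// name on OpenAIFunction.NameSeparator ("-" by default). For example, a tool call whose Name is
+// "MyPlugin-GetWeather" and whose Arguments are "{\"city\":\"Paris\"}" is exposed as
+// PluginName = "MyPlugin", FunctionName = "GetWeather", and an Arguments dictionary with a single
+// "city" entry, while a name without the separator maps to a plugin-less function.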
+ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; +using System.Text.Json; +using Azure.AI.OpenAI; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Represents an OpenAI function tool call with deserialized function name and arguments. +/// +public sealed class OpenAIFunctionToolCall +{ + private string? _fullyQualifiedFunctionName; + + /// Initialize the from a . + internal OpenAIFunctionToolCall(ChatCompletionsFunctionToolCall functionToolCall) + { + Verify.NotNull(functionToolCall); + Verify.NotNull(functionToolCall.Name); + + string fullyQualifiedFunctionName = functionToolCall.Name; + string functionName = fullyQualifiedFunctionName; + string? arguments = functionToolCall.Arguments; + string? pluginName = null; + + int separatorPos = fullyQualifiedFunctionName.IndexOf(OpenAIFunction.NameSeparator, StringComparison.Ordinal); + if (separatorPos >= 0) + { + pluginName = fullyQualifiedFunctionName.AsSpan(0, separatorPos).Trim().ToString(); + functionName = fullyQualifiedFunctionName.AsSpan(separatorPos + OpenAIFunction.NameSeparator.Length).Trim().ToString(); + } + + this.Id = functionToolCall.Id; + this._fullyQualifiedFunctionName = fullyQualifiedFunctionName; + this.PluginName = pluginName; + this.FunctionName = functionName; + if (!string.IsNullOrWhiteSpace(arguments)) + { + this.Arguments = JsonSerializer.Deserialize>(arguments!); + } + } + + /// Gets the ID of the tool call. + public string? Id { get; } + + /// Gets the name of the plugin with which this function is associated, if any. + public string? PluginName { get; } + + /// Gets the name of the function. + public string FunctionName { get; } + + /// Gets a name/value collection of the arguments to the function, if any. + public Dictionary? Arguments { get; } + + /// Gets the fully-qualified name of the function. + /// + /// This is the concatenation of the and the , + /// separated by . If there is no , + /// this is the same as . + /// + public string FullyQualifiedName => + this._fullyQualifiedFunctionName ??= + string.IsNullOrEmpty(this.PluginName) ? this.FunctionName : $"{this.PluginName}{OpenAIFunction.NameSeparator}{this.FunctionName}"; + + /// + public override string ToString() + { + var sb = new StringBuilder(this.FullyQualifiedName); + + sb.Append('('); + if (this.Arguments is not null) + { + string separator = ""; + foreach (var arg in this.Arguments) + { + sb.Append(separator).Append(arg.Key).Append(':').Append(arg.Value); + separator = ", "; + } + } + sb.Append(')'); + + return sb.ToString(); + } + + /// + /// Tracks tooling updates from streaming responses. + /// + /// The tool call update to incorporate. + /// Lazily-initialized dictionary mapping indices to IDs. + /// Lazily-initialized dictionary mapping indices to names. + /// Lazily-initialized dictionary mapping indices to arguments. + internal static void TrackStreamingToolingUpdate( + StreamingToolCallUpdate? update, + ref Dictionary? toolCallIdsByIndex, + ref Dictionary? functionNamesByIndex, + ref Dictionary? functionArgumentBuildersByIndex) + { + if (update is null) + { + // Nothing to track. + return; + } + + // If we have an ID, ensure the index is being tracked. Even if it's not a function update, + // we want to keep track of it so we can send back an error. + if (update.Id is string id) + { + (toolCallIdsByIndex ??= new())[update.ToolCallIndex] = id; + } + + if (update is StreamingFunctionToolCallUpdate ftc) + { + // Ensure we're tracking the function's name. 
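+        // Editorial illustration (not part of the original change): for a single streamed function call at
+        // tool-call index 0, the service might first emit an update carrying Id = "call_abc" and
+        // Name = "MyPlugin-GetWeather", followed by argument fragments "{\"city\":" and "\"Paris\"}".
+        // After all updates are tracked, toolCallIdsByIndex[0] == "call_abc",
+        // functionNamesByIndex[0] == "MyPlugin-GetWeather", and functionArgumentBuildersByIndex[0]
+        // holds the concatenated JSON "{\"city\":\"Paris\"}".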
+ if (ftc.Name is string name) + { + (functionNamesByIndex ??= new())[ftc.ToolCallIndex] = name; + } + + // Ensure we're tracking the function's arguments. + if (ftc.ArgumentsUpdate is string argumentsUpdate) + { + if (!(functionArgumentBuildersByIndex ??= new()).TryGetValue(ftc.ToolCallIndex, out StringBuilder? arguments)) + { + functionArgumentBuildersByIndex[ftc.ToolCallIndex] = arguments = new(); + } + + arguments.Append(argumentsUpdate); + } + } + } + + /// + /// Converts the data built up by into an array of s. + /// + /// Dictionary mapping indices to IDs. + /// Dictionary mapping indices to names. + /// Dictionary mapping indices to arguments. + internal static ChatCompletionsFunctionToolCall[] ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + ref Dictionary? toolCallIdsByIndex, + ref Dictionary? functionNamesByIndex, + ref Dictionary? functionArgumentBuildersByIndex) + { + ChatCompletionsFunctionToolCall[] toolCalls = Array.Empty(); + if (toolCallIdsByIndex is { Count: > 0 }) + { + toolCalls = new ChatCompletionsFunctionToolCall[toolCallIdsByIndex.Count]; + + int i = 0; + foreach (KeyValuePair toolCallIndexAndId in toolCallIdsByIndex) + { + string? functionName = null; + StringBuilder? functionArguments = null; + + functionNamesByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionName); + functionArgumentBuildersByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionArguments); + + toolCalls[i] = new ChatCompletionsFunctionToolCall(toolCallIndexAndId.Value, functionName ?? string.Empty, functionArguments?.ToString() ?? string.Empty); + i++; + } + + Debug.Assert(i == toolCalls.Length); + } + + return toolCalls; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs new file mode 100644 index 000000000000..6859e1225dd6 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Extensions for specific to the OpenAI connector. +/// +public static class OpenAIKernelFunctionMetadataExtensions +{ + /// + /// Convert a to an . + /// + /// The object to convert. + /// An object. 
+ public static OpenAIFunction ToOpenAIFunction(this KernelFunctionMetadata metadata) + { + IReadOnlyList metadataParams = metadata.Parameters; + + var openAIParams = new OpenAIFunctionParameter[metadataParams.Count]; + for (int i = 0; i < openAIParams.Length; i++) + { + var param = metadataParams[i]; + + openAIParams[i] = new OpenAIFunctionParameter( + param.Name, + GetDescription(param), + param.IsRequired, + param.ParameterType, + param.Schema); + } + + return new OpenAIFunction( + metadata.PluginName, + metadata.Name, + metadata.Description, + openAIParams, + new OpenAIFunctionReturnParameter( + metadata.ReturnParameter.Description, + metadata.ReturnParameter.ParameterType, + metadata.ReturnParameter.Schema)); + + static string GetDescription(KernelParameterMetadata param) + { + if (InternalTypeConverter.ConvertToString(param.DefaultValue) is string stringValue && !string.IsNullOrEmpty(stringValue)) + { + return $"{param.Description} (default value: {stringValue})"; + } + + return param.Description; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs new file mode 100644 index 000000000000..dbb53c10fecf --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs @@ -0,0 +1,62 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Azure.AI.OpenAI; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Extension methods for . +/// +public static class OpenAIPluginCollectionExtensions +{ + /// + /// Given an object, tries to retrieve the corresponding and populate with its parameters. + /// + /// The plugins. + /// The object. + /// When this method returns, the function that was retrieved if one with the specified name was found; otherwise, + /// When this method returns, the arguments for the function; otherwise, + /// if the function was found; otherwise, . + public static bool TryGetFunctionAndArguments( + this IReadOnlyKernelPluginCollection plugins, + ChatCompletionsFunctionToolCall functionToolCall, + [NotNullWhen(true)] out KernelFunction? function, + out KernelArguments? arguments) => + plugins.TryGetFunctionAndArguments(new OpenAIFunctionToolCall(functionToolCall), out function, out arguments); + + /// + /// Given an object, tries to retrieve the corresponding and populate with its parameters. + /// + /// The plugins. + /// The object. + /// When this method returns, the function that was retrieved if one with the specified name was found; otherwise, + /// When this method returns, the arguments for the function; otherwise, + /// if the function was found; otherwise, . + public static bool TryGetFunctionAndArguments( + this IReadOnlyKernelPluginCollection plugins, + OpenAIFunctionToolCall functionToolCall, + [NotNullWhen(true)] out KernelFunction? function, + out KernelArguments? 
arguments)
+    {
+        if (plugins.TryGetFunction(functionToolCall.PluginName, functionToolCall.FunctionName, out function))
+        {
+            // Add parameters to arguments
+            arguments = null;
+            if (functionToolCall.Arguments is not null)
+            {
+                arguments = new KernelArguments();
+                foreach (var parameter in functionToolCall.Arguments)
+                {
+                    arguments[parameter.Key] = parameter.Value?.ToString();
+                }
+            }
+
+            return true;
+        }
+
+        // Function not found in collection
+        arguments = null;
+        return false;
+    }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs
new file mode 100644
index 000000000000..fa3845782d0a
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs
@@ -0,0 +1,87 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Text;
+using Azure.AI.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace Microsoft.SemanticKernel.Connectors.OpenAI;
+
+/// <summary>
+/// Azure OpenAI and OpenAI Specialized streaming chat message content.
+/// </summary>
+/// <remarks>
+/// Represents a chat message content chunk that was streamed from the remote model.
+/// </remarks>
+public sealed class OpenAIStreamingChatMessageContent : StreamingChatMessageContent
+{
+    /// <summary>
+    /// The reason why the completion finished.
+    /// </summary>
+    public CompletionsFinishReason? FinishReason { get; set; }
+
+    /// <summary>
+    /// Create a new instance of the <see cref="OpenAIStreamingChatMessageContent"/> class.
+    /// </summary>
+    /// <param name="chatUpdate">Internal Azure SDK Message update representation</param>
+    /// <param name="choiceIndex">Index of the choice</param>
+    /// <param name="modelId">The model ID used to generate the content</param>
+    /// <param name="metadata">Additional metadata</param>
+    internal OpenAIStreamingChatMessageContent(
+        StreamingChatCompletionsUpdate chatUpdate,
+        int choiceIndex,
+        string modelId,
+        IReadOnlyDictionary<string, object?>? metadata = null)
+        : base(
+            chatUpdate.Role.HasValue ? new AuthorRole(chatUpdate.Role.Value.ToString()) : null,
+            chatUpdate.ContentUpdate,
+            chatUpdate,
+            choiceIndex,
+            modelId,
+            Encoding.UTF8,
+            metadata)
+    {
+        this.ToolCallUpdate = chatUpdate.ToolCallUpdate;
+        this.FinishReason = chatUpdate.FinishReason;
+    }
+
+    /// <summary>
+    /// Create a new instance of the <see cref="OpenAIStreamingChatMessageContent"/> class.
+    /// </summary>
+    /// <param name="authorRole">Author role of the message</param>
+    /// <param name="content">Content of the message</param>
+    /// <param name="toolCallUpdate">Tool call update</param>
+    /// <param name="completionsFinishReason">Completion finish reason</param>
+    /// <param name="choiceIndex">Index of the choice</param>
+    /// <param name="modelId">The model ID used to generate the content</param>
+    /// <param name="metadata">Additional metadata</param>
+    internal OpenAIStreamingChatMessageContent(
+        AuthorRole? authorRole,
+        string? content,
+        StreamingToolCallUpdate? toolCallUpdate = null,
+        CompletionsFinishReason? completionsFinishReason = null,
+        int choiceIndex = 0,
+        string? modelId = null,
+        IReadOnlyDictionary<string, object?>? metadata = null)
+        : base(
+            authorRole,
+            content,
+            null,
+            choiceIndex,
+            modelId,
+            Encoding.UTF8,
+            metadata)
+    {
+        this.ToolCallUpdate = toolCallUpdate;
+        this.FinishReason = completionsFinishReason;
+    }
+
+    /// <summary>Gets any update information in the message about a tool call.</summary>
+    public StreamingToolCallUpdate? ToolCallUpdate { get; }
+
+    /// <inheritdoc/>
+    public override byte[] ToByteArray() => this.Encoding.GetBytes(this.ToString());
+
+    /// <inheritdoc/>
+    public override string ToString() => this.Content ??
string.Empty; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs new file mode 100644 index 000000000000..126e1615a747 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Azure OpenAI and OpenAI Specialized streaming text content. +/// +/// +/// Represents a text content chunk that was streamed from the remote model. +/// +public sealed class OpenAIStreamingTextContent : StreamingTextContent +{ + /// + /// Create a new instance of the class. + /// + /// Text update + /// Index of the choice + /// The model ID used to generate the content + /// Inner chunk object + /// Metadata information + internal OpenAIStreamingTextContent( + string text, + int choiceIndex, + string modelId, + object? innerContentObject = null, + IReadOnlyDictionary? metadata = null) + : base( + text, + choiceIndex, + modelId, + innerContentObject, + Encoding.UTF8, + metadata) + { + } + + /// + public override byte[] ToByteArray() + { + return this.Encoding.GetBytes(this.ToString()); + } + + /// + public override string ToString() + { + return this.Text ?? string.Empty; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs new file mode 100644 index 000000000000..51f99aa1c0cb --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net; +using Azure; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Provides extension methods for the class. +/// +internal static class RequestFailedExceptionExtensions +{ + /// + /// Converts a to an . + /// + /// The original . + /// An instance. + public static HttpOperationException ToHttpOperationException(this RequestFailedException exception) + { + const int NoResponseReceived = 0; + + string? responseContent = null; + + try + { + responseContent = exception.GetRawResponse()?.Content?.ToString(); + } +#pragma warning disable CA1031 // Do not catch general exception types + catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead. +#pragma warning restore CA1031 + + return new HttpOperationException( + exception.Status == NoResponseReceived ? null : (HttpStatusCode?)exception.Status, + responseContent, + exception.Message, + exception); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs new file mode 100644 index 000000000000..04da5d2dc1e3 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft. All rights reserved. 
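// Illustrative sketch of TryGetFunctionAndArguments above: resolving the kernel function named in a
// tool call and invoking it. Assumes an async context, a kernel like the one in the earlier sketch,
// and the "-" plugin/function name separator; the tool-call payload values are placeholders.
var toolCall = new ChatCompletionsFunctionToolCall("call_1", "Time-UtcNow", "{}");
if (kernel.Plugins.TryGetFunctionAndArguments(toolCall, out KernelFunction? function, out KernelArguments? arguments))
{
    FunctionResult result = await kernel.InvokeAsync(function, arguments);
    Console.WriteLine(result.GetValue<string>());
}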
+ +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Azure OpenAI chat completion service. +/// +public sealed class AzureOpenAIChatCompletionService : IChatCompletionService, ITextGenerationService +{ + /// Core implementation shared by Azure OpenAI clients. + private readonly AzureOpenAIClientCore _core; + + /// + /// Create an instance of the connector with API key auth. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAIChatCompletionService( + string deploymentName, + string endpoint, + string apiKey, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + /// Create an instance of the connector with AAD auth. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAIChatCompletionService( + string deploymentName, + string endpoint, + TokenCredential credentials, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + /// Creates a new client instance using the specified . + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAIChatCompletionService( + string deploymentName, + OpenAIClient openAIClient, + string? modelId = null, + ILoggerFactory? 
loggerFactory = null) + { + this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + public IReadOnlyDictionary Attributes => this._core.Attributes; + + /// + public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + + /// + public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + + /// + public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetChatAsTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); + + /// + public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetChatAsTextStreamingContentsAsync(prompt, executionSettings, kernel, cancellationToken); +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs new file mode 100644 index 000000000000..91ec14fd3d78 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// OpenAI chat completion service. +/// +public sealed class OpenAIChatCompletionService : IChatCompletionService, ITextGenerationService +{ + private readonly OpenAIClientCore _core; + + /// + /// Create an instance of the OpenAI chat completion connector + /// + /// Model name + /// OpenAI API Key + /// OpenAI Organization Id (usually optional) + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAIChatCompletionService( + string modelId, + string apiKey, + string? organization = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(modelId, apiKey, organization, httpClient, loggerFactory?.CreateLogger(typeof(OpenAIChatCompletionService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); + } + + /// + /// Create an instance of the OpenAI chat completion connector + /// + /// Model name + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAIChatCompletionService( + string modelId, + OpenAIClient openAIClient, + ILoggerFactory? 
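// Illustrative sketch of the Azure chat completion service defined above; deployment, endpoint and
// API key are placeholders. ChatHistory comes from Microsoft.SemanticKernel.ChatCompletion.
var azureChat = new AzureOpenAIChatCompletionService(
    deploymentName: "gpt-35-turbo",
    endpoint: "https://my-resource.openai.azure.com/",
    apiKey: "...");

var chat = new ChatHistory("You are a concise assistant.");
chat.AddUserMessage("What is Semantic Kernel?");

var replies = await azureChat.GetChatMessageContentsAsync(chat);
Console.WriteLine(replies[0].Content);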
loggerFactory = null) + { + this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAIChatCompletionService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + public IReadOnlyDictionary Attributes => this._core.Attributes; + + /// + public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + + /// + public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + + /// + public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetChatAsTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); + + /// + public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetChatAsTextStreamingContentsAsync(prompt, executionSettings, kernel, cancellationToken); +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs new file mode 100644 index 000000000000..bae02aae3627 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Required configuration for Azure OpenAI chat completion with data. +/// More information: +/// +[Experimental("SKEXP0010")] +public class AzureOpenAIChatCompletionWithDataConfig +{ + /// + /// Azure OpenAI model ID or deployment name, see + /// + public string CompletionModelId { get; set; } = string.Empty; + + /// + /// Azure OpenAI deployment URL, see + /// + public string CompletionEndpoint { get; set; } = string.Empty; + + /// + /// Azure OpenAI API key, see + /// + public string CompletionApiKey { get; set; } = string.Empty; + + /// + /// Azure OpenAI Completion API version (e.g. 2023-06-01-preview) + /// + public string CompletionApiVersion { get; set; } = string.Empty; + + /// + /// Data source endpoint URL. + /// For Azure AI Search, see + /// + public string DataSourceEndpoint { get; set; } = string.Empty; + + /// + /// Data source API key. + /// For Azure AI Search keys, see + /// + public string DataSourceApiKey { get; set; } = string.Empty; + + /// + /// Data source index name. 
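// Illustrative streaming sketch for the OpenAIChatCompletionService above; model name and API key
// are placeholders, and an async context is assumed.
var openAIChat = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "...");

var streamingHistory = new ChatHistory();
streamingHistory.AddUserMessage("Stream a short haiku about tokens.");

await foreach (StreamingChatMessageContent update in openAIChat.GetStreamingChatMessageContentsAsync(streamingHistory))
{
    Console.Write(update.Content);
}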
+ /// For Azure AI Search indexes, see + /// + public string DataSourceIndex { get; set; } = string.Empty; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs new file mode 100644 index 000000000000..8cf1631fb6e4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs @@ -0,0 +1,298 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.Text; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Azure OpenAI Chat Completion with data service. +/// More information: +/// +[Experimental("SKEXP0010")] +public sealed class AzureOpenAIChatCompletionWithDataService : IChatCompletionService, ITextGenerationService +{ + /// + /// Initializes a new instance of the class. + /// + /// Instance of class with completion configuration. + /// Custom for HTTP requests. + /// Instance of to use for logging. + public AzureOpenAIChatCompletionWithDataService( + AzureOpenAIChatCompletionWithDataConfig config, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this.ValidateConfig(config); + + this._config = config; + + this._httpClient = HttpClientProvider.GetHttpClient(httpClient); + this._logger = loggerFactory?.CreateLogger(this.GetType()) ?? NullLogger.Instance; + this._attributes.Add(AIServiceExtensions.ModelIdKey, config.CompletionModelId); + } + + /// + public IReadOnlyDictionary Attributes => this._attributes; + + /// + public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this.InternalGetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + + /// + public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this.InternalGetChatStreamingContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + + /// + public async Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + return (await this.GetChatMessageContentsAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) + .Select(chat => new TextContent(chat.Content, chat.ModelId, chat, Encoding.UTF8, chat.Metadata)) + .ToList(); + } + + /// + public async IAsyncEnumerable GetStreamingTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? 
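// Illustrative sketch wiring the configuration above to an Azure AI Search index; every endpoint,
// key and index name below is a placeholder, and the service is experimental (SKEXP0010).
var withDataConfig = new AzureOpenAIChatCompletionWithDataConfig
{
    CompletionModelId = "gpt-35-turbo",
    CompletionEndpoint = "https://my-resource.openai.azure.com/",
    CompletionApiKey = "...",
    DataSourceEndpoint = "https://my-search.search.windows.net",
    DataSourceApiKey = "...",
    DataSourceIndex = "my-index"
};

var withDataService = new AzureOpenAIChatCompletionWithDataService(withDataConfig);

var grounded = new ChatHistory();
grounded.AddUserMessage("What does the indexed documentation say about retries?");
var groundedReply = await withDataService.GetChatMessageContentsAsync(grounded);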
kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await foreach (var streamingChatContent in this.InternalGetChatStreamingContentsAsync(new ChatHistory(prompt), executionSettings, kernel, cancellationToken).ConfigureAwait(false)) + { + yield return new StreamingTextContent(streamingChatContent.Content, streamingChatContent.ChoiceIndex, streamingChatContent.ModelId, streamingChatContent, Encoding.UTF8, streamingChatContent.Metadata); + } + } + + #region private ================================================================================ + + private const string DefaultApiVersion = "2023-06-01-preview"; + + private readonly AzureOpenAIChatCompletionWithDataConfig _config; + + private readonly HttpClient _httpClient; + private readonly ILogger _logger; + private readonly Dictionary _attributes = new(); + private void ValidateConfig(AzureOpenAIChatCompletionWithDataConfig config) + { + Verify.NotNull(config); + + Verify.NotNullOrWhiteSpace(config.CompletionModelId); + Verify.NotNullOrWhiteSpace(config.CompletionEndpoint); + Verify.NotNullOrWhiteSpace(config.CompletionApiKey); + Verify.NotNullOrWhiteSpace(config.DataSourceEndpoint); + Verify.NotNullOrWhiteSpace(config.DataSourceApiKey); + Verify.NotNullOrWhiteSpace(config.DataSourceIndex); + } + + private async Task> InternalGetChatMessageContentsAsync( + ChatHistory chat, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + CancellationToken cancellationToken = default) + { + var openAIExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); + + using var request = this.GetRequest(chat, openAIExecutionSettings, isStreamEnabled: false); + using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); + + var body = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + + var chatWithDataResponse = this.DeserializeResponse(body); + IReadOnlyDictionary metadata = GetResponseMetadata(chatWithDataResponse); + + return chatWithDataResponse.Choices.Select(choice => new AzureOpenAIWithDataChatMessageContent(choice, this.GetModelId(), metadata)).ToList(); + } + + private static Dictionary GetResponseMetadata(ChatWithDataResponse chatResponse) + { + return new Dictionary(5) + { + { nameof(chatResponse.Id), chatResponse.Id }, + { nameof(chatResponse.Model), chatResponse.Model }, + { nameof(chatResponse.Created), chatResponse.Created }, + { nameof(chatResponse.Object), chatResponse.Object }, + { nameof(chatResponse.Usage), chatResponse.Usage }, + }; + } + + private static Dictionary GetResponseMetadata(ChatWithDataStreamingResponse chatResponse) + { + return new Dictionary(4) + { + { nameof(chatResponse.Id), chatResponse.Id }, + { nameof(chatResponse.Model), chatResponse.Model }, + { nameof(chatResponse.Created), chatResponse.Created }, + { nameof(chatResponse.Object), chatResponse.Object }, + }; + } + + private async Task SendRequestAsync( + HttpRequestMessage request, + CancellationToken cancellationToken = default) + { + request.Headers.Add("User-Agent", HttpHeaderValues.UserAgent); + request.Headers.Add("Api-Key", this._config.CompletionApiKey); + + try + { + return await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); + } + catch (HttpOperationException ex) + { + this._logger.LogError( + "Error occurred on chat completion with data request execution: {ExceptionMessage}", 
ex.Message); + + throw; + } + } + + private async IAsyncEnumerable InternalGetChatStreamingContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings chatRequestSettings = OpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings); + + using var request = this.GetRequest(chatHistory, chatRequestSettings, isStreamEnabled: true); + using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); + + const string ServerEventPayloadPrefix = "data:"; + + using var stream = await response.Content.ReadAsStreamAndTranslateExceptionAsync().ConfigureAwait(false); + using var reader = new StreamReader(stream); + + while (!reader.EndOfStream) + { + var body = await reader.ReadLineAsync().ConfigureAwait(false); + + if (string.IsNullOrWhiteSpace(body)) + { + continue; + } + + if (body.StartsWith(ServerEventPayloadPrefix, StringComparison.Ordinal)) + { + body = body.Substring(ServerEventPayloadPrefix.Length); + } + + var chatWithDataResponse = this.DeserializeResponse(body); + IReadOnlyDictionary metadata = GetResponseMetadata(chatWithDataResponse); + + foreach (var choice in chatWithDataResponse.Choices) + { + yield return new AzureOpenAIWithDataStreamingChatMessageContent(choice, choice.Index, this.GetModelId()!, metadata); + } + } + } + + private T DeserializeResponse(string body) + { + var response = JsonSerializer.Deserialize(body, JsonOptionsCache.ReadPermissive); + + if (response is null) + { + const string ErrorMessage = "Error occurred on chat completion with data response deserialization"; + + this._logger.LogError(ErrorMessage); + + throw new KernelException(ErrorMessage); + } + + return response; + } + + private HttpRequestMessage GetRequest( + ChatHistory chat, + OpenAIPromptExecutionSettings executionSettings, + bool isStreamEnabled) + { + var payload = new ChatWithDataRequest + { + Temperature = executionSettings.Temperature, + TopP = executionSettings.TopP, + IsStreamEnabled = isStreamEnabled, + StopSequences = executionSettings.StopSequences, + MaxTokens = executionSettings.MaxTokens, + PresencePenalty = executionSettings.PresencePenalty, + FrequencyPenalty = executionSettings.FrequencyPenalty, + TokenSelectionBiases = executionSettings.TokenSelectionBiases ?? new Dictionary(), + DataSources = this.GetDataSources(), + Messages = this.GetMessages(chat) + }; + + return HttpRequest.CreatePostRequest(this.GetRequestUri(), payload); + } + + private List GetDataSources() + { + return new List + { + new() { + Parameters = new ChatWithDataSourceParameters + { + Endpoint = this._config.DataSourceEndpoint, + ApiKey = this._config.DataSourceApiKey, + IndexName = this._config.DataSourceIndex + } + } + }; + } + + private List GetMessages(ChatHistory chat) + { + // The system role as the unique message is not allowed in the With Data APIs. + // This avoids the error: Invalid message request body. Learn how to use Completions extension API, please refer to https://learn.microsoft.com/azure/ai-services/openai/reference#completions-extensions + if (chat.Count == 1 && chat[0].Role == AuthorRole.System) + { + // Converts a system message to a user message if is the unique message in the chat. + chat[0].Role = AuthorRole.User; + } + + return chat + .Select(message => new ChatWithDataMessage + { + Role = message.Role.Label, + Content = message.Content ?? 
string.Empty + }) + .ToList(); + } + + private string GetRequestUri() + { + const string EndpointUriFormat = "{0}/openai/deployments/{1}/extensions/chat/completions?api-version={2}"; + + var apiVersion = this._config.CompletionApiVersion; + + if (string.IsNullOrWhiteSpace(apiVersion)) + { + apiVersion = DefaultApiVersion; + } + + return string.Format( + CultureInfo.InvariantCulture, + EndpointUriFormat, + this._config.CompletionEndpoint.TrimEnd('/'), + this._config.CompletionModelId, + apiVersion); + } + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs new file mode 100644 index 000000000000..46d8d51863df --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +[Experimental("SKEXP0010")] +internal sealed class ChatWithDataMessage +{ + [JsonPropertyName("role")] + public string Role { get; set; } = string.Empty; + + [JsonPropertyName("content")] + public string Content { get; set; } = string.Empty; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs new file mode 100644 index 000000000000..ed9d38f8a452 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +[Experimental("SKEXP0010")] +internal sealed class ChatWithDataRequest +{ + [JsonPropertyName("temperature")] + public double Temperature { get; set; } = 0; + + [JsonPropertyName("top_p")] + public double TopP { get; set; } = 0; + + [JsonPropertyName("stream")] + public bool IsStreamEnabled { get; set; } + + [JsonPropertyName("stop")] + public IList? StopSequences { get; set; } = Array.Empty(); + + [JsonPropertyName("max_tokens")] + public int? MaxTokens { get; set; } + + [JsonPropertyName("presence_penalty")] + public double PresencePenalty { get; set; } = 0; + + [JsonPropertyName("frequency_penalty")] + public double FrequencyPenalty { get; set; } = 0; + + [JsonPropertyName("logit_bias")] + public IDictionary TokenSelectionBiases { get; set; } = new Dictionary(); + + [JsonPropertyName("dataSources")] + public IList DataSources { get; set; } = Array.Empty(); + + [JsonPropertyName("messages")] + public IList Messages { get; set; } = Array.Empty(); +} + +[Experimental("SKEXP0010")] +internal sealed class ChatWithDataSource +{ + [JsonPropertyName("type")] + // The current API only supports "AzureCognitiveSearch" as name otherwise an error is returned. 
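// Worked example of the GetRequestUri helper above, using placeholder configuration values
// (CultureInfo requires System.Globalization):
var uri = string.Format(
    CultureInfo.InvariantCulture,
    "{0}/openai/deployments/{1}/extensions/chat/completions?api-version={2}",
    "https://my-resource.openai.azure.com", // CompletionEndpoint with the trailing '/' trimmed
    "gpt-35-turbo",                         // CompletionModelId
    "2023-06-01-preview");                  // DefaultApiVersion when CompletionApiVersion is empty
// uri: https://my-resource.openai.azure.com/openai/deployments/gpt-35-turbo/extensions/chat/completions?api-version=2023-06-01-preview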
+ // Validation error at #/dataSources/0: Input tag 'AzureAISearch' found using 'type' does not match any of + // the expected tags: 'AzureCognitiveSearch', 'Elasticsearch', 'AzureCosmosDB', 'Pinecone', 'AzureMLIndex', 'Microsoft365' + public string Type { get; set; } = "AzureCognitiveSearch"; + + [JsonPropertyName("parameters")] + public ChatWithDataSourceParameters Parameters { get; set; } = new ChatWithDataSourceParameters(); +} + +[Experimental("SKEXP0010")] +internal sealed class ChatWithDataSourceParameters +{ + [JsonPropertyName("endpoint")] + public string Endpoint { get; set; } = string.Empty; + + [JsonPropertyName("key")] + public string ApiKey { get; set; } = string.Empty; + + [JsonPropertyName("indexName")] + public string IndexName { get; set; } = string.Empty; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs new file mode 100644 index 000000000000..62cb36c2cc5e --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + +[Experimental("SKEXP0010")] +internal sealed class ChatWithDataResponse +{ + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + [JsonPropertyName("created")] + public int Created { get; set; } = default; + + [JsonPropertyName("choices")] + public IList Choices { get; set; } = Array.Empty(); + + [JsonPropertyName("usage")] + public ChatWithDataUsage Usage { get; set; } + + [JsonPropertyName("model")] + public string Model { get; set; } = string.Empty; + + [JsonPropertyName("object")] + public string Object { get; set; } = string.Empty; + + [JsonConstructor] + public ChatWithDataResponse(ChatWithDataUsage usage) + { + this.Usage = usage; + } +} + +[Experimental("SKEXP0010")] +[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] +internal sealed class ChatWithDataChoice +{ + [JsonPropertyName("messages")] + public IList Messages { get; set; } = Array.Empty(); +} + +[Experimental("SKEXP0010")] +internal sealed class ChatWithDataUsage +{ + [JsonPropertyName("prompt_tokens")] + public int PromptTokens { get; set; } + + [JsonPropertyName("completion_tokens")] + public int CompletionTokens { get; set; } + + [JsonPropertyName("total_tokens")] + public int TotalTokens { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs new file mode 100644 index 000000000000..ee5cdcd0bd7a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft. All rights reserved. 
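// Illustrative sketch (connector-internal types): what one entry of the "dataSources" array
// serializes to, given the classes above and their JsonPropertyName attributes. Values are placeholders.
var source = new ChatWithDataSource
{
    Parameters = new ChatWithDataSourceParameters
    {
        Endpoint = "https://my-search.search.windows.net",
        ApiKey = "...",
        IndexName = "my-index"
    }
};
// JsonSerializer.Serialize(source) =>
// {"type":"AzureCognitiveSearch","parameters":{"endpoint":"https://my-search.search.windows.net","key":"...","indexName":"my-index"}}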
+ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +[Experimental("SKEXP0010")] +[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] +internal sealed class ChatWithDataStreamingResponse +{ + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + [JsonPropertyName("created")] + public int Created { get; set; } = default; + + [JsonPropertyName("model")] + public string Model { get; set; } = string.Empty; + + [JsonPropertyName("object")] + public string Object { get; set; } = string.Empty; + + [JsonPropertyName("choices")] + public IList Choices { get; set; } = Array.Empty(); +} + +[Experimental("SKEXP0010")] +[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] +internal sealed class ChatWithDataStreamingChoice +{ + [JsonPropertyName("messages")] + public IList Messages { get; set; } = Array.Empty(); + + [JsonPropertyName("index")] + public int Index { get; set; } = 0; +} + +[Experimental("SKEXP0010")] +[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] +internal sealed class ChatWithDataStreamingMessage +{ + [JsonPropertyName("delta")] + public ChatWithDataStreamingDelta Delta { get; set; } = new(); + + [JsonPropertyName("end_turn")] + public bool EndTurn { get; set; } +} + +[Experimental("SKEXP0010")] +internal sealed class ChatWithDataStreamingDelta +{ + [JsonPropertyName("role")] + public string? Role { get; set; } + + [JsonPropertyName("content")] + public string Content { get; set; } = string.Empty; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj new file mode 100644 index 000000000000..e7771adc594c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj @@ -0,0 +1,34 @@ + + + + + Microsoft.SemanticKernel.Connectors.OpenAI + $(AssemblyName) + netstandard2.0 + true + $(NoWarn);NU5104;SKEXP0013,SKEXP0014 + true + + + + + + + + + Semantic Kernel - OpenAI and Azure OpenAI connectors + Semantic Kernel connectors for OpenAI and Azure OpenAI. Contains clients for text generation, chat completion, embedding and DALL-E text to image. + + + + + + + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs new file mode 100644 index 000000000000..e14e730af52f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// Base type for OpenAI text to image clients. +internal sealed class OpenAITextToImageClientCore +{ + /// + /// Initializes a new instance of the class. 
+ /// + /// The HttpClient used for making HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal OpenAITextToImageClientCore(HttpClient? httpClient, ILogger? logger = null) + { + this._httpClient = HttpClientProvider.GetHttpClient(httpClient); + this._logger = logger ?? NullLogger.Instance; + } + + /// + /// Storage for AI service attributes. + /// + internal Dictionary Attributes { get; } = new(); + + /// + /// Run the HTTP request to generate a list of images + /// + /// URL for the text to image request API + /// Request payload + /// Function to invoke to extract the desired portion of the text to image response. + /// The to monitor for cancellation requests. The default is . + /// List of image URLs + [Experimental("SKEXP0012")] + internal async Task> ExecuteImageGenerationRequestAsync( + string url, + string requestBody, + Func extractResponseFunc, + CancellationToken cancellationToken = default) + { + var result = await this.ExecutePostRequestAsync(url, requestBody, cancellationToken).ConfigureAwait(false); + return result.Images.Select(extractResponseFunc).ToList(); + } + + /// + /// Add attribute to the internal attribute dictionary if the value is not null or empty. + /// + /// Attribute key + /// Attribute value + internal void AddAttribute(string key, string? value) + { + if (!string.IsNullOrEmpty(value)) + { + this.Attributes.Add(key, value!); + } + } + + /// + /// Logger + /// + private readonly ILogger _logger; + + /// + /// The HttpClient used for making HTTP requests. + /// + private readonly HttpClient _httpClient; + + internal async Task ExecutePostRequestAsync(string url, string requestBody, CancellationToken cancellationToken = default) + { + using var content = new StringContent(requestBody, Encoding.UTF8, "application/json"); + using var response = await this.ExecuteRequestAsync(url, HttpMethod.Post, content, cancellationToken).ConfigureAwait(false); + string responseJson = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + T result = JsonDeserialize(responseJson); + return result; + } + + internal static T JsonDeserialize(string responseJson) => + JsonSerializer.Deserialize(responseJson, JsonOptionsCache.ReadPermissive) ?? + throw new KernelException("Response JSON parse error"); + + internal event EventHandler? RequestCreated; + + internal async Task ExecuteRequestAsync(string url, HttpMethod method, HttpContent? content, CancellationToken cancellationToken = default) + { + using var request = new HttpRequestMessage(method, url); + + if (content != null) + { + request.Content = content; + } + + request.Headers.Add("User-Agent", HttpHeaderValues.UserAgent); + this.RequestCreated?.Invoke(this, request); + + var response = await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); + + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("HTTP response: {0} {1}", (int)response.StatusCode, response.StatusCode.ToString("G")); + } + + return response; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs new file mode 100644 index 000000000000..76c2bc9e536a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Azure.Core; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Memory; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Provides extension methods for the class to configure OpenAI and AzureOpenAI connectors. +/// +public static class OpenAIMemoryBuilderExtensions +{ + /// + /// Adds an Azure OpenAI text embeddings service. + /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. + /// + /// The instance + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Model identifier + /// Custom for HTTP requests. + /// Self instance + [Experimental("SKEXP0011")] + public static MemoryBuilder WithAzureOpenAITextEmbeddingGeneration( + this MemoryBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? modelId = null, + HttpClient? httpClient = null) + { + return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient ?? builderHttpClient), + loggerFactory)); + } + + /// + /// Adds an Azure OpenAI text embeddings service. + /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. + /// + /// The instance + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Model identifier + /// Custom for HTTP requests. + /// Self instance + [Experimental("SKEXP0011")] + public static MemoryBuilder WithAzureOpenAITextEmbeddingGeneration( + this MemoryBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credential, + string? modelId = null, + HttpClient? httpClient = null) + { + return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + credential, + modelId, + HttpClientProvider.GetHttpClient(httpClient ?? builderHttpClient), + loggerFactory)); + } + + /// + /// Adds the OpenAI text embeddings service. + /// See https://platform.openai.com/docs for service details. + /// + /// The instance + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// Custom for HTTP requests. + /// Self instance + [Experimental("SKEXP0011")] + public static MemoryBuilder WithOpenAITextEmbeddingGeneration( + this MemoryBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + HttpClient? httpClient = null) + { + return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient ?? 
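// Illustrative sketch for the MemoryBuilder extensions in this file; the deployment, endpoint and
// key are placeholders, and VolatileMemoryStore / SaveInformationAsync are assumed to be available
// from the Semantic Kernel memory packages.
var memory = new MemoryBuilder()
    .WithAzureOpenAITextEmbeddingGeneration(
        deploymentName: "text-embedding-ada-002",
        endpoint: "https://my-resource.openai.azure.com/",
        apiKey: "...")
    .WithMemoryStore(new VolatileMemoryStore())
    .Build();

await memory.SaveInformationAsync(collection: "docs", text: "Semantic Kernel orchestrates AI plugins.", id: "doc-1");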
builderHttpClient), + loggerFactory)); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs new file mode 100644 index 000000000000..907193908d72 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs @@ -0,0 +1,218 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Execution settings for an OpenAI completion request. +/// +[JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] +public sealed class OpenAIPromptExecutionSettings : PromptExecutionSettings +{ + /// + /// Temperature controls the randomness of the completion. + /// The higher the temperature, the more random the completion. + /// Default is 1.0. + /// + [JsonPropertyName("temperature")] + public double Temperature { get; set; } = 1; + + /// + /// TopP controls the diversity of the completion. + /// The higher the TopP, the more diverse the completion. + /// Default is 1.0. + /// + [JsonPropertyName("top_p")] + public double TopP { get; set; } = 1; + + /// + /// Number between -2.0 and 2.0. Positive values penalize new tokens + /// based on whether they appear in the text so far, increasing the + /// model's likelihood to talk about new topics. + /// + [JsonPropertyName("presence_penalty")] + public double PresencePenalty { get; set; } + + /// + /// Number between -2.0 and 2.0. Positive values penalize new tokens + /// based on their existing frequency in the text so far, decreasing + /// the model's likelihood to repeat the same line verbatim. + /// + [JsonPropertyName("frequency_penalty")] + public double FrequencyPenalty { get; set; } + + /// + /// The maximum number of tokens to generate in the completion. + /// + [JsonPropertyName("max_tokens")] + public int? MaxTokens { get; set; } + + /// + /// Sequences where the completion will stop generating further tokens. + /// + [JsonPropertyName("stop_sequences")] + public IList? StopSequences { get; set; } + + /// + /// How many completions to generate for each prompt. Default is 1. + /// Note: Because this parameter generates many completions, it can quickly consume your token quota. + /// Use carefully and ensure that you have reasonable settings for max_tokens and stop. + /// + [JsonPropertyName("results_per_prompt")] + public int ResultsPerPrompt { get; set; } = 1; + + /// + /// If specified, the system will make a best effort to sample deterministically such that repeated requests with the + /// same seed and parameters should return the same result. Determinism is not guaranteed. + /// + [Experimental("SKEXP0013")] + [JsonPropertyName("seed")] + public long? Seed { get; set; } + + /// + /// Gets or sets the response format to use for the completion. + /// + /// + /// Possible values are: "json_object", "text", object. + /// + [Experimental("SKEXP0013")] + [JsonPropertyName("response_format")] + public object? ResponseFormat { get; set; } + + /// + /// The system prompt to use when generating text using a chat model. + /// Defaults to "Assistant is a large language model." 
+ /// + [JsonPropertyName("chat_system_prompt")] + public string ChatSystemPrompt + { + get => this._chatSystemPrompt; + set + { + if (string.IsNullOrWhiteSpace(value)) + { + value = DefaultChatSystemPrompt; + } + this._chatSystemPrompt = value; + } + } + + /// + /// Modify the likelihood of specified tokens appearing in the completion. + /// + [JsonPropertyName("token_selection_biases")] + public IDictionary? TokenSelectionBiases { get; set; } + + /// + /// Gets or sets the behavior for how tool calls are handled. + /// + /// + /// + /// To disable all tool calling, set the property to null (the default). + /// + /// To request that the model use a specific function, set the property to an instance returned + /// from . + /// + /// + /// To allow the model to request one of any number of functions, set the property to an + /// instance returned from , called with + /// a list of the functions available. + /// + /// + /// To allow the model to request one of any of the functions in the supplied , + /// set the property to if the client should simply + /// send the information about the functions and not handle the response in any special manner, or + /// if the client should attempt to automatically + /// invoke the function and send the result back to the service. + /// + /// + /// For all options where an instance is provided, auto-invoke behavior may be selected. If the service + /// sends a request for a function call, if auto-invoke has been requested, the client will attempt to + /// resolve that function from the functions available in the , and if found, rather + /// than returning the response back to the caller, it will handle the request automatically, invoking + /// the function, and sending back the result. The intermediate messages will be retained in the + /// if an instance was provided. + /// + public ToolCallBehavior? ToolCallBehavior { get; set; } + + /// + /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse + /// + public string? User { get; set; } + + /// + /// Default value for chat system property. + /// + internal static string DefaultChatSystemPrompt { get; } = "Assistant is a large language model."; + + /// + /// Default max tokens for a text generation + /// + internal static int DefaultTextMaxTokens { get; } = 256; + + /// + /// Create a new settings object with the values from another settings object. + /// + /// Template configuration + /// Default max tokens + /// An instance of OpenAIPromptExecutionSettings + public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + { + if (executionSettings is null) + { + return new OpenAIPromptExecutionSettings() + { + MaxTokens = defaultMaxTokens + }; + } + + if (executionSettings is OpenAIPromptExecutionSettings settings) + { + return settings; + } + + var json = JsonSerializer.Serialize(executionSettings); + + var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); + if (openAIExecutionSettings is not null) + { + return openAIExecutionSettings; + } + + throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(OpenAIPromptExecutionSettings)}", nameof(executionSettings)); + } + + /// + /// Create a new settings object with the values from another settings object. 
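// Illustrative sketch of typical execution settings for a chat request; the values are placeholders,
// ToolCallBehavior.AutoInvokeKernelFunctions is assumed from the connector's ToolCallBehavior type,
// and chatService/history/kernel are hypothetical variables from earlier sketches.
var settings = new OpenAIPromptExecutionSettings
{
    Temperature = 0.2,
    MaxTokens = 500,
    ChatSystemPrompt = "Answer in one short paragraph.",
    ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
};

var answer = await chatService.GetChatMessageContentsAsync(history, settings, kernel);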
+ /// + /// Template configuration + /// Default max tokens + /// An instance of OpenAIPromptExecutionSettings + public static OpenAIPromptExecutionSettings FromExecutionSettingsWithData(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + { + var settings = FromExecutionSettings(executionSettings, defaultMaxTokens); + + if (settings.StopSequences?.Count == 0) + { + // Azure OpenAI WithData API does not allow to send empty array of stop sequences + // Gives back "Validation error at #/stop/str: Input should be a valid string\nValidation error at #/stop/list[str]: List should have at least 1 item after validation, not 0" + settings.StopSequences = null; + } + + return settings; + } + + #region private ================================================================================ + + private string _chatSystemPrompt = DefaultChatSystemPrompt; + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs new file mode 100644 index 000000000000..aa1b9c383c4f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs @@ -0,0 +1,1186 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Azure; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToImage; + +#pragma warning disable CA2000 // Dispose objects before losing scope +#pragma warning disable IDE0039 // Use local function + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for and related classes to configure OpenAI and Azure OpenAI connectors. +/// +public static class OpenAIServiceCollectionExtensions +{ + #region Text Completion + + /// + /// Adds an Azure OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddAzureOpenAITextGeneration( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + { + var client = CreateAzureOpenAIClient(endpoint, new AzureKeyCredential(apiKey), httpClient ?? 
serviceProvider.GetService()); + return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); + }); + + return builder; + } + + /// + /// Adds an Azure OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAITextGeneration( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + { + var client = CreateAzureOpenAIClient(endpoint, new AzureKeyCredential(apiKey), serviceProvider.GetService()); + return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); + }); + } + + /// + /// Adds an Azure OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddAzureOpenAITextGeneration( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + { + var client = CreateAzureOpenAIClient(endpoint, credentials, httpClient ?? serviceProvider.GetService()); + return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); + }); + + return builder; + } + + /// + /// Adds an Azure OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. 
+ /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAITextGeneration( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + { + var client = CreateAzureOpenAIClient(endpoint, credentials, serviceProvider.GetService()); + return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); + }); + } + + /// + /// Adds an Azure OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IKernelBuilder AddAzureOpenAITextGeneration( + this IKernelBuilder builder, + string deploymentName, + OpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextGenerationService( + deploymentName, + openAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService())); + + return builder; + } + + /// + /// Adds an Azure OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAITextGeneration( + this IServiceCollection services, + string deploymentName, + OpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextGenerationService( + deploymentName, + openAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService())); + } + + /// + /// Adds an OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . 
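For orientation, a minimal usage sketch of the builder-based registrations above, assuming a top-level program; the endpoint, deployment name, and key are placeholders, and serviceId/modelId are left at their defaults.

    using System;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.TextGeneration;

    var builder = Kernel.CreateBuilder();

    // API-key auth; the TokenCredential overload accepts e.g. new DefaultAzureCredential() from Azure.Identity.
    builder.AddAzureOpenAITextGeneration(
        deploymentName: "my-gpt-deployment",
        endpoint: "https://my-resource.openai.azure.com/",
        apiKey: "<azure-openai-key>");

    Kernel kernel = builder.Build();

    // The keyed singleton registered above is resolvable through the kernel.
    var textService = kernel.GetRequiredService<ITextGenerationService>();
    var results = await textService.GetTextContentsAsync("Write a haiku about dependency injection.");
    Console.WriteLine(results[0]);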
+ public static IKernelBuilder AddOpenAITextGeneration( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + /// + /// Adds an OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddOpenAITextGeneration( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + /// + /// Adds an OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IKernelBuilder AddOpenAITextGeneration( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextGenerationService( + modelId, + openAIClient ?? serviceProvider.GetRequiredService(), + serviceProvider.GetService())); + + return builder; + } + + /// + /// Adds an OpenAI text generation service with the specified configuration. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddOpenAITextGeneration(this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextGenerationService( + modelId, + openAIClient ?? serviceProvider.GetRequiredService(), + serviceProvider.GetService())); + } + + #endregion + + #region Text Embedding + + /// + /// Adds an Azure OpenAI text embeddings service to the list. + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0011")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + /// + /// Adds an Azure OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0011")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + /// + /// Adds an Azure OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0011")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credential, + string? serviceId = null, + string? modelId = null, + HttpClient? 
httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credential); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + credential, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + /// + /// Adds an Azure OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0011")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credential, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credential); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + credential, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + /// + /// Adds an Azure OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0011")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + OpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + openAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService())); + + return builder; + } + + /// + /// Adds an Azure OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . 
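A comparable sketch for the IServiceCollection path; these embedding members carry [Experimental("SKEXP0011")], so the diagnostic is suppressed, and AddKernel() is assumed to be the kernel registration helper from the same library. All values are placeholders.

    #pragma warning disable SKEXP0011

    using Microsoft.Extensions.DependencyInjection;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.Embeddings;

    var services = new ServiceCollection();
    services.AddAzureOpenAITextEmbeddingGeneration(
        deploymentName: "my-embedding-deployment",
        endpoint: "https://my-resource.openai.azure.com/",
        apiKey: "<azure-openai-key>");
    services.AddKernel();

    var kernel = services.BuildServiceProvider().GetRequiredService<Kernel>();
    var embeddingService = kernel.GetRequiredService<ITextEmbeddingGenerationService>();

    // One embedding vector is returned per input string.
    var vectors = await embeddingService.GenerateEmbeddingsAsync(new[] { "first sentence", "second sentence" });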
+ [Experimental("SKEXP0011")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + OpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + openAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService())); + } + + /// + /// Adds the OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0011")] + public static IKernelBuilder AddOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + /// + /// Adds the OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0011")] + public static IServiceCollection AddOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + /// + /// Adds the OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0011")] + public static IKernelBuilder AddOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + openAIClient ?? 
serviceProvider.GetRequiredService(), + serviceProvider.GetService())); + + return builder; + } + + /// + /// Adds the OpenAI text embeddings service to the list. + /// + /// The instance to augment. + /// The OpenAI model id. + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0011")] + public static IServiceCollection AddOpenAITextEmbeddingGeneration(this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + openAIClient ?? serviceProvider.GetRequiredService(), + serviceProvider.GetService())); + } + + #endregion + + #region Chat Completion + + /// + /// Adds the Azure OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + OpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new AzureKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the Azure OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? 
modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + OpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new AzureKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the Azure OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + OpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the Azure OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + OpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the Azure OpenAI chat completion service to the list. 
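Because each chat-completion factory above is registered under both IChatCompletionService and ITextGenerationService, one deployment can serve both roles; a minimal sketch with placeholder values and an explicit serviceId for keyed resolution:

    using System;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.ChatCompletion;
    using Microsoft.SemanticKernel.TextGeneration;

    var kernel = Kernel.CreateBuilder()
        .AddAzureOpenAIChatCompletion(
            deploymentName: "my-gpt-deployment",
            endpoint: "https://my-resource.openai.azure.com/",
            apiKey: "<azure-openai-key>",
            serviceId: "azure-chat")
        .Build();

    // The same registration is visible through both service interfaces.
    var chat = kernel.GetRequiredService<IChatCompletionService>("azure-chat");
    var text = kernel.GetRequiredService<ITextGenerationService>("azure-chat");

    var history = new ChatHistory();
    history.AddUserMessage("Summarize keyed service registration in one sentence.");
    var reply = await chat.GetChatMessageContentAsync(history);
    Console.WriteLine(reply.Content);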
+ /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + OpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the Azure OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + OpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the Azure OpenAI chat completion with data service to the list. + /// + /// The instance. + /// Required configuration for Azure OpenAI chat completion with data. + /// A local identifier for the given AI service. + /// The same instance as . + /// + /// More information: + /// + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + AzureOpenAIChatCompletionWithDataConfig config, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNull(config); + + Func factory = (serviceProvider, _) => + new(config, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the Azure OpenAI chat completion with data service to the list. + /// + /// The instance. + /// Required configuration for Azure OpenAI chat completion with data. + /// A local identifier for the given AI service. + /// The same instance as . + /// + /// More information: + /// + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + AzureOpenAIChatCompletionWithDataConfig config, + string? 
serviceId = null) + { + Verify.NotNull(services); + Verify.NotNull(config); + + Func factory = (serviceProvider, _) => + new(config, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddOpenAIChatCompletion( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddOpenAIChatCompletion( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IKernelBuilder AddOpenAIChatCompletion( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + Func factory = (serviceProvider, _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model id + /// to use for the service. 
If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddOpenAIChatCompletion(this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + Func factory = (serviceProvider, _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + #endregion + + #region Images + + /// + /// Add the Azure OpenAI DallE text to image service to the list + /// + /// The instance to augment. + /// Azure OpenAI deployment name + /// Azure OpenAI deployment URL + /// Azure OpenAI API key + /// Model identifier + /// A local identifier for the given AI service + /// Azure OpenAI API version + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0012")] + public static IKernelBuilder AddAzureOpenAITextToImage( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? modelId = null, + string? serviceId = null, + string? apiVersion = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + apiVersion)); + + return builder; + } + + /// + /// Add the Azure OpenAI DallE text to image service to the list + /// + /// The instance to augment. + /// Azure OpenAI deployment name + /// Azure OpenAI deployment URL + /// Azure OpenAI API key + /// A local identifier for the given AI service + /// Model identifier + /// Maximum number of attempts to retrieve the text to image operation result. + /// The same instance as . + [Experimental("SKEXP0012")] + public static IServiceCollection AddAzureOpenAITextToImage( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + int maxRetryCount = 5) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + /// + /// Add the OpenAI Dall-E text to image service to the list + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0012")] + public static IKernelBuilder AddOpenAITextToImage( + this IKernelBuilder builder, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? 
httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToImageService( + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + /// + /// Add the OpenAI Dall-E text to image service to the list + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0012")] + public static IServiceCollection AddOpenAITextToImage(this IServiceCollection services, + string apiKey, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToImageService( + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + #endregion + + private static OpenAIClient CreateAzureOpenAIClient(string endpoint, AzureKeyCredential credentials, HttpClient? httpClient) => + new(new Uri(endpoint), credentials, ClientCore.GetOpenAIClientOptions(httpClient)); + + private static OpenAIClient CreateAzureOpenAIClient(string endpoint, TokenCredential credentials, HttpClient? httpClient) => + new(new Uri(endpoint), credentials, ClientCore.GetOpenAIClientOptions(httpClient)); +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs new file mode 100644 index 000000000000..381c86c6aaf1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Azure OpenAI text embedding service. +/// +[Experimental("SKEXP0011")] +public sealed class AzureOpenAITextEmbeddingGenerationService : ITextEmbeddingGenerationService +{ + private readonly AzureOpenAIClientCore _core; + + /// + /// Creates a new client instance using API Key auth. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextEmbeddingGenerationService( + string deploymentName, + string endpoint, + string apiKey, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? 
loggerFactory = null) + { + this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + /// Creates a new client instance supporting AAD auth. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextEmbeddingGenerationService( + string deploymentName, + string endpoint, + TokenCredential credential, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(deploymentName, endpoint, credential, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + /// Creates a new client. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextEmbeddingGenerationService( + string deploymentName, + OpenAIClient openAIClient, + string? modelId = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + public IReadOnlyDictionary Attributes => this._core.Attributes; + + /// + public Task>> GenerateEmbeddingsAsync( + IList data, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + return this._core.GetEmbeddingsAsync(data, kernel, cancellationToken); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs new file mode 100644 index 000000000000..cbd1f8327786 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// OpenAI text embedding service. 
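The embedding service can also be constructed directly around an existing OpenAIClient, which is what the OpenAIClient-based registrations above do internally; a sketch with placeholder values:

    #pragma warning disable SKEXP0011

    using System;
    using Azure;
    using Azure.AI.OpenAI;
    using Microsoft.SemanticKernel.Connectors.OpenAI;

    var client = new OpenAIClient(
        new Uri("https://my-resource.openai.azure.com/"),
        new AzureKeyCredential("<azure-openai-key>"));

    var embeddingService = new AzureOpenAITextEmbeddingGenerationService(
        deploymentName: "my-embedding-deployment",
        openAIClient: client,
        modelId: "text-embedding-ada-002");

    // GenerateEmbeddingsAsync yields one ReadOnlyMemory<float> per input string.
    var vectors = await embeddingService.GenerateEmbeddingsAsync(new[] { "first sentence", "second sentence" });
    Console.WriteLine($"{vectors.Count} embeddings, {vectors[0].Length} dimensions each.");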
+/// +[Experimental("SKEXP0011")] +public sealed class OpenAITextEmbeddingGenerationService : ITextEmbeddingGenerationService +{ + private readonly OpenAIClientCore _core; + + /// + /// Create an instance of the OpenAI text embedding connector + /// + /// Model name + /// OpenAI API Key + /// OpenAI Organization Id (usually optional) + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAITextEmbeddingGenerationService( + string modelId, + string apiKey, + string? organization = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(modelId, apiKey, organization, httpClient, loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + /// Create an instance of the OpenAI text embedding connector + /// + /// Model name + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAITextEmbeddingGenerationService( + string modelId, + OpenAIClient openAIClient, + ILoggerFactory? loggerFactory = null) + { + this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService))); + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + public IReadOnlyDictionary Attributes => this._core.Attributes; + + /// + public Task>> GenerateEmbeddingsAsync( + IList data, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + this._core.LogActionDetails(); + return this._core.GetEmbeddingsAsync(data, kernel, cancellationToken); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs new file mode 100644 index 000000000000..20111ca99f88 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Azure OpenAI text generation client. +/// +public sealed class AzureOpenAITextGenerationService : ITextGenerationService +{ + private readonly AzureOpenAIClientCore _core; + + /// + public IReadOnlyDictionary Attributes => this._core.Attributes; + + /// + /// Creates a new client instance using API Key auth + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextGenerationService( + string deploymentName, + string endpoint, + string apiKey, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? 
loggerFactory = null) + { + this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + /// Creates a new client instance supporting AAD auth + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextGenerationService( + string deploymentName, + string endpoint, + TokenCredential credential, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(deploymentName, endpoint, credential, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + /// Creates a new client instance using the specified OpenAIClient + /// + /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextGenerationService( + string deploymentName, + OpenAIClient openAIClient, + string? modelId = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + return this._core.GetTextResultsAsync(prompt, executionSettings, kernel, cancellationToken); + } + + /// + public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + return this._core.GetStreamingTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs new file mode 100644 index 000000000000..c5fd264f9075 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// OpenAI text generation service. 
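A sketch of calling the Azure text generation service directly through the AAD constructor above; DefaultAzureCredential comes from the Azure.Identity package, and the deployment values are placeholders.

    using System;
    using Azure.Identity;
    using Microsoft.SemanticKernel.Connectors.OpenAI;

    var textService = new AzureOpenAITextGenerationService(
        deploymentName: "my-gpt-deployment",
        endpoint: "https://my-resource.openai.azure.com/",
        credential: new DefaultAzureCredential());

    var contents = await textService.GetTextContentsAsync(
        "List three uses of keyed services, one line each.",
        new OpenAIPromptExecutionSettings { MaxTokens = 128, Temperature = 0.2 });

    foreach (var content in contents)
    {
        Console.WriteLine(content.Text);
    }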
+/// +public sealed class OpenAITextGenerationService : ITextGenerationService +{ + private readonly OpenAIClientCore _core; + + /// + public IReadOnlyDictionary Attributes => this._core.Attributes; + + /// + /// Create an instance of the OpenAI text generation connector + /// + /// Model name + /// OpenAI API Key + /// OpenAI Organization Id (usually optional) + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAITextGenerationService( + string modelId, + string apiKey, + string? organization = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new(modelId, apiKey, organization, httpClient, loggerFactory?.CreateLogger(typeof(OpenAITextGenerationService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); + } + + /// + /// Create an instance of the OpenAI text generation connector + /// + /// Model name + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAITextGenerationService( + string modelId, + OpenAIClient openAIClient, + ILoggerFactory? loggerFactory = null) + { + this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextGenerationService))); + + this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + return this._core.GetTextResultsAsync(prompt, executionSettings, kernel, cancellationToken); + } + + /// + public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + return this._core.GetStreamingTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs new file mode 100644 index 000000000000..5ce38d77149b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.AI.OpenAI; +using Azure.Core.Pipeline; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextToImage; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Azure OpenAI Image generation +/// +/// +[Experimental("SKEXP0012")] +public sealed class AzureOpenAITextToImageService : ITextToImageService +{ + private readonly OpenAIClient _client; + private readonly ILogger _logger; + private readonly string _deploymentName; + private readonly Dictionary _attributes = new(); + + /// + public IReadOnlyDictionary Attributes => this._attributes; + + /// + /// Gets the key used to store the deployment name in the dictionary. 
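The streaming counterpart on the non-Azure OpenAITextGenerationService looks like this; the model id and key are placeholders.

    using System;
    using Microsoft.SemanticKernel.Connectors.OpenAI;

    var openAIText = new OpenAITextGenerationService(
        modelId: "gpt-3.5-turbo-instruct",
        apiKey: "<openai-api-key>");

    // Chunks arrive as they are generated; Text carries the incremental delta.
    await foreach (var chunk in openAIText.GetStreamingTextContentsAsync("Tell a short story about a build pipeline."))
    {
        Console.Write(chunk.Text);
    }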
+ /// + public static string DeploymentNameKey => "DeploymentName"; + + /// + /// Create a new instance of Azure OpenAI image generation service + /// + /// Deployment name identifier + /// Azure OpenAI deployment URL + /// Azure OpenAI API key + /// Model identifier + /// Custom for HTTP requests. + /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. + /// Azure OpenAI Endpoint ApiVersion + public AzureOpenAITextToImageService( + string deploymentName, + string endpoint, + string apiKey, + string? modelId, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null, + string? apiVersion = null) + { + Verify.NotNullOrWhiteSpace(apiKey); + Verify.NotNullOrWhiteSpace(deploymentName); + + this._deploymentName = deploymentName; + + if (modelId is not null) + { + this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + this.AddAttribute(DeploymentNameKey, deploymentName); + + this._logger = loggerFactory?.CreateLogger(typeof(AzureOpenAITextToImageService)) ?? NullLogger.Instance; + + var connectorEndpoint = !string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri; + if (connectorEndpoint is null) + { + throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); + } + + this._client = new(new Uri(connectorEndpoint), + new AzureKeyCredential(apiKey), + GetClientOptions(httpClient, apiVersion)); + } + + /// + public async Task GenerateImageAsync( + string description, + int width, + int height, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(description); + + var size = (width, height) switch + { + (1024, 1024) => ImageSize.Size1024x1024, + (1792, 1024) => ImageSize.Size1792x1024, + (1024, 1792) => ImageSize.Size1024x1792, + _ => throw new NotSupportedException("Dall-E 3 can only generate images of the following sizes 1024x1024, 1792x1024, or 1024x1792") + }; + + Response imageGenerations; + try + { + imageGenerations = await this._client.GetImageGenerationsAsync( + new ImageGenerationOptions + { + DeploymentName = this._deploymentName, + Prompt = description, + Size = size, + }, cancellationToken).ConfigureAwait(false); + } + catch (RequestFailedException e) + { + throw e.ToHttpOperationException(); + } + + if (!imageGenerations.HasValue) + { + throw new KernelException("The response does not contain an image result"); + } + + if (imageGenerations.Value.Data.Count == 0) + { + throw new KernelException("The response does not contain any image"); + } + + return imageGenerations.Value.Data[0].Url.AbsoluteUri; + } + + private static OpenAIClientOptions GetClientOptions(HttpClient? httpClient, string? apiVersion) + { + OpenAIClientOptions.ServiceVersion version = apiVersion switch + { + // Dalle-E-3 is only supported in 2023-12-01-preview + "2023-12-01-preview" => OpenAIClientOptions.ServiceVersion.V2023_12_01_Preview, + _ => OpenAIClientOptions.ServiceVersion.V2023_12_01_Preview + }; + + var options = new OpenAIClientOptions(version) + { + Diagnostics = { ApplicationId = HttpHeaderValues.UserAgent } + }; + + if (httpClient != null) + { + // Disable retries when using a custom HttpClient + options.RetryPolicy = new RetryPolicy(maxRetries: 0); + + options.Transport = new HttpClientTransport(httpClient); + } + + return options; + } + + internal void AddAttribute(string key, string? 
value) + { + if (!string.IsNullOrEmpty(value)) + { + this._attributes.Add(key, value); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs new file mode 100644 index 000000000000..837b87784502 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.TextToImage; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// OpenAI text to image service. +/// +[Experimental("SKEXP0012")] +public sealed class OpenAITextToImageService : ITextToImageService +{ + private readonly OpenAITextToImageClientCore _core; + + /// + /// OpenAI REST API endpoint + /// + private const string OpenAIEndpoint = "https://api.openai.com/v1/images/generations"; + + /// + /// Optional value for the OpenAI-Organization header. + /// + private readonly string? _organizationHeaderValue; + + /// + /// Value for the authorization header. + /// + private readonly string _authorizationHeaderValue; + + /// + /// Initializes a new instance of the class. + /// + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAITextToImageService( + string apiKey, + string? organization = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNullOrWhiteSpace(apiKey); + this._authorizationHeaderValue = $"Bearer {apiKey}"; + this._organizationHeaderValue = organization; + + this._core = new(httpClient, loggerFactory?.CreateLogger(this.GetType())); + this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); + + this._core.RequestCreated += (_, request) => + { + request.Headers.Add("Authorization", this._authorizationHeaderValue); + if (!string.IsNullOrEmpty(this._organizationHeaderValue)) + { + request.Headers.Add("OpenAI-Organization", this._organizationHeaderValue); + } + }; + } + + /// + public IReadOnlyDictionary Attributes => this._core.Attributes; + + /// + public Task GenerateImageAsync(string description, int width, int height, Kernel? 
kernel = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(description); + if (width != height || width != 256 && width != 512 && width != 1024) + { + throw new ArgumentOutOfRangeException(nameof(width), width, "OpenAI can generate only square images of size 256x256, 512x512, or 1024x1024."); + } + + return this.GenerateImageAsync(description, width, height, "url", x => x.Url, cancellationToken); + } + + private async Task GenerateImageAsync( + string description, + int width, int height, + string format, Func extractResponse, + CancellationToken cancellationToken) + { + Verify.NotNull(extractResponse); + + var requestBody = JsonSerializer.Serialize(new TextToImageRequest + { + Prompt = description, + Size = $"{width}x{height}", + Count = 1, + Format = format, + }); + + var list = await this._core.ExecuteImageGenerationRequestAsync(OpenAIEndpoint, requestBody, extractResponse!, cancellationToken).ConfigureAwait(false); + return list[0]; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs new file mode 100644 index 000000000000..b5988a91cda4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Text to image request +/// +internal sealed class TextToImageRequest +{ + /// + /// Image prompt + /// + [JsonPropertyName("prompt")] + [JsonPropertyOrder(1)] + public string Prompt { get; set; } = string.Empty; + + /// + /// Image size + /// + [JsonPropertyName("size")] + [JsonPropertyOrder(2)] + public string Size { get; set; } = "256x256"; + + /// + /// How many images to generate + /// + [JsonPropertyName("n")] + [JsonPropertyOrder(3)] + public int Count { get; set; } = 1; + + /// + /// Image format, "url" or "b64_json" + /// + [JsonPropertyName("response_format")] + [JsonPropertyOrder(4)] + public string Format { get; set; } = "url"; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs new file mode 100644 index 000000000000..4894aad65a04 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. 
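+
+// Shape note: this DTO mirrors the images-endpoint JSON, i.e. a "created" timestamp and a
+// "data" array whose entries carry either a "url" or a "b64_json" value, depending on the
+// "response_format" sent in the request above.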
+ +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Text to image response +/// +internal class TextToImageResponse +{ + /// + /// OpenAI Image response + /// + public sealed class Image + { + /// + /// URL to the image created + /// + [JsonPropertyName("url")] + [SuppressMessage("Design", "CA1056:URI return values should not be strings", Justification = "Using the original value")] + public string Url { get; set; } = string.Empty; + + /// + /// Image content in base64 format + /// + [JsonPropertyName("b64_json")] + public string AsBase64 { get; set; } = string.Empty; + } + + /// + /// List of possible images + /// + [JsonPropertyName("data")] + public IList Images { get; set; } = new List(); + + /// + /// Creation time + /// + [JsonPropertyName("created")] + public int CreatedTime { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs b/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs new file mode 100644 index 000000000000..2650775f034b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using Azure.AI.OpenAI; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// Represents a behavior for OpenAI tool calls. +public abstract class ToolCallBehavior +{ + // NOTE: Right now, the only tools that are available are for function calling. In the future, + // this class can be extended to support additional kinds of tools, including composite ones: + // the OpenAIPromptExecutionSettings has a single ToolCallBehavior property, but we could + // expose a `public static ToolCallBehavior Composite(params ToolCallBehavior[] behaviors)` + // or the like to allow multiple distinct tools to be provided, should that be appropriate. + // We can also consider additional forms of tools, such as ones that dynamically examine + // the Kernel, KernelArguments, etc., and dynamically contribute tools to the ChatCompletionsOptions. + + /// + /// The default maximum number of tool-call auto-invokes that can be made in a single request. + /// + /// + /// After this number of iterations as part of a single user request is reached, auto-invocation + /// will be disabled (e.g. will behave like )). + /// This is a safeguard against possible runaway execution if the model routinely re-requests + /// the same function over and over. It is currently hardcoded, but in the future it could + /// be made configurable by the developer. Other configuration is also possible in the future, + /// such as a delegate on the instance that can be invoked upon function call failure (e.g. failure + /// to find the requested function, failure to invoke the function, etc.), with behaviors for + /// what to do in such a case, e.g. respond to the model telling it to try again. With parallel tool call + /// support, where the model can request multiple tools in a single response, it is significantly + /// less likely that this limit is reached, as most of the time only a single request is needed. + /// + private const int DefaultMaximumAutoInvokeAttempts = 5; + + /// + /// Gets an instance that will provide all of the 's plugins' function information. + /// Function call requests from the model will be propagated back to the caller. 
+ /// + /// + /// If no is available, no function information will be provided to the model. + /// + public static ToolCallBehavior EnableKernelFunctions { get; } = new KernelFunctions(autoInvoke: false); + + /// + /// Gets an instance that will both provide all of the 's plugins' function information + /// to the model and attempt to automatically handle any function call requests. + /// + /// + /// When successful, tool call requests from the model become an implementation detail, with the service + /// handling invoking any requested functions and supplying the results back to the model. + /// If no is available, no function information will be provided to the model. + /// + public static ToolCallBehavior AutoInvokeKernelFunctions { get; } = new KernelFunctions(autoInvoke: true); + + /// Gets an instance that will provide the specified list of functions to the model. + /// The functions that should be made available to the model. + /// true to attempt to automatically handle function call requests; otherwise, false. + /// + /// The that may be set into + /// to indicate that the specified functions should be made available to the model. + /// + public static ToolCallBehavior EnableFunctions(IEnumerable functions, bool autoInvoke = false) + { + Verify.NotNull(functions); + return new EnabledFunctions(functions, autoInvoke); + } + + /// Gets an instance that will request the model to use the specified function. + /// The function the model should request to use. + /// true to attempt to automatically handle function call requests; otherwise, false. + /// + /// The that may be set into + /// to indicate that the specified function should be requested by the model. + /// + public static ToolCallBehavior RequireFunction(OpenAIFunction function, bool autoInvoke = false) + { + Verify.NotNull(function); + return new RequiredFunction(function, autoInvoke); + } + + /// Initializes the instance; prevents external instantiation. + private ToolCallBehavior(bool autoInvoke) + { + this.MaximumAutoInvokeAttempts = autoInvoke ? DefaultMaximumAutoInvokeAttempts : 0; + } + + /// Gets how many requests are part of a single interaction should include this tool in the request. + /// + /// This should be greater than or equal to . It defaults to . + /// Once this limit is reached, the tools will no longer be included in subsequent retries as part of the operation, e.g. + /// if this is 1, the first request will include the tools, but the subsequent response sending back the tool's result + /// will not include the tools for further use. + /// + internal virtual int MaximumUseAttempts => int.MaxValue; + + /// Gets how many tool call request/response roundtrips are supported with auto-invocation. + /// + /// To disable auto invocation, this can be set to 0. + /// + internal int MaximumAutoInvokeAttempts { get; } + + /// + /// Gets whether validation against a specified list is required before allowing the model to request a function from the kernel. + /// + /// true if it's ok to invoke any kernel function requested by the model if it's found; false if a request needs to be validated against an allow list. + internal virtual bool AllowAnyRequestedKernelFunction => false; + + /// Configures the with any tools this provides. + /// The used for the operation. This can be queried to determine what tools to provide into the . + /// The destination to configure. + internal abstract void ConfigureOptions(Kernel? 
kernel, ChatCompletionsOptions options); + + /// + /// Represents a that will provide to the model all available functions from a + /// provided by the client. + /// + internal sealed class KernelFunctions : ToolCallBehavior + { + internal KernelFunctions(bool autoInvoke) : base(autoInvoke) { } + + public override string ToString() => $"{nameof(KernelFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0})"; + + internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options) + { + // If no kernel is provided, we don't have any tools to provide. + if (kernel is not null) + { + // Provide all functions from the kernel. + IList functions = kernel.Plugins.GetFunctionsMetadata(); + if (functions.Count > 0) + { + options.ToolChoice = ChatCompletionsToolChoice.Auto; + for (int i = 0; i < functions.Count; i++) + { + options.Tools.Add(new ChatCompletionsFunctionToolDefinition(functions[i].ToOpenAIFunction().ToFunctionDefinition())); + } + } + } + } + + internal override bool AllowAnyRequestedKernelFunction => true; + } + + /// + /// Represents a that provides a specified list of functions to the model. + /// + internal sealed class EnabledFunctions : ToolCallBehavior + { + private readonly OpenAIFunction[] _openAIFunctions; + private readonly ChatCompletionsFunctionToolDefinition[] _functions; + + public EnabledFunctions(IEnumerable functions, bool autoInvoke) : base(autoInvoke) + { + this._openAIFunctions = functions.ToArray(); + + var defs = new ChatCompletionsFunctionToolDefinition[this._openAIFunctions.Length]; + for (int i = 0; i < defs.Length; i++) + { + defs[i] = new ChatCompletionsFunctionToolDefinition(this._openAIFunctions[i].ToFunctionDefinition()); + } + this._functions = defs; + } + + public override string ToString() => $"{nameof(EnabledFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {string.Join(", ", this._functions.Select(f => f.Name))}"; + + internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options) + { + OpenAIFunction[] openAIFunctions = this._openAIFunctions; + ChatCompletionsFunctionToolDefinition[] functions = this._functions; + Debug.Assert(openAIFunctions.Length == functions.Length); + + if (openAIFunctions.Length > 0) + { + bool autoInvoke = base.MaximumAutoInvokeAttempts > 0; + + // If auto-invocation is specified, we need a kernel to be able to invoke the functions. + // Lack of a kernel is fatal: we don't want to tell the model we can handle the functions + // and then fail to do so, so we fail before we get to that point. This is an error + // on the consumers behalf: if they specify auto-invocation with any functions, they must + // specify the kernel and the kernel must contain those functions. + if (autoInvoke && kernel is null) + { + throw new KernelException($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided."); + } + + options.ToolChoice = ChatCompletionsToolChoice.Auto; + for (int i = 0; i < openAIFunctions.Length; i++) + { + // Make sure that if auto-invocation is specified, every enabled function can be found in the kernel. + if (autoInvoke) + { + Debug.Assert(kernel is not null); + OpenAIFunction f = openAIFunctions[i]; + if (!kernel!.Plugins.TryGetFunction(f.PluginName, f.FunctionName, out _)) + { + throw new KernelException($"The specified {nameof(EnabledFunctions)} function {f.FullyQualifiedName} is not available in the kernel."); + } + } + + // Add the function. 
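+                    // (The kernel lookup above only runs when auto-invoke is enabled; without
+                    // auto-invoke the definition is still forwarded, and any call request from
+                    // the model is surfaced back to the caller rather than executed.)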
+ options.Tools.Add(functions[i]); + } + } + } + } + + /// Represents a that requests the model use a specific function. + internal sealed class RequiredFunction : ToolCallBehavior + { + private readonly ChatCompletionsFunctionToolDefinition _tool; + private readonly ChatCompletionsToolChoice _choice; + + public RequiredFunction(OpenAIFunction function, bool autoInvoke) : base(autoInvoke) + { + this._tool = new ChatCompletionsFunctionToolDefinition(function.ToFunctionDefinition()); + this._choice = new ChatCompletionsToolChoice(this._tool); + } + + public override string ToString() => $"{nameof(RequiredFunction)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {this._tool.Name}"; + + internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options) + { + options.ToolChoice = this._choice; + options.Tools.Add(this._tool); + } + + /// Gets how many requests are part of a single interaction should include this tool in the request. + /// + /// Unlike and , this must use 1 as the maximum + /// use attempts. Otherwise, every call back to the model _requires_ it to invoke the function (as opposed + /// to allows it), which means we end up doing the same work over and over and over until the maximum is reached. + /// Thus for "requires", we must send the tool information only once. + /// + internal override int MaximumUseAttempts => 1; + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj index 748c53e46567..a541b834ba4f 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj @@ -4,14 +4,21 @@ SemanticKernel.Connectors.UnitTests SemanticKernel.Connectors.UnitTests net6.0 + 12 LatestMajor true enable disable false - CA2007,VSTHRD111 + CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0010,SKEXP0011,SKEXP0012,SKEXP0013,SKEXP0014,SKEXP0020,SKEXP0021,SKEXP0022,SKEXP0023,SKEXP0024,SKEXP0025,SKEXP0026,SKEXP0027,SKEXP0028,SKEXP0029,SKEXP0030,SKEXP0031,SKEXP0032,SKEXP0052 + + + + + + @@ -20,20 +27,24 @@ runtime; build; native; contentfiles; analyzers; buildtransitive all + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + - - - + - + + + - - - + + + @@ -43,19 +54,14 @@ + - - Always - - - Always - - + Always - + Always diff --git a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs deleted file mode 100644 index 19d0b975acdf..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Net.Http; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.HuggingFace.TextCompletion; - -/// -/// Unit tests for class. 
-/// -public sealed class HuggingFaceTextCompletionTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - - public HuggingFaceTextCompletionTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._messageHandlerStub.ResponseToReturn.Content = new StringContent(HuggingFaceTestHelper.GetTestResponse("completion_test_response.json")); - - this._httpClient = new HttpClient(this._messageHandlerStub, false); - } - - [Fact] - public async Task SpecifiedModelShouldBeUsedAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.EndsWith("/fake-model", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task NoAuthorizationHeaderShouldBeAddedIfApiKeyIsNotProvidedAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", apiKey: null, httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.False(this._messageHandlerStub.RequestHeaders?.Contains("Authorization")); - } - - [Fact] - public async Task AuthorizationHeaderShouldBeAddedIfApiKeyIsProvidedAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", apiKey: "fake-api-key", httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.True(this._messageHandlerStub.RequestHeaders?.Contains("Authorization")); - - var values = this._messageHandlerStub.RequestHeaders!.GetValues("Authorization"); - - var value = values.SingleOrDefault(); - Assert.Equal("Bearer fake-api-key", value); - } - - [Fact] - public async Task UserAgentHeaderShouldBeUsedAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.True(this._messageHandlerStub.RequestHeaders?.Contains("User-Agent")); - - var values = this._messageHandlerStub.RequestHeaders!.GetValues("User-Agent"); - - var value = values.SingleOrDefault(); - Assert.Equal("Semantic-Kernel", value); - } - - [Fact] - public async Task ProvidedEndpointShouldBeUsedAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.StartsWith("https://fake-random-test-host/fake-path", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task HttpClientBaseAddressShouldBeUsedAsync() - { - //Arrange - this._httpClient.BaseAddress = new Uri("https://fake-random-test-host/fake-path"); - - var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.StartsWith("https://fake-random-test-host/fake-path", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task DefaultAddressShouldBeUsedAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.StartsWith("https://api-inference.huggingface.co/models", 
this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task ModelUrlShouldBeBuiltSuccessfullyAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.Equal("https://fake-random-test-host/fake-path/fake-model", this._messageHandlerStub.RequestUri?.AbsoluteUri); - } - - [Fact] - public async Task ShouldSendPromptToServiceAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", httpClient: this._httpClient); - - //Act - await sut.GetCompletionsAsync("fake-text"); - - //Assert - var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); - Assert.NotNull(requestPayload); - - Assert.Equal("fake-text", requestPayload.Input); - } - - [Fact] - public async Task ShouldHandleServiceResponseAsync() - { - //Arrange - var sut = new HuggingFaceTextCompletion("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this._httpClient); - - //Act - var result = await sut.GetCompletionsAsync("fake-text"); - - //Assert - Assert.NotNull(result); - - var completions = result.SingleOrDefault(); - Assert.NotNull(completions); - - var completion = await completions.GetCompletionAsync(); - Assert.Equal("This is test completion response", completion); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextEmbedding/HuggingFaceEmbeddingGenerationTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextEmbedding/HuggingFaceEmbeddingGenerationTests.cs index 6a4a973408b6..5b90bba2ae47 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextEmbedding/HuggingFaceEmbeddingGenerationTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextEmbedding/HuggingFaceEmbeddingGenerationTests.cs @@ -6,13 +6,13 @@ using System.Net.Http; using System.Text.Json; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextEmbedding; +using Microsoft.SemanticKernel.Connectors.HuggingFace; using Xunit; namespace SemanticKernel.Connectors.UnitTests.HuggingFace.TextEmbedding; /// -/// Unit tests for class. +/// Unit tests for class. 
/// public sealed class HuggingFaceEmbeddingGenerationTests : IDisposable { @@ -31,7 +31,7 @@ public HuggingFaceEmbeddingGenerationTests() public async Task SpecifiedModelShouldBeUsedAsync() { //Arrange - var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGenerationService("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -44,7 +44,7 @@ public async Task SpecifiedModelShouldBeUsedAsync() public async Task UserAgentHeaderShouldBeUsedAsync() { //Arrange - var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGenerationService("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -62,7 +62,7 @@ public async Task UserAgentHeaderShouldBeUsedAsync() public async Task ProvidedEndpointShouldBeUsedAsync() { //Arrange - var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGenerationService("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -77,7 +77,7 @@ public async Task HttpClientBaseAddressShouldBeUsedAsync() //Arrange this._httpClient.BaseAddress = new Uri("https://fake-random-test-host/fake-path"); - var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this._httpClient); + var sut = new HuggingFaceTextEmbeddingGenerationService("fake-model", this._httpClient); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -90,7 +90,7 @@ public async Task HttpClientBaseAddressShouldBeUsedAsync() public async Task ModelUrlShouldBeBuiltSuccessfullyAsync() { //Arrange - var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this._httpClient, endpoint: "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGenerationService("fake-model", this._httpClient, endpoint: "https://fake-random-test-host/fake-path"); //Act await sut.GenerateEmbeddingsAsync(new List()); @@ -103,7 +103,7 @@ public async Task ModelUrlShouldBeBuiltSuccessfullyAsync() public async Task ShouldSendDataToServiceAsync() { //Arrange - var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGenerationService("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); var data = new List() { "test_string_1", "test_string_2", "test_string_3" }; //Act @@ -120,7 +120,7 @@ public async Task ShouldSendDataToServiceAsync() public async Task ShouldHandleServiceResponseAsync() { //Arrange - var sut = new HuggingFaceTextEmbeddingGeneration("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); + var sut = new HuggingFaceTextEmbeddingGenerationService("fake-model", this._httpClient, "https://fake-random-test-host/fake-path"); //Act var embeddings = await sut.GenerateEmbeddingsAsync(new List()); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextGeneration/HuggingFaceTextGenerationTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextGeneration/HuggingFaceTextGenerationTests.cs new file mode 100644 index 000000000000..bcb762124db4 --- /dev/null +++ 
b/dotnet/src/Connectors/Connectors.UnitTests/HuggingFace/TextGeneration/HuggingFaceTextGenerationTests.cs @@ -0,0 +1,247 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.HuggingFace; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.HuggingFace.TextGeneration; + +/// +/// Unit tests for class. +/// +public sealed class HuggingFaceTextGenerationTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + + public HuggingFaceTextGenerationTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._messageHandlerStub.ResponseToReturn.Content = new StringContent(HuggingFaceTestHelper.GetTestResponse("completion_test_response.json")); + + this._httpClient = new HttpClient(this._messageHandlerStub, false); + } + + [Fact] + public async Task SpecifiedModelShouldBeUsedAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + Assert.EndsWith("/fake-model", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task NoAuthorizationHeaderShouldBeAddedIfApiKeyIsNotProvidedAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", apiKey: null, httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + Assert.False(this._messageHandlerStub.RequestHeaders?.Contains("Authorization")); + } + + [Fact] + public async Task AuthorizationHeaderShouldBeAddedIfApiKeyIsProvidedAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", apiKey: "fake-api-key", httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + Assert.True(this._messageHandlerStub.RequestHeaders?.Contains("Authorization")); + + var values = this._messageHandlerStub.RequestHeaders!.GetValues("Authorization"); + + var value = values.SingleOrDefault(); + Assert.Equal("Bearer fake-api-key", value); + } + + [Fact] + public async Task UserAgentHeaderShouldBeUsedAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + Assert.True(this._messageHandlerStub.RequestHeaders?.Contains("User-Agent")); + + var values = this._messageHandlerStub.RequestHeaders!.GetValues("User-Agent"); + + var value = values.SingleOrDefault(); + Assert.Equal("Semantic-Kernel", value); + } + + [Fact] + public async Task ProvidedEndpointShouldBeUsedAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + Assert.StartsWith("https://fake-random-test-host/fake-path", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task HttpClientBaseAddressShouldBeUsedAsync() + { + //Arrange + this._httpClient.BaseAddress = new Uri("https://fake-random-test-host/fake-path"); + + var sut = new 
HuggingFaceTextGenerationService("fake-model", httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + Assert.StartsWith("https://fake-random-test-host/fake-path", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task DefaultAddressShouldBeUsedAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + Assert.StartsWith("https://api-inference.huggingface.co/models", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ModelUrlShouldBeBuiltSuccessfullyAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + Assert.Equal("https://fake-random-test-host/fake-path/fake-model", this._messageHandlerStub.RequestUri?.AbsoluteUri); + } + + [Fact] + public async Task ShouldSendPromptToServiceAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + + Assert.Equal("fake-text", requestPayload.Input); + } + + [Fact] + public async Task ShouldHandleServiceResponseAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this._httpClient); + + //Act + var contents = await sut.GetTextContentsAsync("fake-test"); + + //Assert + Assert.NotNull(contents); + + var content = contents.SingleOrDefault(); + Assert.NotNull(content); + + Assert.Equal("This is test completion response", content.Text); + } + + [Fact] + public async Task GetTextContentsShouldHaveModelIdDefinedAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this._httpClient); + + //Act + var contents = await sut.GetTextContentsAsync("fake-test"); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(@" + [ + { + ""generated_text"": ""Why the sky is blue? | Dept. of Science & Mathematics Education | University of Notre Dame\nWhen I was in high school I had a pretty simple conception of reality. I believed that if something made sense to me, then it must also be true. 
I believed that some problems were so fundamental that I couldn’t understand"" + } + ]", + Encoding.UTF8, + "application/json") + }; + + // Act + var textContent = await sut.GetTextContentAsync("Any prompt"); + + // Assert + Assert.NotNull(textContent.ModelId); + Assert.Equal("fake-model", textContent.ModelId); + } + + [Fact] + public async Task GetStreamingTextContentsShouldHaveModelIdDefinedAsync() + { + //Arrange + var sut = new HuggingFaceTextGenerationService("fake-model", endpoint: "https://fake-random-test-host/fake-path", httpClient: this._httpClient); + + //Act + var contents = await sut.GetTextContentsAsync("fake-test"); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(@" + [ + { + ""generated_text"": ""Why the sky is blue? | Dept. of Science & Mathematics Education | University of Notre Dame\nWhen I was in high school I had a pretty simple conception of reality. I believed that if something made sense to me, then it must also be true. I believed that some problems were so fundamental that I couldn’t understand"" + } + ]", + Encoding.UTF8, + "application/json") + }; + + // Act + StreamingTextContent? lastTextContent = null; + await foreach (var textContent in sut.GetStreamingTextContentsAsync("Any prompt")) + { + lastTextContent = textContent; + }; + + // Assert + Assert.NotNull(lastTextContent!.ModelId); + Assert.Equal("fake-model", lastTextContent.ModelId); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs index ced200105aab..8b16482a806d 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Chroma/ChromaMemoryStoreTests.cs @@ -7,14 +7,13 @@ using System.Text.Json; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma.Http.ApiSchema; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Chroma; using Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Chroma; +namespace SemanticKernel.Connectors.UnitTests.Chroma; /// /// Unit tests for class. 
@@ -115,7 +114,7 @@ public async Task ItThrowsExceptionOnNonExistentCollectionDeletionAsync()
         var exception = await Record.ExceptionAsync(() => store.DeleteCollectionAsync(CollectionName));

         // Assert
-        Assert.IsType<SKException>(exception);
+        Assert.IsType<KernelException>(exception);
         Assert.Equal(ExpectedExceptionMessage, exception.Message);
     }

@@ -202,7 +201,7 @@ public async Task ItThrowsExceptionOnGettingMemoryRecordFromNonExistingCollectio
         this._chromaClientMock
             .Setup(client => client.GetCollectionAsync(CollectionName, CancellationToken.None))
-            .Throws(new SKException(CollectionDoesNotExistErrorMessage));
+            .Throws(new KernelException(CollectionDoesNotExistErrorMessage));

         var store = new ChromaMemoryStore(this._chromaClientMock.Object);

@@ -210,7 +209,7 @@ public async Task ItThrowsExceptionOnGettingMemoryRecordFromNonExistingCollectio
         var exception = await Record.ExceptionAsync(() => store.GetAsync(CollectionName, MemoryRecordKey, withEmbedding: true));

         // Assert
-        Assert.IsType<SKException>(exception);
+        Assert.IsType<KernelException>(exception);
         Assert.Equal(CollectionDoesNotExistErrorMessage, exception.Message);
     }
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/DuckDB/DuckDBMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/DuckDB/DuckDBMemoryStoreTests.cs
index f2930cc47a34..3cb3c883c409 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/DuckDB/DuckDBMemoryStoreTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/DuckDB/DuckDBMemoryStoreTests.cs
@@ -4,12 +4,13 @@
 using System.Collections.Generic;
 using System.Collections.Immutable;
 using System.Linq;
+using System.Runtime.CompilerServices;
 using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Connectors.Memory.DuckDB;
+using Microsoft.SemanticKernel.Connectors.DuckDB;
 using Microsoft.SemanticKernel.Memory;
 using Xunit;

-namespace SemanticKernel.Connectors.UnitTests.Memory.DuckDB;
+namespace SemanticKernel.Connectors.UnitTests.DuckDB;

 /// <summary>
 /// Unit tests of <see cref="DuckDBMemoryStore"/>.
@@ -19,6 +20,11 @@ public class DuckDBMemoryStoreTests { private int _collectionNum = 0; + private string GetTestCollectionName([CallerMemberName] string testName = "") + { + return testName + this._collectionNum++; + } + private IEnumerable CreateBatchRecords(int numRecords) { Assert.True(numRecords % 2 == 0, "Number of records must be even"); @@ -61,8 +67,7 @@ public async Task ItCanCreateAndGetCollectionAsync() { // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); // Act await db.CreateCollectionAsync(collection); @@ -78,15 +83,14 @@ public async Task ItCanCheckIfCollectionExistsAsync() { // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); - string collection = "my_collection"; - this._collectionNum++; + string collection = "my_collection+++"; // Act await db.CreateCollectionAsync(collection); // Assert - Assert.True(await db.DoesCollectionExistAsync("my_collection")); - Assert.False(await db.DoesCollectionExistAsync("my_collection2")); + Assert.True(await db.DoesCollectionExistAsync("my_collection+++")); + Assert.False(await db.DoesCollectionExistAsync("my_collection---")); } [Fact] @@ -94,8 +98,7 @@ public async Task CreatingDuplicateCollectionDoesNothingAsync() { // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); // Act await db.CreateCollectionAsync(collection); @@ -112,8 +115,8 @@ public async Task CollectionsCanBeDeletedAsync() { // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); + await db.CreateCollectionAsync(collection); var collections = await db.GetCollectionsAsync().ToListAsync(); Assert.True(collections.Count > 0); @@ -169,8 +172,7 @@ public async Task GetAsyncReturnsEmptyEmbeddingUnlessSpecifiedAsync() embedding: new float[] { 1, 2, 3 }, key: null, timestamp: null); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); // Act await db.CreateCollectionAsync(collection); @@ -197,8 +199,7 @@ public async Task ItCanUpsertAndRetrieveARecordWithNoTimestampAsync() embedding: new float[] { 1, 2, 3 }, key: null, timestamp: null); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); // Act await db.CreateCollectionAsync(collection); @@ -228,8 +229,7 @@ public async Task ItCanUpsertAndRetrieveARecordWithTimestampAsync() embedding: new float[] { 1, 2, 3 }, key: null, timestamp: DateTimeOffset.UtcNow); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); // Act await db.CreateCollectionAsync(collection); @@ -263,8 +263,7 @@ public async Task UpsertReplacesExistingRecordWithSameIdAsync() text: "text2", description: "description2", embedding: new float[] { 1, 2, 4 }); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); // Act await db.CreateCollectionAsync(collection); @@ -292,8 +291,7 @@ public async Task ExistingRecordCanBeRemovedAsync() text: "text", description: 
"description", embedding: new float[] { 1, 2, 3 }); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); // Act await db.CreateCollectionAsync(collection); @@ -310,8 +308,7 @@ public async Task RemovingNonExistingRecordDoesNothingAsync() { // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = "test_collection_for_record_deletion"; // Act await db.CreateCollectionAsync(collection); @@ -328,7 +325,7 @@ public async Task ItCanListAllDatabaseCollectionsAsync() // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); string[] testCollections = { "random_collection1", "random_collection2", "random_collection3" }; - this._collectionNum += 3; + await db.CreateCollectionAsync(testCollections[0]); await db.CreateCollectionAsync(testCollections[1]); await db.CreateCollectionAsync(testCollections[2]); @@ -360,8 +357,8 @@ public async Task GetNearestMatchesReturnsAllResultsWithNoMinScoreAsync() using var db = await DuckDBMemoryStore.ConnectAsync(); var compareEmbedding = new float[] { 1, 1, 1 }; int topN = 4; - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); + await db.CreateCollectionAsync(collection); int i = 0; MemoryRecord testRecord = MemoryRecord.LocalRecord( @@ -422,8 +419,8 @@ public async Task GetNearestMatchAsyncReturnsEmptyEmbeddingUnlessSpecifiedAsync( // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); var compareEmbedding = new float[] { 1, 1, 1 }; - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); + await db.CreateCollectionAsync(collection); int i = 0; MemoryRecord testRecord = MemoryRecord.LocalRecord( @@ -483,8 +480,8 @@ public async Task GetNearestMatchAsyncReturnsExpectedAsync() // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); var compareEmbedding = new float[] { 1, 1, 1 }; - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); + await db.CreateCollectionAsync(collection); int i = 0; MemoryRecord testRecord = MemoryRecord.LocalRecord( @@ -543,8 +540,8 @@ public async Task GetNearestMatchesDifferentiatesIdenticalVectorsByKeyAsync() using var db = await DuckDBMemoryStore.ConnectAsync(); var compareEmbedding = new float[] { 1, 1, 1 }; int topN = 4; - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); + await db.CreateCollectionAsync(collection); for (int i = 0; i < 10; i++) @@ -578,8 +575,8 @@ public async Task ItCanBatchUpsertRecordsAsync() // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); int numRecords = 10; - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); + IEnumerable records = this.CreateBatchRecords(numRecords); // Act @@ -599,8 +596,8 @@ public async Task ItCanBatchGetRecordsAsync() // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); int numRecords = 10; - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); + IEnumerable records = this.CreateBatchRecords(numRecords); var keys = 
db.UpsertBatchAsync(collection, records); @@ -620,8 +617,8 @@ public async Task ItCanBatchRemoveRecordsAsync() // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); int numRecords = 10; - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); + IEnumerable records = this.CreateBatchRecords(numRecords); await db.CreateCollectionAsync(collection); @@ -647,8 +644,7 @@ public async Task DeletingNonExistentCollectionDoesNothingAsync() { // Arrange using var db = await DuckDBMemoryStore.ConnectAsync(); - string collection = "test_collection" + this._collectionNum; - this._collectionNum++; + string collection = this.GetTestCollectionName(); // Act await db.DeleteCollectionAsync(collection); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs index e840cd936a56..ba60945ecb37 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs @@ -8,13 +8,13 @@ using System.Threading.Tasks; using Kusto.Cloud.Platform.Utils; using Kusto.Data.Common; -using Microsoft.SemanticKernel.Connectors.Memory.Kusto; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Connectors.Kusto; +using Microsoft.SemanticKernel.Http; using Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Kusto; +namespace SemanticKernel.Connectors.UnitTests.Kusto; /// /// Unit tests for class. @@ -68,7 +68,7 @@ public async Task ItCanCreateCollectionAsync() .Verify(client => client.ExecuteControlCommandAsync( DatabaseName, It.Is(s => s.StartsWith($".create table {CollectionName}")), - It.Is(crp => string.Equals(crp.Application, Telemetry.HttpUserAgent, StringComparison.Ordinal)) + It.Is(crp => string.Equals(crp.Application, HttpHeaderValues.UserAgent, StringComparison.Ordinal)) ), Times.Once()); } @@ -87,7 +87,7 @@ public async Task ItCanDeleteCollectionAsync() .Verify(client => client.ExecuteControlCommandAsync( DatabaseName, It.Is(s => s.StartsWith($".drop table {CollectionName}")), - It.Is(crp => string.Equals(crp.Application, Telemetry.HttpUserAgent, StringComparison.Ordinal)) + It.Is(crp => string.Equals(crp.Application, HttpHeaderValues.UserAgent, StringComparison.Ordinal)) ), Times.Once()); } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/MongoDB/MongoDBMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/MongoDB/MongoDBMemoryStoreTests.cs new file mode 100644 index 000000000000..b484295ce5e2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/MongoDB/MongoDBMemoryStoreTests.cs @@ -0,0 +1,403 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.MongoDB; +using Microsoft.SemanticKernel.Memory; +using MongoDB.Driver; +using MongoDB.Driver.Core.Clusters; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.MongoDB; + +/// +/// Unit tests for class. 
+/// +public class MongoDBMemoryStoreTests +{ + private const string CollectionName = "test-collection"; + private const string DatabaseName = "test-database"; + + private readonly Mock _mongoClientMock; + private readonly Mock _mongoClusterMock; + private readonly Mock> _mongoCollectionMock; + private readonly Mock _mongoDatabaseMock; + + public MongoDBMemoryStoreTests() + { + this._mongoClientMock = new Mock(); + this._mongoDatabaseMock = new Mock(); + this._mongoCollectionMock = new Mock>(); + this._mongoClusterMock = new Mock(); + + this._mongoClientMock + .Setup(client => client.GetDatabase(DatabaseName, null)) + .Returns(this._mongoDatabaseMock.Object); + this._mongoClientMock + .Setup(client => client.Cluster) + .Returns(this._mongoClusterMock.Object); + this._mongoDatabaseMock + .Setup(client => client.GetCollection(CollectionName, null)) + .Returns(this._mongoCollectionMock.Object); + } + + [Fact] + public async Task ItCanCreateCollectionAsync() + { + // Arrange + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + + // Act + await memoryStore.CreateCollectionAsync(CollectionName); + + // Assert + this._mongoDatabaseMock.Verify(d => d.CreateCollectionAsync(CollectionName, default, default), Times.Once()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task ItCanCheckWhetherCollectionExistsAsync(bool collectionExists) + { + // Arrange + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + using var cursorMock = collectionExists ? new AsyncCursorMock(CollectionName) : new AsyncCursorMock(); + this._mongoDatabaseMock + .Setup(client => client.ListCollectionNamesAsync(default, default)) + .ReturnsAsync(cursorMock); + + // Act + var actualCollectionExists = await memoryStore.DoesCollectionExistAsync(CollectionName); + + // Assert + Assert.Equal(collectionExists, actualCollectionExists); + this._mongoDatabaseMock.Verify(client => client.ListCollectionNamesAsync(default, default), Times.Once()); + } + + [Fact] + public async Task ItCanDeleteCollectionAsync() + { + // Arrange + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + + // Act + await memoryStore.DeleteCollectionAsync(CollectionName); + + // Assert + this._mongoDatabaseMock.Verify(client => client.DropCollectionAsync(CollectionName, default), Times.Once()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task ItCanGetAsync(bool entryExists) + { + // Arrange + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + var memoryRecord = CreateRecord("id"); + + using var cursorMock = entryExists ? 
+ new AsyncCursorMock(new MongoDBMemoryEntry(memoryRecord)) : + new AsyncCursorMock(); + + this._mongoCollectionMock + .Setup(c => c.FindAsync( + It.IsAny>(), + It.IsAny>(), + default)) + .ReturnsAsync(cursorMock); + + // Act + var actualMemoryRecord = await memoryStore.GetAsync(CollectionName, memoryRecord.Key, withEmbedding: true); + + // Assert + if (entryExists) + { + Assert.NotNull(actualMemoryRecord); + AssertMemoryRecordEqual(memoryRecord, actualMemoryRecord); + } + else + { + Assert.Null(actualMemoryRecord); + } + } + + [Fact] + public async Task ItCanGetBatchAsync() + { + // Arrange + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + var (memoryRecords, keys) = CreateRecords(10); + + using var cursorMock = new AsyncCursorMock(memoryRecords.Select(r => new MongoDBMemoryEntry(r)).ToArray()); + + this._mongoCollectionMock + .Setup(c => c.FindAsync( + It.IsAny>(), + It.IsAny>(), + default)) + .ReturnsAsync(cursorMock); + + // Act + var actualMemoryRecords = await memoryStore.GetBatchAsync(CollectionName, keys, withEmbeddings: true).ToListAsync(); + + // Assert + Assert.Equal(memoryRecords.Length, actualMemoryRecords.Count); + + for (var i = 0; i < memoryRecords.Length; i++) + { + AssertMemoryRecordEqual(memoryRecords[i], actualMemoryRecords[i]); + } + } + + [Fact] + public async Task ItCanGetCollectionsAsync() + { + // Arrange + var collections = new[] { "collection1", "collection2", "collection3" }; + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + using var cursorMock = new AsyncCursorMock(collections); + + this._mongoDatabaseMock + .Setup(client => client.ListCollectionNamesAsync(default, default)) + .ReturnsAsync(cursorMock); + + // Act + var actualCollections = await memoryStore.GetCollectionsAsync().ToListAsync(); + + // Assert + Assert.True(collections.SequenceEqual(actualCollections)); + } + + [Fact] + public async Task ItCanGetNearestMatchAsync() + { + // Arrange + const string ExpectedStage = "{ \"$vectorSearch\" : { \"queryVector\" : [1.0], \"path\" : \"embedding\", \"limit\" : 1, \"numCandidates\" : 10, \"index\" : \"default\" } }"; + + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + var memoryRecord = CreateRecord("id"); + using var cursorMock = new AsyncCursorMock(new MongoDBMemoryEntry(memoryRecord)); + + // Act + this._mongoCollectionMock + .Setup(c => c.AggregateAsync(It.IsAny>(), It.IsAny(), default)) + .ReturnsAsync(cursorMock); + var match = await memoryStore.GetNearestMatchAsync(CollectionName, new(new[] { 1f })); + + // Assert + AssertMemoryRecordEqual(memoryRecord, match.Value.Item1); + this._mongoCollectionMock.Verify(a => a.AggregateAsync(It.Is>(p => VerifyPipeline(p, ExpectedStage)), It.IsAny(), default), Times.Once()); + } + + [Fact] + public async Task ItCanGetNearestMatchesAsync() + { + // Arrange + const string ExpectedStage = "{ \"$vectorSearch\" : { \"queryVector\" : [1.0], \"path\" : \"embedding\", \"limit\" : 100, \"numCandidates\" : 1000, \"index\" : \"default\" } }"; + + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + var (memoryRecords, keys) = CreateRecords(10); + using var cursorMock = new AsyncCursorMock(memoryRecords.Select(r => new MongoDBMemoryEntry(r)).ToArray()); + + // Act + this._mongoCollectionMock + .Setup(c => c.AggregateAsync(It.IsAny>(), It.IsAny(), default)) + .ReturnsAsync(cursorMock); + var matches = await 
memoryStore.GetNearestMatchesAsync(CollectionName, new(new[] { 1f }), 100).ToListAsync(); + + // Assert + Assert.Equal(memoryRecords.Length, matches.Count); + + for (var i = 0; i < memoryRecords.Length; i++) + { + AssertMemoryRecordEqual(memoryRecords[i], matches[i].Item1); + } + + this._mongoCollectionMock.Verify(a => a.AggregateAsync(It.Is>(p => VerifyPipeline(p, ExpectedStage)), It.IsAny(), default), Times.Once()); + } + + [Fact] + public async Task ItCanRemoveAsync() + { + // Arrange + const string Key = "key"; + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + + // Act + await memoryStore.RemoveAsync(CollectionName, Key); + + // Assert + this._mongoCollectionMock.Verify(c => c.DeleteOneAsync(It.IsAny>(), default), Times.Once()); + } + + [Fact] + public async Task ItCanRemoveBatchAsync() + { + // Arrange + var keys = new string[] { "key1", "key2", "key3" }; + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + + // Act + await memoryStore.RemoveBatchAsync(CollectionName, keys); + + // Assert + this._mongoCollectionMock.Verify(c => c.DeleteManyAsync(It.IsAny>(), default), Times.Once()); + } + + [Fact] + public async Task ItCanUpsertAsync() + { + // Arrange + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + var memoryRecord = CreateRecord("id"); + + this._mongoCollectionMock + .Setup(c => c.ReplaceOneAsync( + It.IsAny>(), + It.Is(e => e.Id == memoryRecord.Key), + It.IsAny(), default)) + .ReturnsAsync(new ReplaceOneResult.Acknowledged(0, 0, memoryRecord.Key)); + + // Act + var actualMemoryRecordKey = await memoryStore.UpsertAsync(CollectionName, memoryRecord); + + // Assert + Assert.Equal(memoryRecord.Key, actualMemoryRecordKey); + + this._mongoCollectionMock.Verify(c => c.ReplaceOneAsync( + It.IsAny>(), + It.Is(e => e.Id == memoryRecord.Key), + It.IsAny(), + default)); + } + + [Fact] + public async Task ItCanUpsertBatchAsync() + { + // Arrange + using var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + var (memoryRecords, keys) = CreateRecords(10); + + foreach (var key in keys) + { + var entryMatch = It.Is(e => e.Id == key); + this._mongoCollectionMock + .Setup(c => c.ReplaceOneAsync( + It.IsAny>(), + It.Is(e => e.Id == key), + It.IsAny(), + default)) + .ReturnsAsync(new ReplaceOneResult.Acknowledged(0, 0, key)); + } + + // Act + var actualMemoryRecordKeys = await memoryStore.UpsertBatchAsync(CollectionName, memoryRecords).ToListAsync(); + + for (int i = 0; i < memoryRecords.Length; i++) + { + Assert.Equal(memoryRecords[i].Key, actualMemoryRecordKeys[i]); + + this._mongoCollectionMock.Verify(c => c.ReplaceOneAsync( + It.IsAny>(), + It.Is(e => e.Id == memoryRecords[i].Key), + It.IsAny(), + default)); + } + } + + [Fact] + public void ItDisposesClusterOnDispose() + { + // Arrange + var memoryStore = new MongoDBMemoryStore(this._mongoClientMock.Object, DatabaseName); + + // Act + memoryStore.Dispose(); + + // Assert + this._mongoClusterMock.Verify(c => c.Dispose(), Times.Once()); + } + + #region private ================================================================================ + + private sealed class AsyncCursorMock : IAsyncCursor + { + private T[] _items; + + public IEnumerable? Current { get; private set; } + + public AsyncCursorMock(params T[] items) + { + this._items = items ?? 
Array.Empty(); + } + + public void Dispose() + { + } + + public bool MoveNext(CancellationToken cancellationToken = default) + { + this.Current = this._items; + this._items = Array.Empty(); + + return this.Current.Any(); + } + + public Task MoveNextAsync(CancellationToken cancellationToken = default) => + Task.FromResult(this.MoveNext(cancellationToken)); + } + + private static MemoryRecord CreateRecord(string id) => + MemoryRecord.LocalRecord( + id: id, + text: $"text_{id}", + description: $"description_{id}", + key: id, + embedding: new[] { 1.1f, 2.2f, 3.3f }); + + private static (MemoryRecord[], string[]) CreateRecords(int count) + { + var keys = Enumerable.Range(0, count).Select(i => $"{i}").ToArray(); + var memoryRecords = keys.Select(k => CreateRecord(k)).ToArray(); + + return (memoryRecords, keys); + } + + private static void AssertMemoryRecordEqual(MemoryRecord expectedRecord, MemoryRecord actualRecord, bool assertEmbeddingEqual = true) + { + Assert.Equal(expectedRecord.Key, actualRecord.Key); + Assert.Equal(expectedRecord.Metadata.Id, actualRecord.Metadata.Id); + Assert.Equal(expectedRecord.Metadata.Text, actualRecord.Metadata.Text); + Assert.Equal(expectedRecord.Metadata.Description, actualRecord.Metadata.Description); + Assert.Equal(expectedRecord.Metadata.AdditionalMetadata, actualRecord.Metadata.AdditionalMetadata); + Assert.Equal(expectedRecord.Metadata.IsReference, actualRecord.Metadata.IsReference); + Assert.Equal(expectedRecord.Metadata.ExternalSourceName, actualRecord.Metadata.ExternalSourceName); + + if (assertEmbeddingEqual) + { + Assert.True(expectedRecord.Embedding.Span.SequenceEqual(actualRecord.Embedding.Span)); + } + else + { + Assert.True(actualRecord.Embedding.Span.IsEmpty); + } + } + + private static bool VerifyPipeline(PipelineDefinition pipeline, string expectedStage) + { + if (pipeline.Stages.Count() != 2) + { + return false; + } + + var actualStage = pipeline.Stages.First().ToString(); + return actualStage == expectedStage; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeKernelBuilderExtensionsTests.cs deleted file mode 100644 index 4a624fb4ca13..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeKernelBuilderExtensionsTests.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Net.Http; -using System.Net.Mime; -using System.Text; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.Memory.Pinecone; - -public sealed class PineconeKernelBuilderExtensionsTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - - public PineconeKernelBuilderExtensionsTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - - this._httpClient = new HttpClient(this._messageHandlerStub, false); - } - - [Fact] - public async Task PineconeMemoryStoreShouldBeProperlyInitializedAsync() - { - //Arrange - this._messageHandlerStub.ResponseToReturn.Content = new StringContent("[\"fake-index1\"]", Encoding.UTF8, MediaTypeNames.Application.Json); - - var builder = new KernelBuilder(); -#pragma warning disable CS0618 // This will be removed in a future release. 
- builder.WithPineconeMemoryStore("fake-environment", "fake-api-key", this._httpClient); -#pragma warning restore CS0618 // This will be removed in a future release. - builder.WithAzureTextEmbeddingGenerationService("fake-deployment-name", "https://fake-random-test-host/fake-path", "fake -api-key"); - var kernel = builder.Build(); //This call triggers the internal factory registered by WithPineconeMemoryStore method to create an instance of the PineconeMemoryStore class. - - //Act -#pragma warning disable CS0618 // This will be removed in a future release. - await kernel.Memory.GetCollectionsAsync(); //This call triggers a subsequent call to Pinecone memory store. -#pragma warning restore CS0618 // This will be removed in a future release. - - //Assert - Assert.Equal("https://controller.fake-environment.pinecone.io/databases", this._messageHandlerStub?.RequestUri?.AbsoluteUri); - - var headerValues = Enumerable.Empty(); - var headerExists = this._messageHandlerStub?.RequestHeaders?.TryGetValues("Api-Key", out headerValues); - Assert.True(headerExists); - Assert.Contains(headerValues!, (value) => value == "fake-api-key"); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeMemoryBuilderExtensionsTests.cs index abfc4e85f6d1..00d1a840fffa 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeMemoryBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeMemoryBuilderExtensionsTests.cs @@ -6,14 +6,13 @@ using System.Net.Mime; using System.Text; using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone; -using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Connectors.Pinecone; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Pinecone; +namespace SemanticKernel.Connectors.UnitTests.Pinecone; public sealed class PineconeMemoryBuilderExtensionsTests : IDisposable { @@ -31,7 +30,7 @@ public PineconeMemoryBuilderExtensionsTests() public async Task PineconeMemoryStoreShouldBeProperlyInitializedAsync() { // Arrange - var embeddingGenerationMock = Mock.Of(); + var embeddingGenerationMock = Mock.Of(); this._messageHandlerStub.ResponseToReturn.Content = new StringContent("[\"fake-index1\"]", Encoding.UTF8, MediaTypeNames.Application.Json); var builder = new MemoryBuilder(); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeMemoryStoreTests.cs index aef930aa8bca..d450a72360cf 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeMemoryStoreTests.cs @@ -6,14 +6,13 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone.Model; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Pinecone; using 
Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Pinecone; +namespace SemanticKernel.Connectors.UnitTests.Pinecone; public class PineconeMemoryStoreTests { @@ -63,7 +62,7 @@ public async Task ItThrowsExceptionOnIndexCreationAsync() .ReturnsAsync(false); // Act - var exception = await Assert.ThrowsAsync(async () => await this._pineconeMemoryStore.CreateCollectionAsync("test")); + var exception = await Assert.ThrowsAsync(async () => await this._pineconeMemoryStore.CreateCollectionAsync("test")); // Assert this._mockPineconeClient diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeUtilsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeUtilsTests.cs index 26a08763c072..5c6ab6e60c66 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeUtilsTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Pinecone/PineconeUtilsTests.cs @@ -6,10 +6,10 @@ using System.Linq; using System.Text; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Pinecone; +using Microsoft.SemanticKernel.Connectors.Pinecone; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Pinecone; +namespace SemanticKernel.Connectors.UnitTests.Pinecone; public class PineconeUtilsTests { @@ -75,10 +75,10 @@ public void ConvertFilterToPineconeFilterShouldConvertFilterCorrectly() // Assert Assert.Equal(new Dictionary { - { "key1", new Dictionary { { "$eq", "value1" } } }, + { "key1", new Dictionary { { "$eq", "value1" } } }, { "key2", new Dictionary { { "$in", new List { 1, 2, 3 } } } }, - { "key3", new Dictionary { { "$eq", new DateTimeOffset(2023, 1, 1, 0, 0, 0, TimeSpan.Zero).ToUnixTimeSeconds() } } }, - { "key4", new Dictionary { { "$eq", "value4" } } } + { "key3", new Dictionary { { "$eq", new DateTimeOffset(2023, 1, 1, 0, 0, 0, TimeSpan.Zero).ToUnixTimeSeconds() } } }, + { "key4", new Dictionary { { "$eq", "value4" } } } }, result); } } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Postgres/PostgresMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Postgres/PostgresMemoryStoreTests.cs index cf7fa84ea835..d17fe2da6b6f 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Postgres/PostgresMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Postgres/PostgresMemoryStoreTests.cs @@ -5,12 +5,12 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Postgres; +using Microsoft.SemanticKernel.Connectors.Postgres; using Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Postgres; +namespace SemanticKernel.Connectors.UnitTests.Postgres; /// /// Unit tests for class. diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantKernelBuilderExtensionsTests.cs deleted file mode 100644 index e7d1052faadb..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantKernelBuilderExtensionsTests.cs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net.Http; -using System.Net.Mime; -using System.Text; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.Memory.Qdrant; - -public sealed class QdrantKernelBuilderExtensionsTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - - public QdrantKernelBuilderExtensionsTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - - this._httpClient = new HttpClient(this._messageHandlerStub, false); - } - - [Fact] - public async Task QdrantMemoryStoreShouldBeProperlyInitializedAsync() - { - //Arrange - this._httpClient.BaseAddress = new Uri("https://fake-random-qdrant-host"); - this._messageHandlerStub.ResponseToReturn.Content = new StringContent("{\"result\":{\"collections\":[]}}", Encoding.UTF8, MediaTypeNames.Application.Json); - - var builder = new KernelBuilder(); -#pragma warning disable CS0618 // This will be removed in a future release. - builder.WithQdrantMemoryStore(this._httpClient, 123); -#pragma warning restore CS0618 // This will be removed in a future release. - builder.WithAzureTextEmbeddingGenerationService("fake-deployment-name", "https://fake-random-text-embedding-generation-host/fake-path", "fake-api-key"); - var kernel = builder.Build(); //This call triggers the internal factory registered by WithQdrantMemoryStore method to create an instance of the QdrantMemoryStore class. - - //Act -#pragma warning disable CS0618 // This will be removed in a future release. - await kernel.Memory.GetCollectionsAsync(); //This call triggers a subsequent call to Qdrant memory store. -#pragma warning restore CS0618 // This will be removed in a future release. - - //Assert - Assert.Equal("https://fake-random-qdrant-host/collections", this._messageHandlerStub?.RequestUri?.AbsoluteUri); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryBuilderExtensionsTests.cs index 37ed036b54f6..f8e9a870c6f7 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryBuilderExtensionsTests.cs @@ -5,14 +5,13 @@ using System.Net.Mime; using System.Text; using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant; -using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Connectors.Qdrant; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Qdrant; +namespace SemanticKernel.Connectors.UnitTests.Qdrant; public sealed class QdrantMemoryBuilderExtensionsTests : IDisposable { @@ -30,7 +29,7 @@ public QdrantMemoryBuilderExtensionsTests() public async Task QdrantMemoryStoreShouldBeProperlyInitializedAsync() { // Arrange - var embeddingGenerationMock = Mock.Of(); + var embeddingGenerationMock = Mock.Of(); this._httpClient.BaseAddress = new Uri("https://fake-random-qdrant-host"); this._messageHandlerStub.ResponseToReturn.Content = new StringContent("{\"result\":{\"collections\":[]}}", Encoding.UTF8, MediaTypeNames.Application.Json); diff --git 
a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs index 3c4315ff8fa2..499164c31c68 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs @@ -6,13 +6,13 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Qdrant; using Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Qdrant; +namespace SemanticKernel.Connectors.UnitTests.Qdrant; /// /// Tests for collection and upsert operations. diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests2.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests2.cs index abd56fbcee3d..de6124922f8b 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests2.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests2.cs @@ -6,12 +6,12 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +using Microsoft.SemanticKernel.Connectors.Qdrant; using Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Qdrant; +namespace SemanticKernel.Connectors.UnitTests.Qdrant; /// /// Tests for Get and Remove operations. diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs index 7c9cd1adf2c9..caed0eea8e45 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs @@ -9,13 +9,13 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +using Microsoft.SemanticKernel.Connectors.Qdrant; using Microsoft.SemanticKernel.Memory; using Moq; using Moq.Protected; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Qdrant; +namespace SemanticKernel.Connectors.UnitTests.Qdrant; /// /// Tests for Search operations. 
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantVectorDbClientTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantVectorDbClientTests.cs index 0a689e18a307..2223f25e62ee 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantVectorDbClientTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantVectorDbClientTests.cs @@ -3,10 +3,10 @@ using System; using System.Net.Http; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Qdrant; +using Microsoft.SemanticKernel.Connectors.Qdrant; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Qdrant; +namespace SemanticKernel.Connectors.UnitTests.Qdrant; public sealed class QdrantVectorDbClientTests : IDisposable { diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Redis/RedisMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Redis/RedisMemoryStoreTests.cs index e8f09a5f2f2f..9cd81a80f093 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Redis/RedisMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Redis/RedisMemoryStoreTests.cs @@ -7,14 +7,15 @@ using System.Numerics.Tensors; using System.Runtime.InteropServices; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Redis; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Redis; using Microsoft.SemanticKernel.Memory; using Moq; +using NRedisStack; using StackExchange.Redis; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Redis; +namespace SemanticKernel.Connectors.UnitTests.Redis; /// /// Unit tests of . @@ -744,7 +745,7 @@ public async Task GetNearestMatchAsyncThrowsExceptionOnInvalidVectorScoreAsync() } // Assert - var ex = await Assert.ThrowsAsync(async () => + var ex = await Assert.ThrowsAsync(async () => { // Act await store.GetNearestMatchAsync(collection, compareEmbedding, minRelevanceScore: threshold); @@ -756,6 +757,12 @@ public async Task GetNearestMatchAsyncThrowsExceptionOnInvalidVectorScoreAsync() private void MockCreateIndex(string collection, Action? callback = null) { + var mockBatch = new Mock(); + + this._mockDatabase + .Setup(x => x.CreateBatch(It.IsAny())) + .Returns(mockBatch.Object); + this._mockDatabase .Setup>(x => x.ExecuteAsync( It.Is(x => x == "FT.CREATE"), diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Sqlite/SqliteMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Sqlite/SqliteMemoryStoreTests.cs index 1ceb459a5c18..35a7ff0ff7ad 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Sqlite/SqliteMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Sqlite/SqliteMemoryStoreTests.cs @@ -6,11 +6,11 @@ using System.IO; using System.Linq; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Sqlite; +using Microsoft.SemanticKernel.Connectors.Sqlite; using Microsoft.SemanticKernel.Memory; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Sqlite; +namespace SemanticKernel.Connectors.UnitTests.Sqlite; /// /// Unit tests of . 
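Every memory-connector test file in the hunks above receives the same two-part migration: the product namespace loses its ".Memory" segment and the test namespace follows suit. As a quick reference, a minimal before/after sketch of that pattern, taking the Qdrant tests as the example (class bodies omitted; only the directives shown in the diff are assumed):

// Before (removed lines in the hunks above):
//   using Microsoft.SemanticKernel.Connectors.Memory.Qdrant;
//   namespace SemanticKernel.Connectors.UnitTests.Memory.Qdrant;

// After (added lines): the ".Memory" segment is dropped from both namespaces,
// and shared memory abstractions come from Microsoft.SemanticKernel.Memory.
using Microsoft.SemanticKernel.Connectors.Qdrant;
using Microsoft.SemanticKernel.Memory;
using Xunit;

namespace SemanticKernel.Connectors.UnitTests.Qdrant;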
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateKernelBuilderExtensionsTests.cs deleted file mode 100644 index 0d771ef95194..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateKernelBuilderExtensionsTests.cs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Net.Mime; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.Memory.Weaviate; - -public sealed class WeaviateKernelBuilderExtensionsTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - - public WeaviateKernelBuilderExtensionsTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - - this._httpClient = new HttpClient(this._messageHandlerStub, false); - } - - [Theory] - [InlineData(null, "https://fake-random-test-weaviate-host/v1/objects/fake-key")] - [InlineData("v2", "https://fake-random-test-weaviate-host/v2/objects/fake-key")] - public async Task WeaviateMemoryStoreShouldBeProperlyInitializedAsync(string? apiVersion, string expectedAddress) - { - //Arrange - var getResponse = new - { - Properties = new Dictionary { - { "sk_id", "fake_id" }, - { "sk_description", "fake_description" }, - { "sk_text", "fake_text" }, - { "sk_additional_metadata", "fake_additional_metadata" } - } - }; - - this._messageHandlerStub.ResponseToReturn.Content = new StringContent(JsonSerializer.Serialize(getResponse, new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }), Encoding.UTF8, MediaTypeNames.Application.Json); - - var builder = new KernelBuilder(); -#pragma warning disable CS0618 // This will be removed in a future release. - builder.WithWeaviateMemoryStore(this._httpClient, "https://fake-random-test-weaviate-host", "fake-api-key", apiVersion); -#pragma warning restore CS0618 // This will be removed in a future release. - builder.WithAzureTextEmbeddingGenerationService("fake-deployment-name", "https://fake-random-test-host/fake-path", "fake -api-key"); - var kernel = builder.Build(); //This call triggers the internal factory registered by WithWeaviateMemoryStore method to create an instance of the WeaviateMemoryStore class. - - //Act -#pragma warning disable CS0618 // This will be removed in a future release. - await kernel.Memory.GetAsync("fake-collection", "fake-key"); //This call triggers a subsequent call to Weaviate memory store. -#pragma warning restore CS0618 // This will be removed in a future release. 
- - //Assert - Assert.Equal(expectedAddress, this._messageHandlerStub?.RequestUri?.AbsoluteUri); - - var headerValues = Enumerable.Empty(); - var headerExists = this._messageHandlerStub?.RequestHeaders?.TryGetValues("Authorization", out headerValues); - Assert.True(headerExists); - Assert.Contains(headerValues!, (value) => value == "fake-api-key"); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryBuilderExtensionsTests.cs index d9e90cbeba42..58fb5c23ee08 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryBuilderExtensionsTests.cs @@ -8,14 +8,13 @@ using System.Text; using System.Text.Json; using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate; -using Microsoft.SemanticKernel.Plugins.Memory; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Memory; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Weaviate; +namespace SemanticKernel.Connectors.UnitTests.Weaviate; public sealed class WeaviateMemoryBuilderExtensionsTests : IDisposable { @@ -35,7 +34,7 @@ public WeaviateMemoryBuilderExtensionsTests() public async Task WeaviateMemoryStoreShouldBeProperlyInitializedAsync(string? apiVersion, string expectedAddress) { // Arrange - var embeddingGenerationMock = Mock.Of(); + var embeddingGenerationMock = Mock.Of(); var getResponse = new { diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs index fa47bff5ee10..a19a7df73192 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs @@ -8,10 +8,10 @@ using System.Text; using System.Text.Json; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate; +using Microsoft.SemanticKernel.Connectors.Weaviate; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.Memory.Weaviate; +namespace SemanticKernel.Connectors.UnitTests.Weaviate; /// /// Unit tests for class. diff --git a/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs b/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs new file mode 100644 index 000000000000..f83ac864d0c4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; + +namespace SemanticKernel.Connectors.UnitTests; + +internal sealed class MultipleHttpMessageHandlerStub : DelegatingHandler +{ + private int _callIteration = 0; + + public List RequestHeaders { get; private set; } + + public List ContentHeaders { get; private set; } + + public List RequestContents { get; private set; } + + public List RequestUris { get; private set; } + + public List Methods { get; private set; } + + public List ResponsesToReturn { get; set; } + + public MultipleHttpMessageHandlerStub() + { + this.RequestHeaders = []; + this.ContentHeaders = []; + this.RequestContents = []; + this.RequestUris = []; + this.Methods = []; + this.ResponsesToReturn = []; + } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + this._callIteration++; + + this.Methods.Add(request.Method); + this.RequestUris.Add(request.RequestUri); + this.RequestHeaders.Add(request.Headers); + this.ContentHeaders.Add(request.Content?.Headers); + + var content = request.Content == null ? null : await request.Content.ReadAsByteArrayAsync(cancellationToken); + + this.RequestContents.Add(content); + + return await Task.FromResult(this.ResponsesToReturn[this._callIteration - 1]); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs index 8ec0c712ad49..39bc2803fe19 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs @@ -1,85 +1,88 @@ // Copyright (c) Microsoft. All rights reserved. +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.TextGeneration; using Xunit; namespace SemanticKernel.Connectors.UnitTests.OpenAI; /// -/// Unit tests of . +/// Unit tests of . 
/// public class AIServicesOpenAIExtensionsTests { [Fact] public void ItSucceedsWhenAddingDifferentServiceTypeWithSameId() { - KernelBuilder targetBuilder = new(); - targetBuilder.WithAzureTextCompletionService("depl", "https://url", "key", "azure"); - targetBuilder.WithAzureTextEmbeddingGenerationService("depl2", "https://url", "key", "azure"); + Kernel targetKernel = Kernel.CreateBuilder() + .AddAzureOpenAITextGeneration("depl", "https://url", "key", "azure") + .AddAzureOpenAITextEmbeddingGeneration("depl2", "https://url", "key", "azure") + .Build(); - IKernel targetKernel = targetBuilder.Build(); - Assert.NotNull(targetKernel.GetService("azure")); - Assert.NotNull(targetKernel.GetService("azure")); + Assert.NotNull(targetKernel.GetRequiredService("azure")); + Assert.NotNull(targetKernel.GetRequiredService("azure")); } [Fact] public void ItTellsIfAServiceIsAvailable() { - KernelBuilder targetBuilder = new(); - targetBuilder.WithAzureTextCompletionService("depl", "https://url", "key", serviceId: "azure"); - targetBuilder.WithOpenAITextCompletionService("model", "apikey", serviceId: "oai"); - targetBuilder.WithAzureTextEmbeddingGenerationService("depl2", "https://url2", "key", serviceId: "azure"); - targetBuilder.WithOpenAITextEmbeddingGenerationService("model2", "apikey2", serviceId: "oai2"); + Kernel targetKernel = Kernel.CreateBuilder() + .AddAzureOpenAITextGeneration("depl", "https://url", "key", serviceId: "azure") + .AddOpenAITextGeneration("model", "apikey", serviceId: "oai") + .AddAzureOpenAITextEmbeddingGeneration("depl2", "https://url2", "key", serviceId: "azure") + .AddOpenAITextEmbeddingGeneration("model2", "apikey2", serviceId: "oai2") + .Build(); // Assert - IKernel targetKernel = targetBuilder.Build(); - Assert.NotNull(targetKernel.GetService("azure")); - Assert.NotNull(targetKernel.GetService("oai")); - Assert.NotNull(targetKernel.GetService("azure")); - Assert.NotNull(targetKernel.GetService("oai")); + Assert.NotNull(targetKernel.GetRequiredService("azure")); + Assert.NotNull(targetKernel.GetRequiredService("oai")); + Assert.NotNull(targetKernel.GetRequiredService("azure")); + Assert.NotNull(targetKernel.GetRequiredService("oai")); } [Fact] public void ItCanOverwriteServices() { // Arrange - KernelBuilder targetBuilder = new(); - // Act - Assert no exception occurs - targetBuilder.WithAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "one"); - targetBuilder.WithAzureTextCompletionService("dep", "https://localhost", "key", serviceId: "one"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddAzureOpenAITextGeneration("depl", "https://localhost", "key", serviceId: "one"); + builder.Services.AddAzureOpenAITextGeneration("depl", "https://localhost", "key", serviceId: "one"); + + builder.Services.AddOpenAITextGeneration("model", "key", serviceId: "one"); + builder.Services.AddOpenAITextGeneration("model", "key", serviceId: "one"); - targetBuilder.WithOpenAITextCompletionService("model", "key", serviceId: "one"); - targetBuilder.WithOpenAITextCompletionService("model", "key", serviceId: "one"); + builder.Services.AddAzureOpenAITextEmbeddingGeneration("dep", "https://localhost", "key", serviceId: "one"); + builder.Services.AddAzureOpenAITextEmbeddingGeneration("dep", "https://localhost", "key", serviceId: "one"); - targetBuilder.WithAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "one"); - targetBuilder.WithAzureTextEmbeddingGenerationService("dep", "https://localhost", "key", serviceId: "one"); + 
builder.Services.AddOpenAITextEmbeddingGeneration("model", "key", serviceId: "one"); + builder.Services.AddOpenAITextEmbeddingGeneration("model", "key", serviceId: "one"); - targetBuilder.WithOpenAITextEmbeddingGenerationService("model", "key", serviceId: "one"); - targetBuilder.WithOpenAITextEmbeddingGenerationService("model", "key", serviceId: "one"); + builder.Services.AddAzureOpenAIChatCompletion("dep", "https://localhost", "key", serviceId: "one"); + builder.Services.AddAzureOpenAIChatCompletion("dep", "https://localhost", "key", serviceId: "one"); - targetBuilder.WithAzureChatCompletionService("dep", "https://localhost", "key", serviceId: "one"); - targetBuilder.WithAzureChatCompletionService("dep", "https://localhost", "key", serviceId: "one"); + builder.Services.AddOpenAIChatCompletion("model", "key", serviceId: "one"); + builder.Services.AddOpenAIChatCompletion("model", "key", serviceId: "one"); - targetBuilder.WithOpenAIChatCompletionService("model", "key", serviceId: "one"); - targetBuilder.WithOpenAIChatCompletionService("model", "key", serviceId: "one"); + builder.Services.AddOpenAITextToImage("model", "key", serviceId: "one"); + builder.Services.AddOpenAITextToImage("model", "key", serviceId: "one"); - targetBuilder.WithOpenAIImageGenerationService("model", "key", serviceId: "one"); - targetBuilder.WithOpenAIImageGenerationService("model", "key", serviceId: "one"); + builder.Services.AddSingleton(new OpenAITextGenerationService("model", "key")); + builder.Services.AddSingleton(new OpenAITextGenerationService("model", "key")); - targetBuilder.WithDefaultAIService(new OpenAITextCompletion("model", "key")); - targetBuilder.WithDefaultAIService(new OpenAITextCompletion("model", "key")); + builder.Services.AddSingleton((_) => new OpenAITextGenerationService("model", "key")); + builder.Services.AddSingleton((_) => new OpenAITextGenerationService("model", "key")); - targetBuilder.WithDefaultAIService((_) => new OpenAITextCompletion("model", "key")); - targetBuilder.WithDefaultAIService((_) => new OpenAITextCompletion("model", "key")); + builder.Services.AddKeyedSingleton("one", new OpenAITextGenerationService("model", "key")); + builder.Services.AddKeyedSingleton("one", new OpenAITextGenerationService("model", "key")); - targetBuilder.WithAIService("one", new OpenAITextCompletion("model", "key")); - targetBuilder.WithAIService("one", new OpenAITextCompletion("model", "key")); + builder.Services.AddKeyedSingleton("one", (_, _) => new OpenAITextGenerationService("model", "key")); + builder.Services.AddKeyedSingleton("one", (_, _) => new OpenAITextGenerationService("model", "key")); - targetBuilder.WithAIService("one", (loggerFactory) => new OpenAITextCompletion("model", "key")); - targetBuilder.WithAIService("one", (loggerFactory) => new OpenAITextCompletion("model", "key")); + builder.Build(); } } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs new file mode 100644 index 000000000000..76d5c6b7d81f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; + +/// +/// Unit tests for class. +/// +public sealed class AzureOpenAIWithDataChatMessageContentTests +{ + [Fact] + public void ConstructorThrowsExceptionWhenAssistantMessageIsNotProvided() + { + // Arrange + var choice = new ChatWithDataChoice(); + + // Act & Assert + var exception = Assert.Throws(() => new AzureOpenAIWithDataChatMessageContent(choice, "model-id")); + + Assert.Contains("Chat is not valid", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void ConstructorReturnsInstanceWithNullToolContent() + { + // Arrange + var choice = new ChatWithDataChoice { Messages = [new() { Content = "Assistant content", Role = "assistant" }] }; + + // Act + var content = new AzureOpenAIWithDataChatMessageContent(choice, "model-id"); + + // Assert + Assert.Equal("Assistant content", content.Content); + Assert.Equal(AuthorRole.Assistant, content.Role); + + Assert.Null(content.ToolContent); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorReturnsInstanceWithNonNullToolContent(bool includeMetadata) + { + // Arrange + var choice = new ChatWithDataChoice + { + Messages = [ + new() { Content = "Assistant content", Role = "assistant" }, + new() { Content = "Tool content", Role = "tool" }] + }; + + // Act + var content = includeMetadata ? + new AzureOpenAIWithDataChatMessageContent(choice, "model-id", new Dictionary()) : + new AzureOpenAIWithDataChatMessageContent(choice, "model-id"); + + // Assert + Assert.Equal("Assistant content", content.Content); + Assert.Equal("Tool content", content.ToolContent); + Assert.Equal(AuthorRole.Assistant, content.Role); + + Assert.NotNull(content.Metadata); + Assert.Equal("Tool content", content.Metadata["ToolContent"]); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs new file mode 100644 index 000000000000..d3fd86820eb5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; + +/// +/// Unit tests for class. 
+/// +public sealed class AzureOpenAIWithDataStreamingChatMessageContentTests +{ + [Theory] + [MemberData(nameof(ValidChoices))] + public void ConstructorWithValidChoiceSetsNonEmptyContent(object choice, string expectedContent) + { + // Arrange + var streamingChoice = choice as ChatWithDataStreamingChoice; + + // Act + var content = new AzureOpenAIWithDataStreamingChatMessageContent(streamingChoice!, 0, "model-id"); + + // Assert + Assert.Equal(expectedContent, content.Content); + } + + [Theory] + [MemberData(nameof(InvalidChoices))] + public void ConstructorWithInvalidChoiceSetsNullContent(object choice) + { + // Arrange + var streamingChoice = choice as ChatWithDataStreamingChoice; + + // Act + var content = new AzureOpenAIWithDataStreamingChatMessageContent(streamingChoice!, 0, "model-id"); + + // Assert + Assert.Null(content.Content); + } + + public static IEnumerable<object[]> ValidChoices + { + get + { + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 1" } }] }, "Content 1" }; + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 2", Role = "Assistant" } }] }, "Content 2" }; + } + } + + public static IEnumerable<object[]> InvalidChoices + { + get + { + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { EndTurn = true }] } }; + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content", Role = "tool" } }] } }; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs new file mode 100644 index 000000000000..6f7f271b3c42 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; + +/// +/// Unit tests for class.
+/// +public sealed class OpenAIChatMessageContentTests +{ + [Fact] + public void ConstructorsWorkCorrectly() + { + // Arrange + List toolCalls = [new FakeChatCompletionsToolCall("id")]; + + // Act + var content1 = new OpenAIChatMessageContent(new ChatRole("user"), "content1", "model-id1", toolCalls); + var content2 = new OpenAIChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls); + + // Assert + this.AssertChatMessageContent(AuthorRole.User, "content1", "model-id1", toolCalls, content1); + this.AssertChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls, content2); + } + + [Fact] + public void GetOpenAIFunctionToolCallsReturnsCorrectList() + { + // Arrange + List toolCalls = [ + new ChatCompletionsFunctionToolCall("id1", "name", string.Empty), + new ChatCompletionsFunctionToolCall("id2", "name", string.Empty), + new FakeChatCompletionsToolCall("id3"), + new FakeChatCompletionsToolCall("id4")]; + + var content1 = new OpenAIChatMessageContent(AuthorRole.User, "content", "model-id", toolCalls); + var content2 = new OpenAIChatMessageContent(AuthorRole.User, "content", "model-id", []); + + // Act + var actualToolCalls1 = content1.GetOpenAIFunctionToolCalls(); + var actualToolCalls2 = content2.GetOpenAIFunctionToolCalls(); + + // Assert + Assert.Equal(2, actualToolCalls1.Count); + Assert.Equal("id1", actualToolCalls1[0].Id); + Assert.Equal("id2", actualToolCalls1[1].Id); + + Assert.Empty(actualToolCalls2); + } + + [Fact] + public void MetadataIsInitializedCorrectly() + { + // Arrange + var metadata = new Dictionary { { "key", "value" } }; + List toolCalls = [ + new ChatCompletionsFunctionToolCall("id1", "name", string.Empty), + new ChatCompletionsFunctionToolCall("id2", "name", string.Empty), + new FakeChatCompletionsToolCall("id3"), + new FakeChatCompletionsToolCall("id4")]; + + // Act + var content1 = new OpenAIChatMessageContent(AuthorRole.User, "content1", "model-id1", [], metadata); + var content2 = new OpenAIChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls, metadata); + + // Assert + Assert.NotNull(content1.Metadata); + Assert.Single(content1.Metadata); + + Assert.NotNull(content2.Metadata); + Assert.Equal(2, content2.Metadata.Count); + Assert.Equal("value", content2.Metadata["key"]); + + Assert.IsType>(content2.Metadata["ChatResponseMessage.FunctionToolCalls"]); + + var actualToolCalls = content2.Metadata["ChatResponseMessage.FunctionToolCalls"] as List; + Assert.NotNull(actualToolCalls); + + Assert.Equal(2, actualToolCalls.Count); + Assert.Equal("id1", actualToolCalls[0].Id); + Assert.Equal("id2", actualToolCalls[1].Id); + } + + private void AssertChatMessageContent( + AuthorRole expectedRole, + string expectedContent, + string expectedModelId, + IReadOnlyList expectedToolCalls, + OpenAIChatMessageContent actualContent) + { + Assert.Equal(expectedRole, actualContent.Role); + Assert.Equal(expectedContent, actualContent.Content); + Assert.Equal(expectedModelId, actualContent.ModelId); + Assert.Same(expectedToolCalls, actualContent.ToolCalls); + } + + private sealed class FakeChatCompletionsToolCall(string id) : ChatCompletionsToolCall(id) + { } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs new file mode 100644 index 000000000000..9b4d53adb17a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs @@ -0,0 +1,81 @@ +// Copyright 
(c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; + +/// +/// Unit tests for class. +/// +public sealed class OpenAIFunctionToolCallTests +{ + [Theory] + [InlineData("MyFunction", "MyFunction")] + [InlineData("MyPlugin_MyFunction", "MyPlugin_MyFunction")] + public void FullyQualifiedNameReturnsValidName(string toolCallName, string expectedName) + { + // Arrange + var toolCall = new ChatCompletionsFunctionToolCall("id", toolCallName, string.Empty); + var openAIFunctionToolCall = new OpenAIFunctionToolCall(toolCall); + + // Act & Assert + Assert.Equal(expectedName, openAIFunctionToolCall.FullyQualifiedName); + } + + [Fact] + public void ToStringReturnsCorrectValue() + { + // Arrange + var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n}"); + var openAIFunctionToolCall = new OpenAIFunctionToolCall(toolCall); + + // Act & Assert + Assert.Equal("MyPlugin_MyFunction(location:San Diego, max_price:300)", openAIFunctionToolCall.ToString()); + } + + [Fact] + public void ConvertToolCallUpdatesWithEmptyIndexesReturnsEmptyToolCalls() + { + // Arrange + var toolCallIdsByIndex = new Dictionary(); + var functionNamesByIndex = new Dictionary(); + var functionArgumentBuildersByIndex = new Dictionary(); + + // Act + var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + ref toolCallIdsByIndex, + ref functionNamesByIndex, + ref functionArgumentBuildersByIndex); + + // Assert + Assert.Empty(toolCalls); + } + + [Fact] + public void ConvertToolCallUpdatesWithNotEmptyIndexesReturnsNotEmptyToolCalls() + { + // Arrange + var toolCallIdsByIndex = new Dictionary { { 3, "test-id" } }; + var functionNamesByIndex = new Dictionary { { 3, "test-function" } }; + var functionArgumentBuildersByIndex = new Dictionary { { 3, new("test-argument") } }; + + // Act + var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + ref toolCallIdsByIndex, + ref functionNamesByIndex, + ref functionArgumentBuildersByIndex); + + // Assert + Assert.Single(toolCalls); + + var toolCall = toolCalls[0]; + + Assert.Equal("test-id", toolCall.Id); + Assert.Equal("test-function", toolCall.Name); + Assert.Equal("test-argument", toolCall.Arguments); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs new file mode 100644 index 000000000000..38cd31b99053 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs @@ -0,0 +1,76 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; + +/// +/// Unit tests for class. 
+/// +public sealed class OpenAIPluginCollectionExtensionsTests +{ + [Fact] + public void TryGetFunctionAndArgumentsWithNonExistingFunctionReturnsFalse() + { + // Arrange + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", []); + var plugins = new KernelPluginCollection([plugin]); + + var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", string.Empty); + + // Act + var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); + + // Assert + Assert.False(result); + Assert.Null(actualFunction); + Assert.Null(actualArguments); + } + + [Fact] + public void TryGetFunctionAndArgumentsWithoutArgumentsReturnsTrue() + { + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "Result", "MyFunction"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); + + var plugins = new KernelPluginCollection([plugin]); + var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", string.Empty); + + // Act + var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); + + // Assert + Assert.True(result); + Assert.Same(function, actualFunction); + Assert.Null(actualArguments); + } + + [Fact] + public void TryGetFunctionAndArgumentsWithArgumentsReturnsTrue() + { + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "Result", "MyFunction"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); + + var plugins = new KernelPluginCollection([plugin]); + var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n,\n \"null_argument\": null\n}"); + + // Act + var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); + + // Assert + Assert.True(result); + Assert.Same(function, actualFunction); + + Assert.NotNull(actualArguments); + + Assert.Equal("San Diego", actualArguments["location"]); + Assert.Equal("300", actualArguments["max_price"]); + + Assert.Null(actualArguments["null_argument"]); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs new file mode 100644 index 000000000000..fd0a830cc2d9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; + +/// +/// Unit tests for class. +/// +public sealed class OpenAIStreamingTextContentTests +{ + [Fact] + public void ToByteArrayWorksCorrectly() + { + // Arrange + var expectedBytes = Encoding.UTF8.GetBytes("content"); + var content = new OpenAIStreamingTextContent("content", 0, "model-id"); + + // Act + var actualBytes = content.ToByteArray(); + + // Assert + Assert.Equal(expectedBytes, actualBytes); + } + + [Theory] + [InlineData(null, "")] + [InlineData("content", "content")] + public void ToStringWorksCorrectly(string? 
content, string expectedString) + { + // Arrange + var textContent = new OpenAIStreamingTextContent(content!, 0, "model-id"); + + // Act + var actualString = textContent.ToString(); + + // Assert + Assert.Equal(expectedString, actualString); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs new file mode 100644 index 000000000000..4267c57435db --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Net; +using Azure; +using Azure.Core; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; + +/// +/// Unit tests for class. +/// +public sealed class RequestFailedExceptionExtensionsTests +{ + [Theory] + [InlineData(0, null)] + [InlineData(500, HttpStatusCode.InternalServerError)] + public void ToHttpOperationExceptionWithStatusReturnsValidException(int responseStatus, HttpStatusCode? httpStatusCode) + { + // Arrange + var exception = new RequestFailedException(responseStatus, "Error Message"); + + // Act + var actualException = exception.ToHttpOperationException(); + + // Assert + Assert.IsType(actualException); + Assert.Equal(httpStatusCode, actualException.StatusCode); + Assert.Equal("Error Message", actualException.Message); + Assert.Same(exception, actualException.InnerException); + } + + [Fact] + public void ToHttpOperationExceptionWithContentReturnsValidException() + { + // Arrange + using var response = new FakeResponse("Response Content", 500); + var exception = new RequestFailedException(response); + + // Act + var actualException = exception.ToHttpOperationException(); + + // Assert + Assert.IsType(actualException); + Assert.Equal(HttpStatusCode.InternalServerError, actualException.StatusCode); + Assert.Equal("Response Content", actualException.ResponseContent); + Assert.Same(exception, actualException.InnerException); + } + + #region private + + private sealed class FakeResponse(string responseContent, int status) : Response + { + private readonly string _responseContent = responseContent; + private readonly int _status = status; + private readonly IEnumerable _headers = new List(); + + public override BinaryData Content => BinaryData.FromString(this._responseContent); + public override int Status => this._status; + public override string ReasonPhrase => "Reason Phrase"; + public override Stream? ContentStream { get => null; set => throw new NotImplementedException(); } + public override string ClientRequestId { get => "Client Request Id"; set => throw new NotImplementedException(); } + + public override void Dispose() { } + protected override bool ContainsHeader(string name) => throw new NotImplementedException(); + protected override IEnumerable EnumerateHeaders() => this._headers; +#pragma warning disable CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes). + protected override bool TryGetHeader(string name, out string? value) => throw new NotImplementedException(); + protected override bool TryGetHeaderValues(string name, out IEnumerable? 
values) => throw new NotImplementedException(); +#pragma warning restore CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes). + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs new file mode 100644 index 000000000000..0e916047bcb4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs @@ -0,0 +1,626 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletion; + +/// +/// Unit tests for +/// +public sealed class AzureOpenAIChatCompletionServiceTests : IDisposable +{ + private readonly MultipleHttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AzureOpenAIChatCompletionServiceTests() + { + this._messageHandlerStub = new MultipleHttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + + var mockLogger = new Mock(); + + mockLogger.Setup(l => l.IsEnabled(It.IsAny())).Returns(true); + + this._mockLoggerFactory.Setup(l => l.CreateLogger(It.IsAny())).Returns(mockLogger.Object); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? + new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var service = includeLoggerFactory ? + new AzureOpenAIChatCompletionService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAIChatCompletionService("deployment", "https://endpoint", credentials, "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var client = new OpenAIClient("key"); + var service = includeLoggerFactory ? 
+ new AzureOpenAIChatCompletionService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAIChatCompletionService("deployment", client, "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Fact] + public async Task GetTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }); + + // Act + var result = await service.GetTextContentsAsync("Prompt"); + + // Assert + Assert.True(result.Count > 0); + Assert.Equal("Test chat response", result[0].Text); + + var usage = result[0].Metadata?["Usage"] as CompletionsUsage; + + Assert.NotNull(usage); + Assert.Equal(55, usage.PromptTokens); + Assert.Equal(100, usage.CompletionTokens); + Assert.Equal(155, usage.TotalTokens); + } + + [Fact] + public async Task GetChatMessageContentsWithEmptyChoicesThrowsExceptionAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent("{\"id\":\"response-id\",\"object\":\"chat.completion\",\"created\":1704208954,\"model\":\"gpt-4\",\"choices\":[],\"usage\":{\"prompt_tokens\":55,\"completion_tokens\":100,\"total_tokens\":155},\"system_fingerprint\":null}") + }); + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync([])); + + Assert.Equal("Chat completions not found", exception.Message); + } + + [Theory] + [InlineData(0)] + [InlineData(129)] + public async Task GetChatMessageContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt) + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + var settings = new OpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt }; + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync([], settings)); + + Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + var settings = new OpenAIPromptExecutionSettings() + { + MaxTokens = 123, + Temperature = 0.6, + TopP = 0.5, + FrequencyPenalty = 1.6, + PresencePenalty = 1.2, + ResultsPerPrompt = 5, + Seed = 567, + TokenSelectionBiases = new Dictionary { { 2, 3 } }, + StopSequences = ["stop_sequence"] + }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("User Message"); + chatHistory.AddSystemMessage("System Message"); + chatHistory.AddAssistantMessage("Assistant Message"); + + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory, settings); + 
+ // Assert + var requestContent = this._messageHandlerStub.RequestContents[0]; + + Assert.NotNull(requestContent); + + var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent)); + + var messages = content.GetProperty("messages"); + + var userMessage = messages[0]; + var systemMessage = messages[1]; + var assistantMessage = messages[2]; + + Assert.Equal("user", userMessage.GetProperty("role").GetString()); + Assert.Equal("User Message", userMessage.GetProperty("content").GetString()); + + Assert.Equal("system", systemMessage.GetProperty("role").GetString()); + Assert.Equal("System Message", systemMessage.GetProperty("content").GetString()); + + Assert.Equal("assistant", assistantMessage.GetProperty("role").GetString()); + Assert.Equal("Assistant Message", assistantMessage.GetProperty("content").GetString()); + + Assert.Equal(123, content.GetProperty("max_tokens").GetInt32()); + Assert.Equal(0.6, content.GetProperty("temperature").GetDouble()); + Assert.Equal(0.5, content.GetProperty("top_p").GetDouble()); + Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble()); + Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble()); + Assert.Equal(5, content.GetProperty("n").GetInt32()); + Assert.Equal(567, content.GetProperty("seed").GetInt32()); + Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32()); + Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString()); + } + + [Theory] + [MemberData(nameof(ResponseFormats))] + public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(object responseFormat, string? expectedResponseType) + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + var settings = new OpenAIPromptExecutionSettings + { + ResponseFormat = responseFormat + }; + + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }); + + // Act + var result = await service.GetChatMessageContentsAsync([], settings); + + // Assert + var requestContent = this._messageHandlerStub.RequestContents[0]; + + Assert.NotNull(requestContent); + + var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent)); + + Assert.Equal(expectedResponseType, content.GetProperty("response_format").GetProperty("type").GetString()); + } + + [Theory] + [MemberData(nameof(ToolCallBehaviors))] + public async Task GetChatMessageContentsWorksCorrectlyAsync(ToolCallBehavior behavior) + { + // Arrange + var kernel = Kernel.CreateBuilder().Build(); + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = behavior }; + + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }); + + // Act + var result = await service.GetChatMessageContentsAsync([], settings, kernel); + + // Assert + Assert.True(result.Count > 0); + Assert.Equal("Test chat response", result[0].Content); + + var usage = result[0].Metadata?["Usage"] as CompletionsUsage; + + Assert.NotNull(usage); + Assert.Equal(55, usage.PromptTokens); + Assert.Equal(100, usage.CompletionTokens); + Assert.Equal(155, usage.TotalTokens); + } 
+ + [Fact] + public async Task GetChatMessageContentsWithFunctionCallAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function1 = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + var function2 = KernelFunctionFactory.CreateFromMethod((string argument) => + { + functionCallCount++; + throw new ArgumentException("Some exception"); + }, "FunctionWithException"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); + + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; + + this._messageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act + var result = await service.GetChatMessageContentsAsync([], settings, kernel); + + // Assert + Assert.True(result.Count > 0); + Assert.Equal("Test chat response", result[0].Content); + + Assert.Equal(2, functionCallCount); + } + + [Fact] + public async Task GetChatMessageContentsWithFunctionCallMaximumAutoInvokeAttemptsAsync() + { + // Arrange + const int DefaultMaximumAutoInvokeAttempts = 5; + const int AutoInvokeResponsesCount = 6; + + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function])); + + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var responses = new List(); + + for (var i = 0; i < AutoInvokeResponsesCount; i++) + { + responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }); + } + + this._messageHandlerStub.ResponsesToReturn = responses; + + // Act + var result = await service.GetChatMessageContentsAsync([], settings, kernel); + + // Assert + Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount); + } + + [Fact] + public async Task GetChatMessageContentsWithRequiredFunctionCallAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); + var openAIFunction = plugin.GetFunctionsMetadata().First().ToOpenAIFunction(); + + kernel.Plugins.Add(plugin); + + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", 
"model-id", this._httpClient, this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; + + this._messageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act + var result = await service.GetChatMessageContentsAsync([], settings, kernel); + + // Assert + Assert.Equal(1, functionCallCount); + + var requestContents = this._messageHandlerStub.RequestContents; + + Assert.Equal(2, requestContents.Count); + + requestContents.ForEach(Assert.NotNull); + + var firstContent = Encoding.UTF8.GetString(requestContents[0]!); + var secondContent = Encoding.UTF8.GetString(requestContents[1]!); + + var firstContentJson = JsonSerializer.Deserialize(firstContent); + var secondContentJson = JsonSerializer.Deserialize(secondContent); + + Assert.Equal(1, firstContentJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("MyPlugin-GetCurrentWeather", firstContentJson.GetProperty("tool_choice").GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("none", secondContentJson.GetProperty("tool_choice").GetString()); + } + + [Fact] + public async Task GetStreamingTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }); + + // Act & Assert + await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) + { + Assert.Equal("Test chat streaming response", chunk.Text); + } + } + + [Fact] + public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }); + + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([])) + { + Assert.Equal("Test chat streaming response", chunk.Content); + } + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithFunctionCallAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function1 = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + var function2 = KernelFunctionFactory.CreateFromMethod((string argument) => + { + functionCallCount++; + throw new ArgumentException("Some exception"); + }, "FunctionWithException"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); + + var service = new 
AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_multiple_function_calls_test_response.txt")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }; + + this._messageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([], settings, kernel)) + { + Assert.Equal("Test chat streaming response", chunk.Content); + } + + Assert.Equal(2, functionCallCount); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithFunctionCallMaximumAutoInvokeAttemptsAsync() + { + // Arrange + const int DefaultMaximumAutoInvokeAttempts = 5; + const int AutoInvokeResponsesCount = 6; + + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function])); + + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var responses = new List(); + + for (var i = 0; i < AutoInvokeResponsesCount; i++) + { + responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) }); + } + + this._messageHandlerStub.ResponsesToReturn = responses; + + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([], settings, kernel)) + { + Assert.Equal("Test chat streaming response", chunk.Content); + } + + Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); + var openAIFunction = plugin.GetFunctionsMetadata().First().ToOpenAIFunction(); + + kernel.Plugins.Add(plugin); + + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new 
StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }; + + this._messageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([], settings, kernel)) + { + Assert.Equal("Test chat streaming response", chunk.Content); + } + + Assert.Equal(1, functionCallCount); + + var requestContents = this._messageHandlerStub.RequestContents; + + Assert.Equal(2, requestContents.Count); + + requestContents.ForEach(Assert.NotNull); + + var firstContent = Encoding.UTF8.GetString(requestContents[0]!); + var secondContent = Encoding.UTF8.GetString(requestContents[1]!); + + var firstContentJson = JsonSerializer.Deserialize(firstContent); + var secondContentJson = JsonSerializer.Deserialize(secondContent); + + Assert.Equal(1, firstContentJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("MyPlugin-GetCurrentWeather", firstContentJson.GetProperty("tool_choice").GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("none", secondContentJson.GetProperty("tool_choice").GetString()); + } + + [Fact] + public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync() + { + // Arrange + const string Prompt = "This is test prompt"; + const string SystemMessage = "This is test system message"; + + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; + + this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => service); + Kernel kernel = builder.Build(); + + // Act + var result = await kernel.InvokePromptAsync(Prompt, new(settings)); + + // Assert + Assert.Equal("Test chat response", result.ToString()); + + var requestContentByteArray = this._messageHandlerStub.RequestContents[0]; + + Assert.NotNull(requestContentByteArray); + + var requestContent = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContentByteArray)); + + var messages = requestContent.GetProperty("messages"); + + Assert.Equal(2, messages.GetArrayLength()); + + Assert.Equal("This is test system message", messages[0].GetProperty("content").GetString()); + Assert.Equal("system", messages[0].GetProperty("role").GetString()); + + Assert.Equal("This is test prompt", messages[1].GetProperty("content").GetString()); + Assert.Equal("user", messages[1].GetProperty("role").GetString()); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } + + public static TheoryData ToolCallBehaviors => new() + { + ToolCallBehavior.EnableKernelFunctions, + ToolCallBehavior.AutoInvokeKernelFunctions + }; + + public static TheoryData ResponseFormats => new() + { + { new FakeChatCompletionsResponseFormat(), null }, + { "json_object", "json_object" }, + { "text", "text" } + }; + + private sealed class FakeChatCompletionsResponseFormat : ChatCompletionsResponseFormat { } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs new file mode 100644 index 000000000000..ebc14928d444 --- /dev/null 
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs
@@ -0,0 +1,359 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Azure.AI.OpenAI;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.TextGeneration;
+using Moq;
+using Xunit;
+
+namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletion;
+
+/// <summary>
+/// Unit tests for <see cref="OpenAIChatCompletionService"/>
+/// </summary>
+public sealed class OpenAIChatCompletionServiceTests : IDisposable
+{
+    private readonly HttpMessageHandlerStub _messageHandlerStub;
+    private readonly HttpClient _httpClient;
+    private readonly OpenAIFunction _timepluginDate, _timepluginNow;
+    private readonly OpenAIPromptExecutionSettings _executionSettings;
+    private readonly Mock<ILoggerFactory> _mockLoggerFactory;
+
+    public OpenAIChatCompletionServiceTests()
+    {
+        this._messageHandlerStub = new HttpMessageHandlerStub();
+        this._httpClient = new HttpClient(this._messageHandlerStub, false);
+        this._mockLoggerFactory = new Mock<ILoggerFactory>();
+
+        IList<KernelFunctionMetadata> functions = KernelPluginFactory.CreateFromFunctions("TimePlugin", new[]
+        {
+            KernelFunctionFactory.CreateFromMethod((string? format = null) => DateTime.Now.Date.ToString(format, CultureInfo.InvariantCulture), "Date", "TimePlugin.Date"),
+            KernelFunctionFactory.CreateFromMethod((string? format = null) => DateTime.Now.ToString(format, CultureInfo.InvariantCulture), "Now", "TimePlugin.Now"),
+        }).GetFunctionsMetadata();
+
+        this._timepluginDate = functions[0].ToOpenAIFunction();
+        this._timepluginNow = functions[1].ToOpenAIFunction();
+
+        this._executionSettings = new()
+        {
+            ToolCallBehavior = ToolCallBehavior.EnableFunctions(new[] { this._timepluginDate, this._timepluginNow })
+        };
+    }
+
+    [Theory]
+    [InlineData(true)]
+    [InlineData(false)]
+    public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory)
+    {
+        // Arrange & Act
+        var service = includeLoggerFactory ?
+            new OpenAIChatCompletionService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) :
+            new OpenAIChatCompletionService("model-id", "api-key", "organization");
+
+        // Assert
+        Assert.NotNull(service);
+        Assert.Equal("model-id", service.Attributes["ModelId"]);
+    }
+
+    [Theory]
+    [InlineData(true)]
+    [InlineData(false)]
+    public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory)
+    {
+        // Arrange & Act
+        var client = new OpenAIClient("key");
+        var service = includeLoggerFactory ?
+ new OpenAIChatCompletionService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : + new OpenAIChatCompletionService("model-id", client); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingAutoAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(new ChatHistory(), this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Date", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingNowAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + this._executionSettings.ToolCallBehavior = ToolCallBehavior.RequireFunction(this._timepluginNow); + + // Act + await chatCompletion.GetChatMessageContentsAsync(new ChatHistory(), this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(1, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + } + + [Fact] + public async Task ItCreatesNoFunctionsWhenUsingNoneAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + this._executionSettings.ToolCallBehavior = null; + + // Act + await chatCompletion.GetChatMessageContentsAsync(new ChatHistory(), this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.False(optionsJson.TryGetProperty("functions", out var _)); + } + + [Fact] + public async Task ItAddsIdToChatMessageAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + var chatHistory = new 
ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello", metadata: new Dictionary() { { OpenAIChatMessageContent.ToolIdProperty, "John Doe" } }); + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("messages").GetArrayLength()); + Assert.Equal("John Doe", optionsJson.GetProperty("messages")[1].GetProperty("tool_call_id").GetString()); + } + + [Fact] + public async Task ItGetChatMessageContentsShouldHaveModelIdDefinedAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(AzureChatCompletionResponse, Encoding.UTF8, "application/json") }; + + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + + // Act + var chatMessage = await chatCompletion.GetChatMessageContentAsync(chatHistory, this._executionSettings); + + // Assert + Assert.NotNull(chatMessage.ModelId); + Assert.Equal("gpt-3.5-turbo", chatMessage.ModelId); + } + + [Fact] + public async Task ItGetTextContentsShouldHaveModelIdDefinedAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(AzureChatCompletionResponse, Encoding.UTF8, "application/json") }; + + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + + // Act + var textContent = await chatCompletion.GetTextContentAsync("hello", this._executionSettings); + + // Assert + Assert.NotNull(textContent.ModelId); + Assert.Equal("gpt-3.5-turbo", textContent.ModelId); + } + + [Fact] + public async Task GetStreamingTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) + { + Assert.Equal("Test chat streaming response", chunk.Text); + } + } + + [Fact] + public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() + { + // Arrange + var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([])) + { + Assert.Equal("Test chat streaming response", chunk.Content); + } + } + + [Fact] + public async Task ItAddsSystemMessageAsync() + { + // Arrange + var 
chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("messages").GetArrayLength()); + Assert.Equal("Assistant is a large language model.", optionsJson.GetProperty("messages")[0].GetProperty("content").GetString()); + Assert.Equal("system", optionsJson.GetProperty("messages")[0].GetProperty("role").GetString()); + Assert.Equal("Hello", optionsJson.GetProperty("messages")[1].GetProperty("content").GetString()); + Assert.Equal("user", optionsJson.GetProperty("messages")[1].GetProperty("role").GetString()); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } + + private const string ChatCompletionResponse = @"{ + ""id"": ""chatcmpl-8IlRBQU929ym1EqAY2J4T7GGkW5Om"", + ""object"": ""chat.completion"", + ""created"": 1699482945, + ""model"": ""gpt-3.5-turbo"", + ""choices"": [ + { + ""index"": 0, + ""message"": { + ""role"": ""assistant"", + ""content"": null, + ""function_call"": { + ""name"": ""TimePlugin_Date"", + ""arguments"": ""{}"" + } + }, + ""finish_reason"": ""stop"" + } + ], + ""usage"": { + ""prompt_tokens"": 52, + ""completion_tokens"": 1, + ""total_tokens"": 53 + } +}"; + private const string AzureChatCompletionResponse = @"{ + ""id"": ""chatcmpl-8S914omCBNQ0KU1NFtxmupZpzKWv2"", + ""object"": ""chat.completion"", + ""created"": 1701718534, + ""model"": ""gpt-3.5-turbo"", + ""prompt_filter_results"": [ + { + ""prompt_index"": 0, + ""content_filter_results"": { + ""hate"": { + ""filtered"": false, + ""severity"": ""safe"" + }, + ""self_harm"": { + ""filtered"": false, + ""severity"": ""safe"" + }, + ""sexual"": { + ""filtered"": false, + ""severity"": ""safe"" + }, + ""violence"": { + ""filtered"": false, + ""severity"": ""safe"" + } + } + } + ], + ""choices"": [ + { + ""index"": 0, + ""finish_reason"": ""stop"", + ""message"": { + ""role"": ""assistant"", + ""content"": ""Hello! How can I help you today? Please provide me with a question or topic you would like information on."" + }, + ""content_filter_results"": { + ""hate"": { + ""filtered"": false, + ""severity"": ""safe"" + }, + ""self_harm"": { + ""filtered"": false, + ""severity"": ""safe"" + }, + ""sexual"": { + ""filtered"": false, + ""severity"": ""safe"" + }, + ""violence"": { + ""filtered"": false, + ""severity"": ""safe"" + } + } + } + ], + ""usage"": { + ""prompt_tokens"": 23, + ""completion_tokens"": 23, + ""total_tokens"": 46 + } +}"; +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureChatCompletionWithDataTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureChatCompletionWithDataTests.cs deleted file mode 100644 index e3cdc12654e6..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureChatCompletionWithDataTests.cs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (c) Microsoft. 
All rights reserved. - -using System; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ChatCompletionWithData; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletionWithData; - -/// -/// Unit tests for -/// -public sealed class AzureChatCompletionWithDataTests : IDisposable -{ - private readonly AzureChatCompletionWithDataConfig _config; - - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - - public AzureChatCompletionWithDataTests() - { - this._config = this.GetConfig(); - - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - } - - [Fact] - public async Task SpecifiedConfigurationShouldBeUsedAsync() - { - // Arrange - const string ExpectedUri = "https://fake-completion-endpoint/openai/deployments/fake-completion-model-id/extensions/chat/completions?api-version=fake-api-version"; - var chatCompletion = new AzureChatCompletionWithData(this._config, this._httpClient); - - // Act - await chatCompletion.GetChatCompletionsAsync(new ChatHistory()); - - // Assert - var actualUri = this._messageHandlerStub.RequestUri?.AbsoluteUri; - var actualRequestHeaderValues = this._messageHandlerStub.RequestHeaders!.GetValues("Api-Key"); - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - - Assert.Equal(ExpectedUri, actualUri); - - Assert.Contains("fake-completion-api-key", actualRequestHeaderValues); - Assert.Contains("https://fake-data-source-endpoint", actualRequestContent, StringComparison.OrdinalIgnoreCase); - Assert.Contains("fake-data-source-api-key", actualRequestContent, StringComparison.OrdinalIgnoreCase); - Assert.Contains("fake-data-source-index", actualRequestContent, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task DefaultApiVersionShouldBeUsedAsync() - { - // Arrange - var config = this.GetConfig(); - config.CompletionApiVersion = string.Empty; - - var chatCompletion = new AzureChatCompletionWithData(config, this._httpClient); - - // Act - await chatCompletion.GetChatCompletionsAsync(new ChatHistory()); - - // Assert - var actualUri = this._messageHandlerStub.RequestUri?.AbsoluteUri; - - Assert.Contains("2023-06-01-preview", actualUri, StringComparison.OrdinalIgnoreCase); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - private AzureChatCompletionWithDataConfig GetConfig() - { - return new AzureChatCompletionWithDataConfig - { - CompletionModelId = "fake-completion-model-id", - CompletionEndpoint = "https://fake-completion-endpoint", - CompletionApiKey = "fake-completion-api-key", - CompletionApiVersion = "fake-api-version", - DataSourceEndpoint = "https://fake-data-source-endpoint", - DataSourceApiKey = "fake-data-source-api-key", - DataSourceIndex = "fake-data-source-index" - }; - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs new file mode 100644 index 000000000000..485e04e3b8c0 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs @@ -0,0 +1,199 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletionWithData; + +/// +/// Unit tests for +/// +public sealed class AzureOpenAIChatCompletionWithDataTests : IDisposable +{ + private readonly AzureOpenAIChatCompletionWithDataConfig _config; + + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AzureOpenAIChatCompletionWithDataTests() + { + this._config = this.GetConfig(); + + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? + new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient, this._mockLoggerFactory.Object) : + new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); + + // Assert + Assert.NotNull(service); + Assert.Equal("fake-completion-model-id", service.Attributes["ModelId"]); + } + + [Fact] + public async Task SpecifiedConfigurationShouldBeUsedAsync() + { + // Arrange + const string ExpectedUri = "https://fake-completion-endpoint/openai/deployments/fake-completion-model-id/extensions/chat/completions?api-version=fake-api-version"; + var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); + + // Act + await service.GetChatMessageContentsAsync([]); + + // Assert + var actualUri = this._messageHandlerStub.RequestUri?.AbsoluteUri; + var actualRequestHeaderValues = this._messageHandlerStub.RequestHeaders!.GetValues("Api-Key"); + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + + Assert.Equal(ExpectedUri, actualUri); + + Assert.Contains("fake-completion-api-key", actualRequestHeaderValues); + Assert.Contains("https://fake-data-source-endpoint", actualRequestContent, StringComparison.OrdinalIgnoreCase); + Assert.Contains("fake-data-source-api-key", actualRequestContent, StringComparison.OrdinalIgnoreCase); + Assert.Contains("fake-data-source-index", actualRequestContent, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task DefaultApiVersionShouldBeUsedAsync() + { + // Arrange + var config = this.GetConfig(); + config.CompletionApiVersion = string.Empty; + + var service = new AzureOpenAIChatCompletionWithDataService(config, this._httpClient); + + // Act + await service.GetChatMessageContentsAsync([]); + + // Assert + var actualUri = this._messageHandlerStub.RequestUri?.AbsoluteUri; + + Assert.Contains("2023-06-01-preview", actualUri, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task GetChatMessageContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_with_data_test_response.json")) + }; + + // Act + var result = await service.GetChatMessageContentsAsync([]); + + // Assert + Assert.True(result.Count > 0); + 
Assert.Equal("Test chat with data response", result[0].Content); + + var usage = result[0].Metadata?["Usage"] as ChatWithDataUsage; + + Assert.NotNull(usage); + Assert.Equal(55, usage.PromptTokens); + Assert.Equal(100, usage.CompletionTokens); + Assert.Equal(155, usage.TotalTokens); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_with_data_streaming_test_response.txt"))); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([])) + { + Assert.Equal("Test chat with data streaming response", chunk.Content); + } + } + + [Fact] + public async Task GetTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_with_data_test_response.json")) + }; + + // Act + var result = await service.GetTextContentsAsync("Prompt"); + + // Assert + Assert.True(result.Count > 0); + Assert.Equal("Test chat with data response", result[0].Text); + + var usage = result[0].Metadata?["Usage"] as ChatWithDataUsage; + + Assert.NotNull(usage); + Assert.Equal(55, usage.PromptTokens); + Assert.Equal(100, usage.CompletionTokens); + Assert.Equal(155, usage.TotalTokens); + } + + [Fact] + public async Task GetStreamingTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_with_data_streaming_test_response.txt"))); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) + { + Assert.Equal("Test chat with data streaming response", chunk.Text); + } + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } + + private AzureOpenAIChatCompletionWithDataConfig GetConfig() + { + return new AzureOpenAIChatCompletionWithDataConfig + { + CompletionModelId = "fake-completion-model-id", + CompletionEndpoint = "https://fake-completion-endpoint", + CompletionApiKey = "fake-completion-api-key", + CompletionApiVersion = "fake-api-version", + DataSourceEndpoint = "https://fake-data-source-endpoint", + DataSourceApiKey = "fake-data-source-api-key", + DataSourceIndex = "fake-data-source-index" + }; + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs new file mode 100644 index 000000000000..722ee4d0817c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. 
+
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.UnitTests.OpenAI;
+public class ChatHistoryExtensionsTests
+{
+    [Fact]
+    public async Task ItCanAddMessageFromStreamingChatContentsAsync()
+    {
+        var metadata = new Dictionary<string, object?>()
+        {
+            { "message", "something" },
+        };
+
+        var chatHistoryStreamingContents = new List<StreamingChatMessageContent>
+        {
+            new(AuthorRole.User, "Hello ", metadata: metadata),
+            new(null, ", ", metadata: metadata),
+            new(null, "I ", metadata: metadata),
+            new(null, "am ", metadata: metadata),
+            new(null, "a ", metadata: metadata),
+            new(null, "test ", metadata: metadata),
+        }.ToAsyncEnumerable();
+
+        var chatHistory = new ChatHistory();
+        var finalContent = "Hello , I am a test ";
+        string processedContent = string.Empty;
+        await foreach (var chatMessageChunk in chatHistory.AddStreamingMessageAsync(chatHistoryStreamingContents))
+        {
+            processedContent += chatMessageChunk.Content;
+        }
+
+        Assert.Single(chatHistory);
+        Assert.Equal(finalContent, processedContent);
+        Assert.Equal(finalContent, chatHistory[0].Content);
+        Assert.Equal(AuthorRole.User, chatHistory[0].Role);
+        Assert.Equal(metadata["message"], chatHistory[0].Metadata!["message"]);
+    }
+}
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/FunctionViewExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/FunctionViewExtensionsTests.cs
deleted file mode 100644
index 1da8ccb3ee1f..000000000000
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/FunctionViewExtensionsTests.cs
+++ /dev/null
@@ -1,104 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
- -using System.Collections.Generic; -using System.Linq; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; -public sealed class FunctionViewExtensionsTests -{ - [Fact] - public void ItCanConvertToOpenAIFunctionNoParameters() - { - // Arrange - var sut = new FunctionView( - Name: "foo", - PluginName: "bar", - Description: "baz"); - - // Act - var result = sut.ToOpenAIFunction(); - - // Assert - Assert.Equal(sut.Name, result.FunctionName); - Assert.Equal(sut.PluginName, result.PluginName); - Assert.Equal(sut.Description, result.Description); - Assert.Equal($"{sut.PluginName}-{sut.Name}", result.FullyQualifiedName); - } - - [Fact] - public void ItCanConvertToOpenAIFunctionNoPluginName() - { - // Arrange - var sut = new FunctionView( - Name: "foo", - PluginName: string.Empty, - Description: "baz"); - - // Act - var result = sut.ToOpenAIFunction(); - - // Assert - Assert.Equal(sut.Name, result.FunctionName); - Assert.Equal(sut.PluginName, result.PluginName); - Assert.Equal(sut.Description, result.Description); - Assert.Equal(sut.Name, result.FullyQualifiedName); - } - - [Fact] - public void ItCanConvertToOpenAIFunctionWithParameter() - { - // Arrange - var param1 = new ParameterView( - Name: "param1", - Description: "This is param1", - DefaultValue: "1", - Type: new ParameterViewType("int"), - IsRequired: false); - - var sut = new FunctionView( - Name: "foo", - PluginName: "bar", - Description: "baz", - Parameters: new List { param1 }); - - // Act - var result = sut.ToOpenAIFunction(); - var outputParam = result.Parameters.First(); - - // Assert - Assert.Equal("int", outputParam.Type); - Assert.Equal(param1.Name, outputParam.Name); - Assert.Equal("This is param1 (default value: 1)", outputParam.Description); - Assert.Equal(param1.IsRequired, outputParam.IsRequired); - } - - [Fact] - public void ItCanConvertToOpenAIFunctionWithParameterNoType() - { - // Arrange - var param1 = new ParameterView( - Name: "param1", - Description: "This is param1", - Type: null, - IsRequired: false); - - var sut = new FunctionView( - Name: "foo", - PluginName: "bar", - Description: "baz", - Parameters: new List { param1 }); - - // Act - var result = sut.ToOpenAIFunction(); - var outputParam = result.Parameters.First(); - - // Assert - Assert.Equal("string", outputParam.Type); - Assert.Equal(param1.Name, outputParam.Name); - Assert.Equal(param1.Description, outputParam.Description); - Assert.Equal(param1.IsRequired, outputParam.IsRequired); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs new file mode 100644 index 000000000000..9f609814d941 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs @@ -0,0 +1,257 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ComponentModel; +using System.Linq; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +#pragma warning disable CA1812 // Uninstantiated internal types + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; + +public sealed class KernelFunctionMetadataExtensionsTests +{ + [Fact] + public void ItCanConvertToOpenAIFunctionNoParameters() + { + // Arrange + var sut = new KernelFunctionMetadata("foo") + { + PluginName = "bar", + Description = "baz", + ReturnParameter = new KernelReturnParameterMetadata + { + Description = "retDesc", + Schema = KernelJsonSchema.Parse("{\"type\": \"object\" }"), + } + }; + + // Act + var result = sut.ToOpenAIFunction(); + + // Assert + Assert.Equal(sut.Name, result.FunctionName); + Assert.Equal(sut.PluginName, result.PluginName); + Assert.Equal(sut.Description, result.Description); + Assert.Equal($"{sut.PluginName}-{sut.Name}", result.FullyQualifiedName); + + Assert.NotNull(result.ReturnParameter); + Assert.Equal("retDesc", result.ReturnParameter.Description); + Assert.Equivalent(KernelJsonSchema.Parse("{\"type\": \"object\" }"), result.ReturnParameter.Schema); + Assert.Null(result.ReturnParameter.ParameterType); + } + + [Fact] + public void ItCanConvertToOpenAIFunctionNoPluginName() + { + // Arrange + var sut = new KernelFunctionMetadata("foo") + { + PluginName = string.Empty, + Description = "baz", + ReturnParameter = new KernelReturnParameterMetadata + { + Description = "retDesc", + Schema = KernelJsonSchema.Parse("{\"type\": \"object\" }"), + } + }; + + // Act + var result = sut.ToOpenAIFunction(); + + // Assert + Assert.Equal(sut.Name, result.FunctionName); + Assert.Equal(sut.PluginName, result.PluginName); + Assert.Equal(sut.Description, result.Description); + Assert.Equal(sut.Name, result.FullyQualifiedName); + + Assert.NotNull(result.ReturnParameter); + Assert.Equal("retDesc", result.ReturnParameter.Description); + Assert.Equivalent(KernelJsonSchema.Parse("{\"type\": \"object\" }"), result.ReturnParameter.Schema); + Assert.Null(result.ReturnParameter.ParameterType); + } + + [Theory] + [InlineData(false)] + [InlineData(true)] + public void ItCanConvertToOpenAIFunctionWithParameter(bool withSchema) + { + // Arrange + var param1 = new KernelParameterMetadata("param1") + { + Description = "This is param1", + DefaultValue = "1", + ParameterType = typeof(int), + IsRequired = false, + Schema = withSchema ? 
KernelJsonSchema.Parse("{\"type\":\"integer\"}") : null, + }; + + var sut = new KernelFunctionMetadata("foo") + { + PluginName = "bar", + Description = "baz", + Parameters = new[] { param1 }, + ReturnParameter = new KernelReturnParameterMetadata + { + Description = "retDesc", + Schema = KernelJsonSchema.Parse("{\"type\": \"object\" }"), + } + }; + + // Act + var result = sut.ToOpenAIFunction(); + var outputParam = result.Parameters![0]; + + // Assert + Assert.Equal(param1.Name, outputParam.Name); + Assert.Equal("This is param1 (default value: 1)", outputParam.Description); + Assert.Equal(param1.IsRequired, outputParam.IsRequired); + Assert.NotNull(outputParam.Schema); + Assert.Equal("integer", outputParam.Schema.RootElement.GetProperty("type").GetString()); + + Assert.NotNull(result.ReturnParameter); + Assert.Equal("retDesc", result.ReturnParameter.Description); + Assert.Equivalent(KernelJsonSchema.Parse("{\"type\": \"object\" }"), result.ReturnParameter.Schema); + Assert.Null(result.ReturnParameter.ParameterType); + } + + [Fact] + public void ItCanConvertToOpenAIFunctionWithParameterNoType() + { + // Arrange + var param1 = new KernelParameterMetadata("param1") { Description = "This is param1" }; + + var sut = new KernelFunctionMetadata("foo") + { + PluginName = "bar", + Description = "baz", + Parameters = new[] { param1 }, + ReturnParameter = new KernelReturnParameterMetadata + { + Description = "retDesc", + Schema = KernelJsonSchema.Parse("{\"type\": \"object\" }"), + } + }; + + // Act + var result = sut.ToOpenAIFunction(); + var outputParam = result.Parameters![0]; + + // Assert + Assert.Equal(param1.Name, outputParam.Name); + Assert.Equal(param1.Description, outputParam.Description); + Assert.Equal(param1.IsRequired, outputParam.IsRequired); + + Assert.NotNull(result.ReturnParameter); + Assert.Equal("retDesc", result.ReturnParameter.Description); + Assert.Equivalent(KernelJsonSchema.Parse("{\"type\": \"object\" }"), result.ReturnParameter.Schema); + Assert.Null(result.ReturnParameter.ParameterType); + } + + [Fact] + public void ItCanConvertToOpenAIFunctionWithNoReturnParameterType() + { + // Arrange + var param1 = new KernelParameterMetadata("param1") + { + Description = "This is param1", + ParameterType = typeof(int), + }; + + var sut = new KernelFunctionMetadata("foo") + { + PluginName = "bar", + Description = "baz", + Parameters = new[] { param1 }, + }; + + // Act + var result = sut.ToOpenAIFunction(); + var outputParam = result.Parameters![0]; + + // Assert + Assert.Equal(param1.Name, outputParam.Name); + Assert.Equal(param1.Description, outputParam.Description); + Assert.Equal(param1.IsRequired, outputParam.IsRequired); + Assert.NotNull(outputParam.Schema); + Assert.Equal("integer", outputParam.Schema.RootElement.GetProperty("type").GetString()); + } + + [Fact] + public void ItCanCreateValidOpenAIFunctionManualForPlugin() + { + // Arrange + var kernel = new Kernel(); + kernel.Plugins.AddFromType("MyPlugin"); + + var functionMetadata = kernel.Plugins["MyPlugin"].First().Metadata; + + var sut = functionMetadata.ToOpenAIFunction(); + + // Act + var result = sut.ToFunctionDefinition(); + + // Assert + Assert.NotNull(result); + Assert.Equal( + "{\"type\":\"object\",\"required\":[\"parameter1\",\"parameter2\",\"parameter3\"],\"properties\":{\"parameter1\":{\"type\":\"string\",\"description\":\"String parameter\"},\"parameter2\":{\"enum\":[\"Value1\",\"Value2\"],\"description\":\"Enum parameter\"},\"parameter3\":{\"type\":\"string\",\"format\":\"date-time\",\"description\":\"DateTime 
parameter\"}}}", + result.Parameters.ToString() + ); + } + + [Fact] + public void ItCanCreateValidOpenAIFunctionManualForPrompt() + { + // Arrange + var promptTemplateConfig = new PromptTemplateConfig("Hello AI") + { + Description = "My sample function." + }; + promptTemplateConfig.InputVariables.Add(new InputVariable + { + Name = "parameter1", + Description = "String parameter", + JsonSchema = "{\"type\":\"string\",\"description\":\"String parameter\"}" + }); + promptTemplateConfig.InputVariables.Add(new InputVariable + { + Name = "parameter2", + Description = "Enum parameter", + JsonSchema = "{\"enum\":[\"Value1\",\"Value2\"],\"description\":\"Enum parameter\"}" + }); + var function = KernelFunctionFactory.CreateFromPrompt(promptTemplateConfig); + var functionMetadata = function.Metadata; + var sut = functionMetadata.ToOpenAIFunction(); + + // Act + var result = sut.ToFunctionDefinition(); + + // Assert + Assert.NotNull(result); + Assert.Equal( + "{\"type\":\"object\",\"required\":[\"parameter1\",\"parameter2\"],\"properties\":{\"parameter1\":{\"type\":\"string\",\"description\":\"String parameter\"},\"parameter2\":{\"enum\":[\"Value1\",\"Value2\"],\"description\":\"Enum parameter\"}}}", + result.Parameters.ToString() + ); + } + + private enum MyEnum + { + Value1, + Value2 + } + + private sealed class MyPlugin + { + [KernelFunction, Description("My sample function.")] + public string MyFunction( + [Description("String parameter")] string parameter1, + [Description("Enum parameter")] MyEnum parameter2, + [Description("DateTime parameter")] DateTime parameter3 + ) + { + return "return"; + } + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionResponseTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionResponseTests.cs deleted file mode 100644 index ee2b6d35dbd4..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionResponseTests.cs +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; -public sealed class OpenAIFunctionResponseTests -{ - [Fact] - public void ItCanConvertFromFunctionCallWithPluginName() - { - // Arrange - var sut = new FunctionCall("foo-bar", "{}"); - - // Act - var result = OpenAIFunctionResponse.FromFunctionCall(sut); - - // Assert - Assert.Equal("foo", result.PluginName); - Assert.Equal("bar", result.FunctionName); - } - - [Fact] - public void ItCanConvertFromFunctionCallWithNoPluginName() - { - // Arrange - var sut = new FunctionCall("foo", "{}"); - - // Act - var result = OpenAIFunctionResponse.FromFunctionCall(sut); - - // Assert - Assert.Equal(string.Empty, result.PluginName); - Assert.Equal("foo", result.FunctionName); - } - - [Fact] - public void ItCanConvertFromFunctionCallWithNoParameters() - { - // Arrange - var sut = new FunctionCall("foo", "{}"); - - // Act - var result = OpenAIFunctionResponse.FromFunctionCall(sut); - - // Assert - Assert.Equal(new Dictionary(), result.Parameters); - } - - [Fact] - public void ItCanConvertFromFunctionCallWithParameters() - { - // Arrange - var sut = new FunctionCall("foo", "{ \"param1\": \"bar\", \"param2\": 5 }"); - - // Act - var result = OpenAIFunctionResponse.FromFunctionCall(sut); - - // Assert - Assert.True(result.Parameters.TryGetValue("param1", out object? 
value1)); - Assert.Equal("bar", value1.ToString()); - Assert.True(result.Parameters.TryGetValue("param2", out object? value2)); - Assert.Equal("5", value2.ToString()); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs index 112f73a17c69..ea763440c43e 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs @@ -1,22 +1,55 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text.Json; using Azure.AI.OpenAI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.AzureSdk; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; + public sealed class OpenAIFunctionTests { + [Theory] + [InlineData(null, null, "", "")] + [InlineData("name", "description", "name", "description")] + public void ItInitializesOpenAIFunctionParameterCorrectly(string? name, string? description, string expectedName, string expectedDescription) + { + // Arrange & Act + var schema = KernelJsonSchema.Parse("{\"type\": \"object\" }"); + var functionParameter = new OpenAIFunctionParameter(name, description, true, typeof(string), schema); + + // Assert + Assert.Equal(expectedName, functionParameter.Name); + Assert.Equal(expectedDescription, functionParameter.Description); + Assert.True(functionParameter.IsRequired); + Assert.Equal(typeof(string), functionParameter.ParameterType); + Assert.Same(schema, functionParameter.Schema); + } + + [Theory] + [InlineData(null, "")] + [InlineData("description", "description")] + public void ItInitializesOpenAIFunctionReturnParameterCorrectly(string? 
description, string expectedDescription) + { + // Arrange & Act + var schema = KernelJsonSchema.Parse("{\"type\": \"object\" }"); + var functionParameter = new OpenAIFunctionReturnParameter(description, typeof(string), schema); + + // Assert + Assert.Equal(expectedDescription, functionParameter.Description); + Assert.Equal(typeof(string), functionParameter.ParameterType); + Assert.Same(schema, functionParameter.Schema); + } + [Fact] public void ItCanConvertToFunctionDefinitionWithNoPluginName() { // Arrange - var sut = new OpenAIFunction - { - FunctionName = "myfunc", - PluginName = string.Empty, - Description = "This is a description of the function.", - }; + OpenAIFunction sut = KernelFunctionFactory.CreateFromMethod(() => { }, "myfunc", "This is a description of the function.").Metadata.ToOpenAIFunction(); // Act FunctionDefinition result = sut.ToFunctionDefinition(); @@ -26,16 +59,27 @@ public void ItCanConvertToFunctionDefinitionWithNoPluginName() Assert.Equal(sut.Description, result.Description); } + [Fact] + public void ItCanConvertToFunctionDefinitionWithNullParameters() + { + // Arrange + OpenAIFunction sut = new("plugin", "function", "description", null, null); + + // Act + var result = sut.ToFunctionDefinition(); + + // Assert + Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{}}", result.Parameters.ToString()); + } + [Fact] public void ItCanConvertToFunctionDefinitionWithPluginName() { // Arrange - var sut = new OpenAIFunction + OpenAIFunction sut = KernelPluginFactory.CreateFromFunctions("myplugin", new[] { - FunctionName = "myfunc", - PluginName = "myplugin", - Description = "This is a description of the function.", - }; + KernelFunctionFactory.CreateFromMethod(() => { }, "myfunc", "This is a description of the function.") + }).GetFunctionsMetadata()[0].ToOpenAIFunction(); // Act FunctionDefinition result = sut.ToFunctionDefinition(); @@ -44,4 +88,102 @@ public void ItCanConvertToFunctionDefinitionWithPluginName() Assert.Equal("myplugin-myfunc", result.Name); Assert.Equal(sut.Description, result.Description); } + + [Fact] + public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndReturnParameterType() + { + string expectedParameterSchema = "{ \"type\": \"object\", \"required\": [\"param1\", \"param2\"], \"properties\": { \"param1\": { \"type\": \"string\", \"description\": \"String param 1\" }, \"param2\": { \"type\": \"integer\", \"description\": \"Int param 2\" } } } "; + + KernelPlugin plugin = KernelPluginFactory.CreateFromFunctions("Tests", new[] + { + KernelFunctionFactory.CreateFromMethod( + [return: Description("My test Result")] ([Description("String param 1")] string param1, [Description("Int param 2")] int param2) => "", + "TestFunction", + "My test function") + }); + + OpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToOpenAIFunction(); + + FunctionDefinition functionDefinition = sut.ToFunctionDefinition(); + + var exp = JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)); + var act = JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters)); + + Assert.NotNull(functionDefinition); + Assert.Equal("Tests-TestFunction", functionDefinition.Name); + Assert.Equal("My test function", functionDefinition.Description); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters))); + } + + [Fact] + public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndNoReturnParameterType() + { + 
string expectedParameterSchema = "{ \"type\": \"object\", \"required\": [\"param1\", \"param2\"], \"properties\": { \"param1\": { \"type\": \"string\", \"description\": \"String param 1\" }, \"param2\": { \"type\": \"integer\", \"description\": \"Int param 2\" } } } ";
+
+        KernelPlugin plugin = KernelPluginFactory.CreateFromFunctions("Tests", new[]
+        {
+            KernelFunctionFactory.CreateFromMethod(
+                [return: Description("My test Result")] ([Description("String param 1")] string param1, [Description("Int param 2")] int param2) => { },
+                "TestFunction",
+                "My test function")
+        });
+
+        OpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToOpenAIFunction();
+
+        FunctionDefinition functionDefinition = sut.ToFunctionDefinition();
+
+        Assert.NotNull(functionDefinition);
+        Assert.Equal("Tests-TestFunction", functionDefinition.Name);
+        Assert.Equal("My test function", functionDefinition.Description);
+        Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters)));
+    }
+
+    [Fact]
+    public void ItCanConvertToFunctionDefinitionsWithNoParameterTypes()
+    {
+        // Arrange
+        OpenAIFunction f = KernelFunctionFactory.CreateFromMethod(
+            () => { },
+            parameters: new[] { new KernelParameterMetadata("param1") }).Metadata.ToOpenAIFunction();
+
+        // Act
+        FunctionDefinition result = f.ToFunctionDefinition();
+        ParametersData pd = JsonSerializer.Deserialize<ParametersData>(result.Parameters.ToString())!;
+
+        // Assert
+        Assert.NotNull(pd.properties);
+        Assert.Single(pd.properties);
+        Assert.Equal(
+            JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"string\" }")),
+            JsonSerializer.Serialize(pd.properties.First().Value.RootElement));
+    }
+
+    [Fact]
+    public void ItCanConvertToFunctionDefinitionsWithNoParameterTypesButWithDescriptions()
+    {
+        // Arrange
+        OpenAIFunction f = KernelFunctionFactory.CreateFromMethod(
+            () => { },
+            parameters: new[] { new KernelParameterMetadata("param1") { Description = "something neat" } }).Metadata.ToOpenAIFunction();
+
+        // Act
+        FunctionDefinition result = f.ToFunctionDefinition();
+        ParametersData pd = JsonSerializer.Deserialize<ParametersData>(result.Parameters.ToString())!;
+
+        // Assert
+        Assert.NotNull(pd.properties);
+        Assert.Single(pd.properties);
+        Assert.Equal(
+            JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"string\", \"description\":\"something neat\" }")),
+            JsonSerializer.Serialize(pd.properties.First().Value.RootElement));
+    }
+
+#pragma warning disable CA1812 // uninstantiated internal class
+    private sealed class ParametersData
+    {
+        public string? type { get; set; }
+        public string[]? required { get; set; }
+        public Dictionary<string, KernelJsonSchema>? properties { get; set; }
+    }
+#pragma warning restore CA1812
 }
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ImageGeneration/AzureOpenAIImageGenerationTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ImageGeneration/AzureOpenAIImageGenerationTests.cs
deleted file mode 100644
index 4c8ce1784dd3..000000000000
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ImageGeneration/AzureOpenAIImageGenerationTests.cs
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
- -using System.Net; -using System.Net.Http; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.ImageGeneration; -using Moq; -using Moq.Protected; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.ImageGeneration; - -/// -/// Unit tests for class. -/// -public sealed class AzureOpenAIImageGenerationTests -{ - /// - /// Returns a mocked instance of . - /// - /// The to return for image generation. - /// The to return for image result. - /// A mocked instance. - private static HttpClient GetHttpClientMock(HttpResponseMessage generationResult, HttpResponseMessage imageResult) - { - var httpClientHandler = new Mock(); - - httpClientHandler - .Protected() - .Setup>( - "SendAsync", - ItExpr.Is(request => request.RequestUri!.AbsolutePath.Contains("openai/images/generations:submit")), - ItExpr.IsAny()) - .ReturnsAsync(generationResult); - - httpClientHandler - .Protected() - .Setup>( - "SendAsync", - ItExpr.Is(request => request.RequestUri!.AbsolutePath.Contains("openai/operations/images")), - ItExpr.IsAny()) - .ReturnsAsync(imageResult); - - return new HttpClient(httpClientHandler.Object); - } - - /// - /// Creates an instance of to return with test data. - /// - /// The HTTP status code for the response. - /// The name of the test response file. - /// An instance of with the specified test data. - private static HttpResponseMessage CreateResponseMessage(HttpStatusCode statusCode, string fileName) - { - var response = new HttpResponseMessage(statusCode); - response.Content = new StringContent(OpenAITestHelper.GetTestResponse(fileName), Encoding.UTF8, "application/json"); - return response; - } - - [Fact] - public async Task ItShouldGenerateImageSuccussedAsync() - { - //Arrange - using var generateResult = CreateResponseMessage(HttpStatusCode.Accepted, "image_generation_test_response.json"); - using var imageResult = CreateResponseMessage(HttpStatusCode.OK, "image_result_test_response.json"); - using var mockHttpClient = GetHttpClientMock(generateResult, imageResult); - - var generation = new AzureOpenAIImageGeneration("https://fake-endpoint/", "fake-api-key", mockHttpClient); - - //Act - var result = await generation.GenerateImageAsync("description", 256, 256); - - //Assert - Assert.NotNull(result); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs new file mode 100644 index 000000000000..08bde153aa4a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Azure.Core; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Memory; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI; + +/// +/// Unit tests for class. 
+///
+public sealed class OpenAIMemoryBuilderExtensionsTests
+{
+    private readonly Mock<IMemoryStore> _mockMemoryStore = new();
+
+    [Fact]
+    public void AzureOpenAITextEmbeddingGenerationWithApiKeyWorksCorrectly()
+    {
+        // Arrange
+        var builder = new MemoryBuilder();
+
+        // Act
+        var memory = builder
+            .WithAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key", "model-id")
+            .WithMemoryStore(this._mockMemoryStore.Object)
+            .Build();
+
+        // Assert
+        Assert.NotNull(memory);
+    }
+
+    [Fact]
+    public void AzureOpenAITextEmbeddingGenerationWithTokenCredentialWorksCorrectly()
+    {
+        // Arrange
+        var builder = new MemoryBuilder();
+        var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+
+        // Act
+        var memory = builder
+            .WithAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials, "model-id")
+            .WithMemoryStore(this._mockMemoryStore.Object)
+            .Build();
+
+        // Assert
+        Assert.NotNull(memory);
+    }
+
+    [Fact]
+    public void OpenAITextEmbeddingGenerationWithApiKeyWorksCorrectly()
+    {
+        // Arrange
+        var builder = new MemoryBuilder();
+
+        // Act
+        var memory = builder
+            .WithOpenAITextEmbeddingGeneration("model-id", "api-key", "organization-id")
+            .WithMemoryStore(this._mockMemoryStore.Object)
+            .Build();
+
+        // Assert
+        Assert.NotNull(memory);
+    }
+}
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs
new file mode 100644
index 000000000000..1cdee512adbc
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs
@@ -0,0 +1,186 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Text.Json;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.UnitTests.OpenAI;
+
+/// <summary>
+/// Unit tests of OpenAIPromptExecutionSettings
+/// </summary>
+public class OpenAIPromptExecutionSettingsTests
+{
+    [Fact]
+    public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
+    {
+        // Arrange
+        // Act
+        OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(null, 128);
+
+        // Assert
+        Assert.NotNull(executionSettings);
+        Assert.Equal(1, executionSettings.Temperature);
+        Assert.Equal(1, executionSettings.TopP);
+        Assert.Equal(0, executionSettings.FrequencyPenalty);
+        Assert.Equal(0, executionSettings.PresencePenalty);
+        Assert.Equal(1, executionSettings.ResultsPerPrompt);
+        Assert.Null(executionSettings.StopSequences);
+        Assert.Null(executionSettings.TokenSelectionBiases);
+        Assert.Equal(128, executionSettings.MaxTokens);
+    }
+
+    [Fact]
+    public void ItUsesExistingOpenAIExecutionSettings()
+    {
+        // Arrange
+        OpenAIPromptExecutionSettings actualSettings = new()
+        {
+            Temperature = 0.7,
+            TopP = 0.7,
+            FrequencyPenalty = 0.7,
+            PresencePenalty = 0.7,
+            ResultsPerPrompt = 2,
+            StopSequences = new string[] { "foo", "bar" },
+            ChatSystemPrompt = "chat system prompt",
+            MaxTokens = 128,
+            TokenSelectionBiases = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
+        };
+
+        // Act
+        OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings);
+
+        // Assert
+        Assert.NotNull(executionSettings);
+        Assert.Equal(actualSettings, executionSettings);
+    }
+
+    [Fact]
+    public void ItCanUseOpenAIExecutionSettings()
+    {
+        // Arrange
+        PromptExecutionSettings actualSettings = new()
+        {
+            ExtensionData = new() {
+                { "max_tokens", 1000 },
+                { "temperature", 0 }
+            }
+        };
+
+        // Act
+        OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null);
+
+        // Assert
+        Assert.NotNull(executionSettings);
+        Assert.Equal(1000, executionSettings.MaxTokens);
+        Assert.Equal(0, executionSettings.Temperature);
+    }
+
+    [Fact]
+    public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase()
+    {
+        // Arrange
+        PromptExecutionSettings actualSettings = new()
+        {
+            ExtensionData = new Dictionary<string, object>()
+            {
+                { "temperature", 0.7 },
+                { "top_p", 0.7 },
+                { "frequency_penalty", 0.7 },
+                { "presence_penalty", 0.7 },
+                { "results_per_prompt", 2 },
+                { "stop_sequences", new [] { "foo", "bar" } },
+                { "chat_system_prompt", "chat system prompt" },
+                { "max_tokens", 128 },
+                { "token_selection_biases", new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } } },
+                { "seed", 123456 },
+            }
+        };
+
+        // Act
+        OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null);
+
+        // Assert
+        AssertExecutionSettings(executionSettings);
+        Assert.Equal(123456, executionSettings.Seed);
+    }
+
+    [Fact]
+    public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings()
+    {
+        // Arrange
+        PromptExecutionSettings actualSettings = new()
+        {
+            ExtensionData = new Dictionary<string, object>()
+            {
+                { "temperature", "0.7" },
+                { "top_p", "0.7" },
+                { "frequency_penalty", "0.7" },
+                { "presence_penalty", "0.7" },
+                { "results_per_prompt", "2" },
+                { "stop_sequences", new [] { "foo", "bar" } },
+                { "chat_system_prompt", "chat system prompt" },
+                { "max_tokens", "128" },
+                { "token_selection_biases", new Dictionary<string, string>() { { "1", "2" }, { "3", "4" } } }
+            }
+        };
+
+        // Act
+        OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null);
+
+        // Assert
+        AssertExecutionSettings(executionSettings);
+    }
+
+    [Fact]
+    public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase()
+    {
+        // Arrange
+        var json = @"{
+    ""temperature"": 0.7,
+    ""top_p"": 0.7,
+    ""frequency_penalty"": 0.7,
+    ""presence_penalty"": 0.7,
+    ""results_per_prompt"": 2,
+    ""stop_sequences"": [ ""foo"", ""bar"" ],
+    ""chat_system_prompt"": ""chat system prompt"",
+    ""token_selection_biases"": { ""1"": 2, ""3"": 4 },
+    ""max_tokens"": 128
+}";
+        var actualSettings = JsonSerializer.Deserialize<OpenAIPromptExecutionSettings>(json);
+
+        // Act
+        OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings);
+
+        // Assert
+        AssertExecutionSettings(executionSettings);
+    }
+
+    [Theory]
+    [InlineData("", "Assistant is a large language model.")]
+    [InlineData("System prompt", "System prompt")]
+    public void ItUsesCorrectChatSystemPrompt(string chatSystemPrompt, string expectedChatSystemPrompt)
+    {
+        // Arrange & Act
+        var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = chatSystemPrompt };
+
+        // Assert
+        Assert.Equal(expectedChatSystemPrompt, settings.ChatSystemPrompt);
+    }
+
+    private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings)
+    {
+        Assert.NotNull(executionSettings);
+        Assert.Equal(0.7, executionSettings.Temperature);
+        Assert.Equal(0.7, executionSettings.TopP);
+        Assert.Equal(0.7, executionSettings.FrequencyPenalty);
+        Assert.Equal(0.7, executionSettings.PresencePenalty);
+        Assert.Equal(2, executionSettings.ResultsPerPrompt);
+        Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences);
+
Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt); + Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases); + Assert.Equal(128, executionSettings.MaxTokens); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIRequestSettingsConverterTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIRequestSettingsConverterTests.cs deleted file mode 100644 index a7d1feec60da..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIRequestSettingsConverterTests.cs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Unit tests of OpenAIRequestSettingsConverter -/// -public class OpenAIRequestSettingsConverterTests -{ - [Fact] - public void ItDeserialisesOpenAIRequestSettingsWithCorrectDefaults() - { - // Arrange - JsonSerializerOptions options = new(); - options.Converters.Add(new OpenAIRequestSettingsConverter()); - var json = "{}"; - - // Act - var requestSettings = JsonSerializer.Deserialize(json, options); - - // Assert - Assert.NotNull(requestSettings); - Assert.Equal(0, requestSettings.Temperature); - Assert.Equal(0, requestSettings.TopP); - Assert.Equal(0, requestSettings.FrequencyPenalty); - Assert.Equal(0, requestSettings.PresencePenalty); - Assert.Equal(1, requestSettings.ResultsPerPrompt); - Assert.Equal(Array.Empty(), requestSettings.StopSequences); - Assert.Equal(new Dictionary(), requestSettings.TokenSelectionBiases); - Assert.Null(requestSettings.ServiceId); - Assert.Null(requestSettings.MaxTokens); - } - - [Fact] - public void ItDeserialisesOpenAIRequestSettingsWithSnakeCaseNaming() - { - // Arrange - JsonSerializerOptions options = new(); - options.Converters.Add(new OpenAIRequestSettingsConverter()); - var json = @"{ - ""temperature"": 0.7, - ""top_p"": 0.7, - ""frequency_penalty"": 0.7, - ""presence_penalty"": 0.7, - ""results_per_prompt"": 2, - ""stop_sequences"": [ ""foo"", ""bar"" ], - ""token_selection_biases"": { ""1"": 2, ""3"": 4 }, - ""service_id"": ""service"", - ""max_tokens"": 128 -}"; - - // Act - var requestSettings = JsonSerializer.Deserialize(json, options); - - // Assert - Assert.NotNull(requestSettings); - Assert.Equal(0.7, requestSettings.Temperature); - Assert.Equal(0.7, requestSettings.TopP); - Assert.Equal(0.7, requestSettings.FrequencyPenalty); - Assert.Equal(0.7, requestSettings.PresencePenalty); - Assert.Equal(2, requestSettings.ResultsPerPrompt); - Assert.Equal(new string[] { "foo", "bar" }, requestSettings.StopSequences); - Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, requestSettings.TokenSelectionBiases); - Assert.Equal("service", requestSettings.ServiceId); - Assert.Equal(128, requestSettings.MaxTokens); - } - - [Fact] - public void ItDeserialisesOpenAIRequestSettingsWithPascalCaseNaming() - { - // Arrange - JsonSerializerOptions options = new(); - options.Converters.Add(new OpenAIRequestSettingsConverter()); - var json = @"{ - ""Temperature"": 0.7, - ""TopP"": 0.7, - ""FrequencyPenalty"": 0.7, - ""PresencePenalty"": 0.7, - ""ResultsPerPrompt"": 2, - ""StopSequences"": [ ""foo"", ""bar"" ], - ""TokenSelectionBiases"": { ""1"": 2, ""3"": 4 }, - ""ServiceId"": ""service"", - ""MaxTokens"": 128 -}"; - - // Act - var requestSettings = JsonSerializer.Deserialize(json, options); - - // Assert - 
Assert.NotNull(requestSettings); - Assert.Equal(0.7, requestSettings.Temperature); - Assert.Equal(0.7, requestSettings.TopP); - Assert.Equal(0.7, requestSettings.FrequencyPenalty); - Assert.Equal(0.7, requestSettings.PresencePenalty); - Assert.Equal(2, requestSettings.ResultsPerPrompt); - Assert.Equal(new string[] { "foo", "bar" }, requestSettings.StopSequences); - Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, requestSettings.TokenSelectionBiases); - Assert.Equal("service", requestSettings.ServiceId); - Assert.Equal(128, requestSettings.MaxTokens); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIRequestSettingsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIRequestSettingsTests.cs deleted file mode 100644 index bcdbb64603a4..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIRequestSettingsTests.cs +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Text; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Unit tests of OpenAIRequestSettings -/// -public class OpenAIRequestSettingsTests -{ - [Fact] - public void ItCreatesOpenAIRequestSettingsWithCorrectDefaults() - { - // Arrange - // Act - OpenAIRequestSettings requestSettings = OpenAIRequestSettings.FromRequestSettings(null, 128); - - // Assert - Assert.NotNull(requestSettings); - Assert.Equal(0, requestSettings.Temperature); - Assert.Equal(0, requestSettings.TopP); - Assert.Equal(0, requestSettings.FrequencyPenalty); - Assert.Equal(0, requestSettings.PresencePenalty); - Assert.Equal(1, requestSettings.ResultsPerPrompt); - Assert.Equal(Array.Empty(), requestSettings.StopSequences); - Assert.Equal(new Dictionary(), requestSettings.TokenSelectionBiases); - Assert.Null(requestSettings.ServiceId); - Assert.Equal(128, requestSettings.MaxTokens); - } - - [Fact] - public void ItUsesExistingOpenAIRequestSettings() - { - // Arrange - OpenAIRequestSettings actualSettings = new() - { - Temperature = 0.7, - TopP = 0.7, - FrequencyPenalty = 0.7, - PresencePenalty = 0.7, - ResultsPerPrompt = 2, - StopSequences = new string[] { "foo", "bar" }, - ChatSystemPrompt = "chat system prompt", - MaxTokens = 128, - ServiceId = "service", - TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } }, - }; - - // Act - OpenAIRequestSettings requestSettings = OpenAIRequestSettings.FromRequestSettings(actualSettings); - - // Assert - Assert.NotNull(requestSettings); - Assert.Equal(actualSettings, requestSettings); - } - - [Fact] - public void ItCanUseOpenAIRequestSettings() - { - // Arrange - AIRequestSettings actualSettings = new() - { - ServiceId = "service", - }; - - // Act - OpenAIRequestSettings requestSettings = OpenAIRequestSettings.FromRequestSettings(actualSettings, null); - - // Assert - Assert.NotNull(requestSettings); - Assert.Equal(actualSettings.ServiceId, requestSettings.ServiceId); - } - - [Fact] - public void ItCreatesOpenAIRequestSettingsFromExtraPropertiesSnakeCase() - { - // Arrange - AIRequestSettings actualSettings = new() - { - ServiceId = "service", - ExtensionData = new Dictionary() - { - { "temperature", 0.7 }, - { "top_p", 0.7 }, - { "frequency_penalty", 0.7 }, - { "presence_penalty", 0.7 }, - { "results_per_prompt", 2 }, - { "stop_sequences", new [] { "foo", "bar" } }, - { "chat_system_prompt", "chat system prompt" }, - { 
"max_tokens", 128 }, - { "service_id", "service" }, - { "token_selection_biases", new Dictionary() { { 1, 2 }, { 3, 4 } } } - } - }; - - // Act - OpenAIRequestSettings requestSettings = OpenAIRequestSettings.FromRequestSettings(actualSettings, null); - - // Assert - AssertRequestSettings(requestSettings); - } - - [Fact] - public void ItCreatesOpenAIRequestSettingsFromExtraPropertiesPascalCase() - { - // Arrange - AIRequestSettings actualSettings = new() - { - ServiceId = "service", - ExtensionData = new Dictionary() - { - { "Temperature", 0.7 }, - { "TopP", 0.7 }, - { "FrequencyPenalty", 0.7 }, - { "PresencePenalty", 0.7 }, - { "ResultsPerPrompt", 2 }, - { "StopSequences", new[] { "foo", "bar" } }, - { "ChatSystemPrompt", "chat system prompt" }, - { "MaxTokens", 128 }, - { "ServiceId", "service" }, - { "TokenSelectionBiases", new Dictionary() { { 1, 2 }, { 3, 4 } } } - } - }; - - // Act - OpenAIRequestSettings requestSettings = OpenAIRequestSettings.FromRequestSettings(actualSettings); - - // Assert - AssertRequestSettings(requestSettings); - } - - [Fact] - public void ItCreatesOpenAIRequestSettingsFromJsonSnakeCase() - { - // Arrange - var json = @"{ - ""temperature"": 0.7, - ""top_p"": 0.7, - ""frequency_penalty"": 0.7, - ""presence_penalty"": 0.7, - ""results_per_prompt"": 2, - ""stop_sequences"": [ ""foo"", ""bar"" ], - ""chat_system_prompt"": ""chat system prompt"", - ""token_selection_biases"": { ""1"": 2, ""3"": 4 }, - ""service_id"": ""service"", - ""max_tokens"": 128 -}"; - var actualSettings = Json.Deserialize(json); - - // Act - OpenAIRequestSettings requestSettings = OpenAIRequestSettings.FromRequestSettings(actualSettings); - - // Assert - AssertRequestSettings(requestSettings); - } - - [Fact] - public void ItCreatesOpenAIRequestSettingsFromJsonPascalCase() - { - // Arrange - var json = @"{ - ""Temperature"": 0.7, - ""TopP"": 0.7, - ""FrequencyPenalty"": 0.7, - ""PresencePenalty"": 0.7, - ""ResultsPerPrompt"": 2, - ""StopSequences"": [ ""foo"", ""bar"" ], - ""ChatSystemPrompt"": ""chat system prompt"", - ""TokenSelectionBiases"": { ""1"": 2, ""3"": 4 }, - ""ServiceId"": ""service"", - ""MaxTokens"": 128 -}"; - var actualSettings = Json.Deserialize(json); - - // Act - OpenAIRequestSettings requestSettings = OpenAIRequestSettings.FromRequestSettings(actualSettings); - - // Assert - AssertRequestSettings(requestSettings); - } - - private static void AssertRequestSettings(OpenAIRequestSettings requestSettings) - { - Assert.NotNull(requestSettings); - Assert.Equal(0.7, requestSettings.Temperature); - Assert.Equal(0.7, requestSettings.TopP); - Assert.Equal(0.7, requestSettings.FrequencyPenalty); - Assert.Equal(0.7, requestSettings.PresencePenalty); - Assert.Equal(2, requestSettings.ResultsPerPrompt); - Assert.Equal(new string[] { "foo", "bar" }, requestSettings.StopSequences); - Assert.Equal("chat system prompt", requestSettings.ChatSystemPrompt); - Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, requestSettings.TokenSelectionBiases); - Assert.Equal("service", requestSettings.ServiceId); - Assert.Equal(128, requestSettings.MaxTokens); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..0d263e93659b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs @@ -0,0 +1,497 @@ +// Copyright (c) Microsoft. All rights reserved. 
+
+using System;
+using System.Net.Http;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.TextGeneration;
+using Microsoft.SemanticKernel.TextToImage;
+using Xunit;
+
+namespace SemanticKernel.Connectors.UnitTests.OpenAI;
+
+/// <summary>
+/// Unit tests for <see cref="OpenAIServiceCollectionExtensions"/> class.
+/// </summary>
+public sealed class OpenAIServiceCollectionExtensionsTests : IDisposable
+{
+    private readonly HttpClient _httpClient;
+
+    public OpenAIServiceCollectionExtensionsTests()
+    {
+        this._httpClient = new HttpClient();
+    }
+
+    [Theory]
+    [InlineData(InitializationType.ApiKey)]
+    [InlineData(InitializationType.TokenCredential)]
+    [InlineData(InitializationType.OpenAIClientInline)]
+    [InlineData(InitializationType.OpenAIClientInServiceProvider)]
+    public void KernelBuilderAddAzureOpenAITextGenerationAddsValidService(InitializationType type)
+    {
+        // Arrange
+        var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+        var client = new OpenAIClient("key");
+        var builder = Kernel.CreateBuilder();
+
+        builder.Services.AddSingleton(client);
+
+        // Act
+        builder = type switch
+        {
+            InitializationType.ApiKey => builder.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", "api-key"),
+            InitializationType.TokenCredential => builder.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", credentials),
+            InitializationType.OpenAIClientInline => builder.AddAzureOpenAITextGeneration("deployment-name", client),
+            InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAITextGeneration("deployment-name"),
+            _ => builder
+        };
+
+        // Assert
+        var service = builder.Build().GetRequiredService<ITextGenerationService>();
+
+        Assert.NotNull(service);
+        Assert.True(service is AzureOpenAITextGenerationService);
+    }
+
+    [Theory]
+    [InlineData(InitializationType.ApiKey)]
+    [InlineData(InitializationType.TokenCredential)]
+    [InlineData(InitializationType.OpenAIClientInline)]
+    [InlineData(InitializationType.OpenAIClientInServiceProvider)]
+    public void ServiceCollectionAddAzureOpenAITextGenerationAddsValidService(InitializationType type)
+    {
+        // Arrange
+        var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+        var client = new OpenAIClient("key");
+        var builder = Kernel.CreateBuilder();
+
+        builder.Services.AddSingleton(client);
+
+        // Act
+        IServiceCollection collection = type switch
+        {
+            InitializationType.ApiKey => builder.Services.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", "api-key"),
+            InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", credentials),
+            InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAITextGeneration("deployment-name", client),
+            InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAITextGeneration("deployment-name"),
+            _ => builder.Services
+        };
+
+        // Assert
+        var service = builder.Build().GetRequiredService<ITextGenerationService>();
+
+        Assert.NotNull(service);
+        Assert.True(service is AzureOpenAITextGenerationService);
+    }
+
+    [Theory]
+    [InlineData(InitializationType.ApiKey)]
+    [InlineData(InitializationType.OpenAIClientInline)]
+    [InlineData(InitializationType.OpenAIClientInServiceProvider)]
+    public void KernelBuilderAddOpenAITextGenerationAddsValidService(InitializationType type)
+    {
+        // Arrange
+        var
client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddOpenAITextGeneration("model-id", "api-key"), + InitializationType.OpenAIClientInline => builder.AddOpenAITextGeneration("model-id", client), + InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAITextGeneration("model-id"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is OpenAITextGenerationService); + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void ServiceCollectionAddOpenAITextGenerationAddsValidService(InitializationType type) + { + // Arrange + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddOpenAITextGeneration("model-id", "api-key"), + InitializationType.OpenAIClientInline => builder.Services.AddOpenAITextGeneration("model-id", client), + InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAITextGeneration("model-id"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is OpenAITextGenerationService); + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void KernelBuilderAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), + InitializationType.OpenAIClientInline => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), + InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is AzureOpenAITextEmbeddingGenerationService); + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void ServiceCollectionAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => 
builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), + InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), + InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is AzureOpenAITextEmbeddingGenerationService); + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void KernelBuilderAddOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) + { + // Arrange + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddOpenAITextEmbeddingGeneration("model-id", "api-key"), + InitializationType.OpenAIClientInline => builder.AddOpenAITextEmbeddingGeneration("model-id", client), + InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAITextEmbeddingGeneration("model-id"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is OpenAITextEmbeddingGenerationService); + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void ServiceCollectionAddOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) + { + // Arrange + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddOpenAITextEmbeddingGeneration("model-id", "api-key"), + InitializationType.OpenAIClientInline => builder.Services.AddOpenAITextEmbeddingGeneration("model-id", client), + InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAITextEmbeddingGeneration("model-id"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is OpenAITextEmbeddingGenerationService); + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + [InlineData(InitializationType.ChatCompletionWithData)] + public void KernelBuilderAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new OpenAIClient("key"); + var config = this.GetCompletionWithDataConfig(); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => 
builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), + InitializationType.OpenAIClientInline => builder.AddAzureOpenAIChatCompletion("deployment-name", client), + InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAIChatCompletion("deployment-name"), + InitializationType.ChatCompletionWithData => builder.AddAzureOpenAIChatCompletion(config), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + + if (type == InitializationType.ChatCompletionWithData) + { + Assert.True(service is AzureOpenAIChatCompletionWithDataService); + } + else + { + Assert.True(service is AzureOpenAIChatCompletionService); + } + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + [InlineData(InitializationType.ChatCompletionWithData)] + public void ServiceCollectionAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new OpenAIClient("key"); + var config = this.GetCompletionWithDataConfig(); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), + InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", client), + InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAIChatCompletion("deployment-name"), + InitializationType.ChatCompletionWithData => builder.Services.AddAzureOpenAIChatCompletion(config), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + + if (type == InitializationType.ChatCompletionWithData) + { + Assert.True(service is AzureOpenAIChatCompletionWithDataService); + } + else + { + Assert.True(service is AzureOpenAIChatCompletionService); + } + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void KernelBuilderAddOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddOpenAIChatCompletion("model-id", "api-key"), + InitializationType.OpenAIClientInline => builder.AddOpenAIChatCompletion("model-id", client), + InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAIChatCompletion("model-id"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is OpenAIChatCompletionService); + } + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void 
ServiceCollectionAddOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var client = new OpenAIClient("key"); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddOpenAIChatCompletion("model-id", "api-key"), + InitializationType.OpenAIClientInline => builder.Services.AddOpenAIChatCompletion("model-id", client), + InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAIChatCompletion("model-id"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is OpenAIChatCompletionService); + } + + [Fact] + public void KernelBuilderAddAzureOpenAITextToImageAddsValidService() + { + // Arrange + var builder = Kernel.CreateBuilder(); + + // Act + builder = builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"); + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is AzureOpenAITextToImageService); + } + + [Fact] + public void ServiceCollectionAddAzureOpenAITextToImageAddsValidService() + { + // Arrange + var builder = Kernel.CreateBuilder(); + + // Act + builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"); + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is AzureOpenAITextToImageService); + } + + [Fact] + public void KernelBuilderAddOpenAITextToImageAddsValidService() + { + // Arrange + var builder = Kernel.CreateBuilder(); + + // Act + builder = builder.AddOpenAITextToImage("model-id", "api-key"); + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is OpenAITextToImageService); + } + + [Fact] + public void ServiceCollectionAddOpenAITextToImageAddsValidService() + { + // Arrange + var builder = Kernel.CreateBuilder(); + + // Act + builder.Services.AddOpenAITextToImage("model-id", "api-key"); + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is OpenAITextToImageService); + } + + public void Dispose() + { + this._httpClient.Dispose(); + } + + public enum InitializationType + { + ApiKey, + TokenCredential, + OpenAIClientInline, + OpenAIClientInServiceProvider, + ChatCompletionWithData + } + + private AzureOpenAIChatCompletionWithDataConfig GetCompletionWithDataConfig() + { + return new() + { + CompletionApiKey = "completion-api-key", + CompletionApiVersion = "completion-v1", + CompletionEndpoint = "https://completion-endpoint", + CompletionModelId = "completion-model-id", + DataSourceApiKey = "data-source-api-key", + DataSourceEndpoint = "https://data-source-endpoint", + DataSourceIndex = "data-source-index" + }; + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_multiple_function_calls_test_response.json new file mode 100644 index 000000000000..d339ae99b6ab --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_multiple_function_calls_test_response.json @@ -0,0 +1,56 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1699896916, + "model": "gpt-3.5-turbo-0613", + 
"choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "1", + "type": "function", + "function": { + "name": "MyPlugin-GetCurrentWeather", + "arguments": "{\n\"location\": \"Boston, MA\"\n}" + } + }, + { + "id": "2", + "type": "function", + "function": { + "name": "MyPlugin-FunctionWithException", + "arguments": "{\n\"argument\": \"value\"\n}" + } + }, + { + "id": "3", + "type": "function", + "function": { + "name": "MyPlugin-NonExistentFunction", + "arguments": "{\n\"argument\": \"value\"\n}" + } + }, + { + "id": "4", + "type": "function", + "function": { + "name": "MyPlugin-InvalidArguments", + "arguments": "invalid_arguments_format" + } + } + ] + }, + "logprobs": null, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 17, + "total_tokens": 99 + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_single_function_call_test_response.json b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_single_function_call_test_response.json new file mode 100644 index 000000000000..6c93e434f259 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_single_function_call_test_response.json @@ -0,0 +1,32 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1699896916, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "1", + "type": "function", + "function": { + "name": "MyPlugin-GetCurrentWeather", + "arguments": "{\n\"location\": \"Boston, MA\"\n}" + } + } + ] + }, + "logprobs": null, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 17, + "total_tokens": 99 + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt new file mode 100644 index 000000000000..ceb8f3e8b44b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt @@ -0,0 +1,9 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-FunctionWithException","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":2,"id":"3","type":"function","function":{"name":"MyPlugin-NonExistentFunction","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]} + 
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":3,"id":"4","type":"function","function":{"name":"MyPlugin-InvalidArguments","arguments":"invalid_arguments_format"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_single_function_call_test_response.txt b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_single_function_call_test_response.txt new file mode 100644 index 000000000000..6835039941ce --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_single_function_call_test_response.txt @@ -0,0 +1,3 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_test_response.txt new file mode 100644 index 000000000000..8301463c6008 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_test_response.txt @@ -0,0 +1,3 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Test chat streaming response"},"finish_reason":null}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_test_response.json b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_test_response.json new file mode 100644 index 000000000000..b601bac8b55b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_test_response.json @@ -0,0 +1,22 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1704208954, + "model": "gpt-4", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Test chat response" + }, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 55, + "completion_tokens": 100, + "total_tokens": 155 + }, + "system_fingerprint": null +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_streaming_test_response.txt new file mode 100644 index 000000000000..5e17403da9fc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_streaming_test_response.txt @@ -0,0 +1 @@ +data: {"id":"response-id","model":"","created":1684304924,"object":"chat.completion","choices":[{"index":0,"messages":[{"delta":{"role":"assistant","content":"Test chat with data streaming response"},"end_turn":false}]}]} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_test_response.json 
b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_test_response.json new file mode 100644 index 000000000000..40d769dac8a7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_test_response.json @@ -0,0 +1,28 @@ +{ + "id": "response-id", + "model": "", + "created": 1684304924, + "object": "chat.completion", + "choices": [ + { + "index": 0, + "messages": [ + { + "role": "tool", + "content": "{\"citations\": [{\"content\": \"\\nAzure AI services are cloud-based artificial intelligence (AI) services...\", \"id\": null, \"title\": \"What is Azure AI services\", \"filepath\": null, \"url\": null, \"metadata\": {\"chunking\": \"original document size=250. Scores=0.4314117431640625 and 1.72564697265625.Org Highlight count=4.\"}, \"chunk_id\": \"0\"}], \"intent\": \"[\\\"Learn about Azure AI services.\\\"]\"}", + "end_turn": false + }, + { + "role": "assistant", + "content": "Test chat with data response", + "end_turn": true + } + ] + } + ], + "usage": { + "prompt_tokens": 55, + "completion_tokens": 100, + "total_tokens": 155 + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/image_generation_test_response.json b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/image_generation_test_response.json deleted file mode 100644 index 87b9ab7d7cce..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/image_generation_test_response.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "id": "32ba9f77-d620-4b6c-9265-ad50cb314a5c", - "status": "notRunning" -} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/image_result_test_response.json b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/image_result_test_response.json deleted file mode 100644 index 61904f1b0a02..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/image_result_test_response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "created": 1686192127, - "expires": 1686278532, - "id": "32ba9f77-d620-4b6c-9265-ad50cb314a5c", - "result": { - "created": 1686192127, - "data": [ - { "url": "https://dalleproduse.blob.core.windows.net/private/images/generated_00.png" } - ] - }, - "status": "succeeded" -} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_streaming_test_response.txt new file mode 100644 index 000000000000..a511ea446236 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_streaming_test_response.txt @@ -0,0 +1,3 @@ +data: {"id":"response-id","object":"text_completion","created":1646932609,"model":"ada","choices":[{"text":"Test chat streaming response","index":0,"logprobs":null,"finish_reason":"length"}],"usage":{"prompt_tokens":55,"completion_tokens":100,"total_tokens":155}} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_test_response.json b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_test_response.json new file mode 100644 index 000000000000..540229437440 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_test_response.json @@ -0,0 +1,19 @@ +{ + "id": "response-id", + "object": "text_completion", + "created": 1646932609, + "model": "ada", + "choices": [ + { + "text": "Test chat response", 
+ "index": 0, + "logprobs": null, + "finish_reason": "length" + } + ], + "usage": { + "prompt_tokens": 55, + "completion_tokens": 100, + "total_tokens": 155 + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs new file mode 100644 index 000000000000..d8e5f1ca177a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextEmbedding; + +/// +/// Unit tests for class. +/// +public sealed class AzureOpenAITextEmbeddingGenerationServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AzureOpenAITextEmbeddingGenerationServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? + new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var service = includeLoggerFactory ? + new AzureOpenAITextEmbeddingGenerationService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextEmbeddingGenerationService("deployment", "https://endpoint", credentials, "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var client = new OpenAIClient("key"); + var service = includeLoggerFactory ? 
+ new AzureOpenAITextEmbeddingGenerationService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextEmbeddingGenerationService("deployment", client, "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Fact] + public async Task GenerateEmbeddingsForEmptyDataReturnsEmptyResultAsync() + { + // Arrange + var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + + // Act + var result = await service.GenerateEmbeddingsAsync([]); + + // Assert + Assert.Empty(result); + } + + [Fact] + public async Task GenerateEmbeddingsWithEmptyResponseThrowsExceptionAsync() + { + // Arrange + var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(@"{ + ""object"": ""list"", + ""data"": [], + ""model"": ""model-id"" + }", Encoding.UTF8, "application/json") + }; + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => service.GenerateEmbeddingsAsync(["test"])); + Assert.Equal("Expected 1 text embedding(s), but received 0", exception.Message); + } + + [Fact] + public async Task GenerateEmbeddingsByDefaultWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(@"{ + ""object"": ""list"", + ""data"": [ + { + ""object"": ""embedding"", + ""embedding"": [ + 0.018990106880664825, + -0.0073809814639389515 + ], + ""index"": 0 + } + ], + ""model"": ""model-id"" + }", Encoding.UTF8, "application/json") + }; + + // Act + var result = await service.GenerateEmbeddingsAsync(["test"]); + + // Assert + Assert.Single(result); + + var memory = result[0]; + + Assert.Equal(0.018990106880664825, memory.Span[0]); + Assert.Equal(-0.0073809814639389515, memory.Span[1]); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs new file mode 100644 index 000000000000..fff5f987a93c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextEmbedding; + +/// +/// Unit tests for class. 
+/// +public sealed class OpenAITextEmbeddingGenerationServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public OpenAITextEmbeddingGenerationServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? + new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : + new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var client = new OpenAIClient("key"); + var service = includeLoggerFactory ? + new OpenAITextEmbeddingGenerationService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : + new OpenAITextEmbeddingGenerationService("model-id", client); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Fact] + public async Task GenerateEmbeddingsForEmptyDataReturnsEmptyResultAsync() + { + // Arrange + var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); + + // Act + var result = await service.GenerateEmbeddingsAsync([]); + + // Assert + Assert.Empty(result); + } + + [Fact] + public async Task GenerateEmbeddingsWithEmptyResponseThrowsExceptionAsync() + { + // Arrange + var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(@"{ + ""object"": ""list"", + ""data"": [], + ""model"": ""model-id"" + }", Encoding.UTF8, "application/json") + }; + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => service.GenerateEmbeddingsAsync(["test"])); + Assert.Equal("Expected 1 text embedding(s), but received 0", exception.Message); + } + + [Fact] + public async Task GenerateEmbeddingsByDefaultWorksCorrectlyAsync() + { + // Arrange + var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(@"{ + ""object"": ""list"", + ""data"": [ + { + ""object"": ""embedding"", + ""embedding"": [ + 0.018990106880664825, + -0.0073809814639389515 + ], + ""index"": 0 + } + ], + ""model"": ""model-id"" + }", Encoding.UTF8, "application/json") + }; + + // Act + var result = await service.GenerateEmbeddingsAsync(["test"]); + + // Assert + Assert.Single(result); + + var memory = result[0]; + + Assert.Equal(0.018990106880664825, memory.Span[0]); + Assert.Equal(-0.0073809814639389515, memory.Span[1]); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs new file mode 100644 index 000000000000..87f5526d5f83 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs @@ -0,0 +1,208 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextGeneration; + +/// +/// Unit tests for class. +/// +public sealed class AzureOpenAITextGenerationServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AzureOpenAITextGenerationServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? + new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var service = includeLoggerFactory ? + new AzureOpenAITextGenerationService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextGenerationService("deployment", "https://endpoint", credentials, "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var client = new OpenAIClient("key"); + var service = includeLoggerFactory ? 
+ new AzureOpenAITextGenerationService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextGenerationService("deployment", client, "model-id"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Fact] + public async Task GetTextContentsWithEmptyChoicesThrowsExceptionAsync() + { + // Arrange + var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent("{\"id\":\"response-id\",\"object\":\"text_completion\",\"created\":1646932609,\"model\":\"ada\",\"choices\":[]}") + }; + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => service.GetTextContentsAsync("Prompt")); + + Assert.Equal("Text completions not found", exception.Message); + } + + [Theory] + [InlineData(0)] + [InlineData(129)] + public async Task GetTextContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt) + { + // Arrange + var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + var settings = new OpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt }; + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => service.GetTextContentsAsync("Prompt", settings)); + + Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task GetTextContentsHandlesSettingsCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + var settings = new OpenAIPromptExecutionSettings + { + MaxTokens = 123, + Temperature = 0.6, + TopP = 0.5, + FrequencyPenalty = 1.6, + PresencePenalty = 1.2, + ResultsPerPrompt = 5, + TokenSelectionBiases = new Dictionary { { 2, 3 } }, + StopSequences = ["stop_sequence"] + }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) + }; + + // Act + var result = await service.GetTextContentsAsync("Prompt", settings); + + // Assert + var requestContent = this._messageHandlerStub.RequestContent; + + Assert.NotNull(requestContent); + + var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent)); + + Assert.Equal("Prompt", content.GetProperty("prompt")[0].GetString()); + Assert.Equal(123, content.GetProperty("max_tokens").GetInt32()); + Assert.Equal(0.6, content.GetProperty("temperature").GetDouble()); + Assert.Equal(0.5, content.GetProperty("top_p").GetDouble()); + Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble()); + Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble()); + Assert.Equal(5, content.GetProperty("n").GetInt32()); + Assert.Equal(5, content.GetProperty("best_of").GetInt32()); + Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32()); + Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString()); + } + + [Fact] + public async Task GetTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + 
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) + }; + + // Act + var result = await service.GetTextContentsAsync("Prompt"); + + // Assert + Assert.True(result.Count > 0); + Assert.Equal("Test chat response", result[0].Text); + + var usage = result[0].Metadata?["Usage"] as CompletionsUsage; + + Assert.NotNull(usage); + Assert.Equal(55, usage.PromptTokens); + Assert.Equal(100, usage.CompletionTokens); + Assert.Equal(155, usage.TotalTokens); + } + + [Fact] + public async Task GetStreamingTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("text_completion_streaming_test_response.txt"))); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) + { + Assert.Equal("Test chat streaming response", chunk.Text); + } + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs new file mode 100644 index 000000000000..b8d804c21b5d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextGeneration; + +/// +/// Unit tests for class. +/// +public sealed class OpenAITextGenerationServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public OpenAITextGenerationServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? + new OpenAITextGenerationService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : + new OpenAITextGenerationService("model-id", "api-key", "organization"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var client = new OpenAIClient("key"); + var service = includeLoggerFactory ? 
+ new OpenAITextGenerationService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : + new OpenAITextGenerationService("model-id", client); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Fact] + public async Task GetTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new OpenAITextGenerationService("model-id", "api-key", "organization", this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) + }; + + // Act + var result = await service.GetTextContentsAsync("Prompt"); + + // Assert + Assert.True(result.Count > 0); + Assert.Equal("Test chat response", result[0].Text); + + var usage = result[0].Metadata?["Usage"] as CompletionsUsage; + + Assert.NotNull(usage); + Assert.Equal(55, usage.PromptTokens); + Assert.Equal(100, usage.CompletionTokens); + Assert.Equal(155, usage.TotalTokens); + } + + [Fact] + public async Task GetStreamingTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new OpenAITextGenerationService("model-id", "api-key", "organization", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("text_completion_streaming_test_response.txt"))); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) + { + Assert.Equal("Test chat streaming response", chunk.Text); + } + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs new file mode 100644 index 000000000000..be406a91e63f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Services; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToImage; + +/// +/// Unit tests for class. +/// +public sealed class AzureOpenAITextToImageServiceTests +{ + [Theory] + [InlineData(1024, 1024, null)] + [InlineData(1792, 1024, null)] + [InlineData(1024, 1792, null)] + [InlineData(512, 512, typeof(NotSupportedException))] + [InlineData(256, 256, typeof(NotSupportedException))] + [InlineData(123, 456, typeof(NotSupportedException))] + public async Task ItValidatesTheModelIdAsync(int width, int height, Type? 
expectedExceptionType) + { + // Arrange + using var messageHandlerStub = new HttpMessageHandlerStub(); + using var httpClient = new HttpClient(messageHandlerStub, false); + messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(@"{ + ""created"": 1702575371, + ""data"": [ + { + ""revised_prompt"": ""A photo capturing the diversity of the Earth's landscapes."", + ""url"": ""https://dalleprodsec.blob.core.windows.net/private/images/0f20c621-7eb0-449d-87fd-8dd2a3a15fbe/generated_00.png?se=2023-12-15T17%3A36%3A25Z&sig=jd2%2Fa8jOM9NmclrUbOLdRgAxcFDFPezOpG%2BSF82d7zM%3D&ske=2023-12-20T10%3A10%3A28Z&skoid=e52d5ed7-0657-4f62-bc12-7e5dbb260a96&sks=b&skt=2023-12-13T10%3A10%3A28Z&sktid=33e01921-4d64-4f8c-a055-5bdaffd5e33d&skv=2020-10-02&sp=r&spr=https&sr=b&sv=2020-10-02"" + } + ] + }", Encoding.UTF8, "application/json") + }; + + var textToImageCompletion = new AzureOpenAITextToImageService(deploymentName: "gpt-35-turbo", modelId: "gpt-3.5-turbo", endpoint: "https://az.com", apiKey: "NOKEY", httpClient: httpClient); + + if (expectedExceptionType is not null) + { + await Assert.ThrowsAsync(expectedExceptionType, () => textToImageCompletion.GenerateImageAsync("anything", width, height)); + } + else + { + // Act + var result = await textToImageCompletion.GenerateImageAsync("anything", width, height); + + // Assert + Assert.NotNull(result); + } + } + + [Theory] + [InlineData("gpt-35-turbo", "gpt-3.5-turbo")] + [InlineData("gpt-35-turbo", null)] + [InlineData("gpt-4-turbo", "gpt-4")] + public void ItHasPropertiesAsDefined(string deploymentName, string? modelId) + { + var service = new AzureOpenAITextToImageService(deploymentName, "https://az.com", "NOKEY", modelId); + Assert.Contains(AzureOpenAITextToImageService.DeploymentNameKey, service.Attributes); + Assert.Equal(deploymentName, service.Attributes[AzureOpenAITextToImageService.DeploymentNameKey]); + + if (modelId is null) + { + return; + } + + Assert.Contains(AIServiceExtensions.ModelIdKey, service.Attributes); + Assert.Equal(modelId, service.Attributes[AIServiceExtensions.ModelIdKey]); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs new file mode 100644 index 000000000000..a420a187d7b7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToImage; + +/// +/// Unit tests for class. +/// +public sealed class OpenAITextToImageServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public OpenAITextToImageServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? 
+ new OpenAITextToImageService("api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : + new OpenAITextToImageService("api-key", "organization"); + + // Assert + Assert.NotNull(service); + Assert.Equal("organization", service.Attributes["Organization"]); + } + + [Theory] + [InlineData(123, 456, true)] + [InlineData(256, 512, true)] + [InlineData(256, 256, false)] + [InlineData(512, 512, false)] + [InlineData(1024, 1024, false)] + public async Task GenerateImageWorksCorrectlyAsync(int width, int height, bool expectedException) + { + // Arrange + var service = new OpenAITextToImageService("api-key", "organization", this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(@"{ + ""created"": 1702575371, + ""data"": [ + { + ""url"": ""https://image-url"" + } + ] + }", Encoding.UTF8, "application/json") + }; + + // Act & Assert + if (expectedException) + { + await Assert.ThrowsAsync(() => service.GenerateImageAsync("description", width, height)); + } + else + { + var result = await service.GenerateImageAsync("description", width, height); + + Assert.Equal("https://image-url", result); + } + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs new file mode 100644 index 000000000000..91238ef17e68 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs @@ -0,0 +1,218 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using static Microsoft.SemanticKernel.Connectors.OpenAI.ToolCallBehavior; + +namespace SemanticKernel.Connectors.UnitTests.OpenAI; + +/// +/// Unit tests for +/// +public sealed class ToolCallBehaviorTests +{ + [Fact] + public void EnableKernelFunctionsReturnsCorrectKernelFunctionsInstance() + { + // Arrange & Act + var behavior = ToolCallBehavior.EnableKernelFunctions; + + // Assert + Assert.IsType(behavior); + Assert.Equal(0, behavior.MaximumAutoInvokeAttempts); + } + + [Fact] + public void AutoInvokeKernelFunctionsReturnsCorrectKernelFunctionsInstance() + { + // Arrange & Act + var behavior = ToolCallBehavior.AutoInvokeKernelFunctions; + + // Assert + Assert.IsType(behavior); + Assert.Equal(5, behavior.MaximumAutoInvokeAttempts); + } + + [Fact] + public void EnableFunctionsReturnsEnabledFunctionsInstance() + { + // Arrange & Act + List functions = [new("Plugin", "Function", "description", [], null)]; + var behavior = ToolCallBehavior.EnableFunctions(functions); + + // Assert + Assert.IsType(behavior); + } + + [Fact] + public void RequireFunctionReturnsRequiredFunctionInstance() + { + // Arrange & Act + var behavior = ToolCallBehavior.RequireFunction(new("Plugin", "Function", "description", [], null)); + + // Assert + Assert.IsType(behavior); + } + + [Fact] + public void KernelFunctionsConfigureOptionsWithNullKernelDoesNotAddTools() + { + // Arrange + var kernelFunctions = new KernelFunctions(autoInvoke: false); + var chatCompletionsOptions = new ChatCompletionsOptions(); + + // Act + kernelFunctions.ConfigureOptions(null, chatCompletionsOptions); + + // Assert + Assert.Empty(chatCompletionsOptions.Tools); + } + + [Fact] + public void 
KernelFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools() + { + // Arrange + var kernelFunctions = new KernelFunctions(autoInvoke: false); + var chatCompletionsOptions = new ChatCompletionsOptions(); + var kernel = Kernel.CreateBuilder().Build(); + + // Act + kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + + // Assert + Assert.Null(chatCompletionsOptions.ToolChoice); + Assert.Empty(chatCompletionsOptions.Tools); + } + + [Fact] + public void KernelFunctionsConfigureOptionsWithFunctionsAddsTools() + { + // Arrange + var kernelFunctions = new KernelFunctions(autoInvoke: false); + var chatCompletionsOptions = new ChatCompletionsOptions(); + var kernel = Kernel.CreateBuilder().Build(); + + var plugin = this.GetTestPlugin(); + + kernel.Plugins.Add(plugin); + + // Act + kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + + // Assert + Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice); + + this.AssertTools(chatCompletionsOptions); + } + + [Fact] + public void EnabledFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools() + { + // Arrange + var enabledFunctions = new EnabledFunctions([], autoInvoke: false); + var chatCompletionsOptions = new ChatCompletionsOptions(); + + // Act + enabledFunctions.ConfigureOptions(null, chatCompletionsOptions); + + // Assert + Assert.Null(chatCompletionsOptions.ToolChoice); + Assert.Empty(chatCompletionsOptions.Tools); + } + + [Fact] + public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsException() + { + // Arrange + var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); + var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true); + var chatCompletionsOptions = new ChatCompletionsOptions(); + + // Act & Assert + var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(null, chatCompletionsOptions)); + Assert.Equal($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided.", exception.Message); + } + + [Fact] + public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsException() + { + // Arrange + var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); + var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true); + var chatCompletionsOptions = new ChatCompletionsOptions(); + var kernel = Kernel.CreateBuilder().Build(); + + // Act & Assert + var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions)); + Assert.Equal($"The specified {nameof(EnabledFunctions)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void EnabledFunctionsConfigureOptionsWithKernelAndPluginsAddsTools(bool autoInvoke) + { + // Arrange + var plugin = this.GetTestPlugin(); + var functions = plugin.GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); + var enabledFunctions = new EnabledFunctions(functions, autoInvoke); + var chatCompletionsOptions = new ChatCompletionsOptions(); + var kernel = Kernel.CreateBuilder().Build(); + + kernel.Plugins.Add(plugin); + + // Act + enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + + // Assert + Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice); + + this.AssertTools(chatCompletionsOptions); + } + + [Fact] + public void 
RequiredFunctionConfigureOptionsAddsTools() + { + // Arrange + var function = this.GetTestPlugin().GetFunctionsMetadata()[0].ToOpenAIFunction(); + var chatCompletionsOptions = new ChatCompletionsOptions(); + var requiredFunction = new RequiredFunction(function, autoInvoke: true); + + // Act + requiredFunction.ConfigureOptions(null, chatCompletionsOptions); + + // Assert + Assert.NotNull(chatCompletionsOptions.ToolChoice); + + this.AssertTools(chatCompletionsOptions); + } + + private KernelPlugin GetTestPlugin() + { + var function = KernelFunctionFactory.CreateFromMethod( + (string parameter1, string parameter2) => "Result1", + "MyFunction", + "Test Function", + [new KernelParameterMetadata("parameter1"), new KernelParameterMetadata("parameter2")], + new KernelReturnParameterMetadata { ParameterType = typeof(string), Description = "Function Result" }); + + return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); + } + + private void AssertTools(ChatCompletionsOptions chatCompletionsOptions) + { + Assert.Single(chatCompletionsOptions.Tools); + + var tool = chatCompletionsOptions.Tools[0] as ChatCompletionsFunctionToolDefinition; + + Assert.NotNull(tool); + + Assert.Equal("MyPlugin-MyFunction", tool.Name); + Assert.Equal("Test Function", tool.Description); + Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{\"parameter1\":{\"type\":\"string\"},\"parameter2\":{\"type\":\"string\"}}}", tool.Parameters.ToString()); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/.editorconfig b/dotnet/src/Experimental/Agents.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Experimental/Agents.UnitTests/ChatCompletionAgentTests.cs b/dotnet/src/Experimental/Agents.UnitTests/ChatCompletionAgentTests.cs new file mode 100644 index 000000000000..a7ca53e57cb6 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/ChatCompletionAgentTests.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Agents; +using Moq; +using Xunit; + +namespace SemanticKernel.Experimental.Agents.UnitTests; +public class ChatCompletionAgentTests +{ + private readonly IKernelBuilder _kernelBuilder; + + public ChatCompletionAgentTests() + { + this._kernelBuilder = Kernel.CreateBuilder(); + } + + [Fact] + public async Task ItShouldResolveChatCompletionServiceFromKernelAsync() + { + // Arrange + var mockChatCompletionService = new Mock(); + + this._kernelBuilder.Services.AddSingleton(mockChatCompletionService.Object); + + var agent = new ChatCompletionAgent(this._kernelBuilder.Build(), "fake-instructions"); + + // Act + var result = await agent.InvokeAsync(new List()); + + // Assert + mockChatCompletionService.Verify(x => + x.GetChatMessageContentsAsync( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny()), + Times.Once); + } + + [Fact] + public async Task ItShouldAddSystemInstructionsAndMessagesToChatHistoryAsync() + { + // Arrange + var mockChatCompletionService = new Mock(); + + this._kernelBuilder.Services.AddSingleton(mockChatCompletionService.Object); + + var agent = new ChatCompletionAgent(this._kernelBuilder.Build(), "fake-instructions"); + + // Act + var result = await agent.InvokeAsync(new List() { new(AuthorRole.User, "fake-user-message") }); + + // Assert + mockChatCompletionService.Verify( + x => x.GetChatMessageContentsAsync( + It.Is(ch => ch.Count == 2 && + ch.Any(m => m.Role == AuthorRole.System && m.Content == "fake-instructions") && + ch.Any(m => m.Role == AuthorRole.User && m.Content == "fake-user-message")), + It.IsAny(), + It.IsAny(), + It.IsAny()), + Times.Once); + } + + [Fact] + public async Task ItShouldReturnChatCompletionServiceMessagesAsync() + { + // Arrange + var mockChatCompletionService = new Mock(); + mockChatCompletionService + .Setup(ccs => ccs.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(new List { + new(AuthorRole.Assistant, "fake-assistant-message-1"), + new(AuthorRole.Assistant, "fake-assistant-message-2") + }); + + this._kernelBuilder.Services.AddSingleton(mockChatCompletionService.Object); + + var agent = new ChatCompletionAgent(this._kernelBuilder.Build(), "fake-instructions"); + + // Act + var result = await agent.InvokeAsync(new List()); + + // Assert + Assert.Equal(2, result.Count); + Assert.Contains(result, m => m.Role == AuthorRole.Assistant && m.Content == "fake-assistant-message-1"); + Assert.Contains(result, m => m.Role == AuthorRole.Assistant && m.Content == "fake-assistant-message-2"); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Experimental.Agents.UnitTests.csproj b/dotnet/src/Experimental/Agents.UnitTests/Experimental.Agents.UnitTests.csproj new file mode 100644 index 000000000000..756325d2bd00 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Experimental.Agents.UnitTests.csproj @@ -0,0 +1,49 @@ + + + SemanticKernel.Experimental.Agents.UnitTests + SemanticKernel.Experimental.Agents.UnitTests + net6.0 + LatestMajor + true + enable + disable + false + CS1591;SKEXP0101 + + + + + + + + + + + + + + + + + + all + + + all + + + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + \ No newline at end of file diff --git 
a/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelExtensionTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelExtensionTests.cs
new file mode 100644
index 000000000000..c117be28577a
--- /dev/null
+++ b/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelExtensionTests.cs
@@ -0,0 +1,44 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Experimental.Agents.Exceptions;
+using Microsoft.SemanticKernel.Experimental.Agents.Extensions;
+using Xunit;
+
+namespace SemanticKernel.Experimental.Agents.UnitTests;
+
+[Trait("Category", "Unit Tests")]
+[Trait("Feature", "Agent")]
+public sealed class KernelExtensionTests
+{
+    private const string TwoPartToolName = "Fake-Bogus";
+
+    [Fact]
+    public static void InvokeTwoPartTool()
+    {
+        //Arrange
+        var function = KernelFunctionFactory.CreateFromMethod(() => { }, functionName: "Bogus");
+
+        var kernel = new Kernel();
+        kernel.ImportPluginFromFunctions("Fake", new[] { function });
+
+        //Act
+        var tool = kernel.GetAssistantTool(TwoPartToolName);
+
+        //Assert
+        Assert.NotNull(tool);
+        Assert.Equal("Bogus", tool.Name);
+    }
+
+    [Theory]
+    [InlineData("Bogus")]
+    [InlineData("i-am-not-valid")]
+    public static void InvokeInvalidSinglePartTool(string toolName)
+    {
+        //Arrange
+        var kernel = new Kernel();
+
+        //Act & Assert
+        Assert.Throws<AgentException>(() => kernel.GetAssistantTool(toolName));
+    }
+}
diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelFunctionExtensionTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelFunctionExtensionTests.cs
new file mode 100644
index 000000000000..b69aead79981
--- /dev/null
+++ b/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelFunctionExtensionTests.cs
@@ -0,0 +1,52 @@
+// Copyright (c) Microsoft. All rights reserved.
+ +using System.Collections.Generic; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Agents; +using Xunit; + +namespace SemanticKernel.Experimental.Agents.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Agent")] +public sealed class KernelFunctionExtensionTests +{ + private const string ToolName = "Bogus"; + private const string PluginName = "Fake"; + + [Fact] + public static void GetTwoPartName() + { + var function = KernelFunctionFactory.CreateFromMethod(() => true, ToolName); + + string qualifiedName = function.GetQualifiedName(PluginName); + + Assert.Equal($"{PluginName}-{ToolName}", qualifiedName); + } + + [Fact] + public static void GetToolModelFromFunction() + { + const string FunctionDescription = "Bogus description"; + const string RequiredParamName = "required"; + const string OptionalParamName = "optional"; + + var requiredParam = new KernelParameterMetadata("required") { IsRequired = true }; + var optionalParam = new KernelParameterMetadata("optional"); + var parameters = new List { requiredParam, optionalParam }; + var function = KernelFunctionFactory.CreateFromMethod(() => true, ToolName, FunctionDescription, parameters); + + var toolModel = function.ToToolModel(PluginName); + var properties = toolModel.Function?.Parameters.Properties; + var required = toolModel.Function?.Parameters.Required; + + Assert.Equal("function", toolModel.Type); + Assert.Equal($"{PluginName}-{ToolName}", toolModel.Function?.Name); + Assert.Equal(FunctionDescription, toolModel.Function?.Description); + Assert.Equal(2, properties?.Count); + Assert.True(properties?.ContainsKey(RequiredParamName)); + Assert.True(properties?.ContainsKey(OptionalParamName)); + Assert.Equal(1, required?.Count ?? 0); + Assert.True(required?.Contains(RequiredParamName) ?? false); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs new file mode 100644 index 000000000000..c6773cea232f --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft. All rights reserved. 
+
+using System.Net;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Experimental.Agents;
+using Microsoft.SemanticKernel.Experimental.Agents.Internal;
+using Microsoft.SemanticKernel.Experimental.Agents.Models;
+using Moq;
+using Moq.Protected;
+using Xunit;
+
+namespace SemanticKernel.Experimental.Agents.UnitTests;
+
+[Trait("Category", "Unit Tests")]
+[Trait("Feature", "Agent")]
+public sealed class OpenAIRestExtensionsAssistantsTests
+{
+    private const string BogusEndpoint = "http://localhost";
+    private const string BogusApiKey = "bogus";
+    private const string TestAgentId = "agentId";
+
+    private readonly AssistantModel _assistantModel = new();
+    private readonly OpenAIRestContext _restContext;
+    private readonly Mock<HttpMessageHandler> _mockHttpMessageHandler = new();
+
+    public OpenAIRestExtensionsAssistantsTests()
+    {
+        this._mockHttpMessageHandler
+            .Protected()
+            .Setup<Task<HttpResponseMessage>>("SendAsync", ItExpr.IsAny<HttpRequestMessage>(), ItExpr.IsAny<CancellationToken>())
+            .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") });
+        this._restContext = new(BogusEndpoint, BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object));
+    }
+
+    [Fact]
+    public async Task CreateAssistantModelAsync()
+    {
+        await this._restContext.CreateAssistantModelAsync(this._assistantModel).ConfigureAwait(true);
+
+        this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetAssistantsUrl());
+    }
+
+    [Fact]
+    public async Task GetAssistantModelAsync()
+    {
+        await this._restContext.GetAssistantModelAsync(TestAgentId).ConfigureAwait(true);
+
+        this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetAssistantUrl(TestAgentId));
+    }
+
+    [Fact]
+    public async Task ListAssistantModelsAsync()
+    {
+        await this._restContext.ListAssistantModelsAsync(10, false, "20").ConfigureAwait(true);
+
+        this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, $"{this._restContext.GetAssistantsUrl()}?limit=10&order=desc&after=20");
+    }
+
+    [Fact]
+    public async Task DeleteAssistantsModelAsync()
+    {
+        await this._restContext.DeleteAssistantModelAsync(TestAgentId).ConfigureAwait(true);
+
+        this._mockHttpMessageHandler.VerifyMock(HttpMethod.Delete, 1, this._restContext.GetAssistantUrl(TestAgentId));
+    }
+}
diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs
new file mode 100644
index 000000000000..4ce9e3c75426
--- /dev/null
+++ b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs
@@ -0,0 +1,70 @@
+// Copyright (c) Microsoft. All rights reserved.
+ +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Moq; +using Moq.Protected; +using Xunit; + +namespace SemanticKernel.Experimental.Agents.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Agent")] +public sealed class OpenAIRestExtensionsMessagesTests +{ + private const string BogusEndpoint = "http://localhost"; + private const string BogusApiKey = "bogus"; + private const string TestThreadId = "threadId"; + private const string TestMessageId = "msgId"; + private const string TestContent = "Blah blah"; + + private readonly OpenAIRestContext _restContext; + private readonly Mock _mockHttpMessageHandler = new(); + + public OpenAIRestExtensionsMessagesTests() + { + this._mockHttpMessageHandler + .Protected() + .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); + this._restContext = new(BogusEndpoint, BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); + } + + [Fact] + public async Task CreateMessageModelAsync() + { + await this._restContext.CreateUserTextMessageAsync(TestThreadId, TestContent).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetMessagesUrl(TestThreadId)); + } + + [Fact] + public async Task GetMessageModelAsync() + { + await this._restContext.GetMessageAsync(TestThreadId, TestMessageId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetMessagesUrl(TestThreadId, TestMessageId)); + } + + [Fact] + public async Task GetMessageModelsAsync() + { + await this._restContext.GetMessagesAsync(TestThreadId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetMessagesUrl(TestThreadId)); + } + + [Fact] + public async Task GetSpecificMessageModelsAsync() + { + var messageIDs = new string[] { "1", "2", "3" }; + + await this._restContext.GetMessagesAsync(TestThreadId, messageIDs).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, messageIDs.Length); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs new file mode 100644 index 000000000000..4dcc85cf4b68 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Experimental.Agents.Models; +using Moq; +using Moq.Protected; +using Xunit; + +namespace SemanticKernel.Experimental.Agents.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Agent")] +public sealed class OpenAIRestExtensionsRunTests +{ + private const string BogusEndpoint = "http://localhost"; + private const string BogusApiKey = "bogus"; + private const string TestAgentId = "agentId"; + private const string TestThreadId = "threadId"; + private const string TestRunId = "runId"; + + private readonly OpenAIRestContext _restContext; + private readonly Mock _mockHttpMessageHandler = new(); + + public OpenAIRestExtensionsRunTests() + { + this._mockHttpMessageHandler + .Protected() + .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); + this._restContext = new(BogusEndpoint, BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); + } + + [Fact] + public async Task CreateRunAsync() + { + await this._restContext.CreateRunAsync(TestThreadId, TestAgentId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetRunsUrl(TestThreadId)); + } + + [Fact] + public async Task GetRunAsync() + { + await this._restContext.GetRunAsync(TestThreadId, TestRunId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetRunUrl(TestThreadId, TestRunId)); + } + + [Fact] + public async Task GetRunStepsAsync() + { + await this._restContext.GetRunStepsAsync(TestThreadId, TestRunId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetRunStepsUrl(TestThreadId, TestRunId)); + } + + [Fact] + public async Task AddToolOutputsAsync() + { + var toolResults = Array.Empty(); + + await this._restContext.AddToolOutputsAsync(TestThreadId, TestRunId, toolResults).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetRunToolOutputUrl(TestThreadId, TestRunId)); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs new file mode 100644 index 000000000000..0738cd85bfc2 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Moq; +using Moq.Protected; +using Xunit; + +namespace SemanticKernel.Experimental.Agents.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Agent")] +public sealed class OpenAIRestExtensionsThreadTests +{ + private const string BogusEndpoint = "http://localhost"; + private const string BogusApiKey = "bogus"; + private const string TestThreadId = "threadId"; + + private readonly OpenAIRestContext _restContext; + private readonly Mock _mockHttpMessageHandler = new(); + + public OpenAIRestExtensionsThreadTests() + { + this._mockHttpMessageHandler + .Protected() + .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); + this._restContext = new(BogusEndpoint, BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); + } + + [Fact] + public async Task CreateThreadModelAsync() + { + await this._restContext.CreateThreadModelAsync().ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetThreadsUrl()); + } + + [Fact] + public async Task GetThreadModelAsync() + { + await this._restContext.GetThreadModelAsync(TestThreadId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetThreadUrl(TestThreadId)); + } + + [Fact] + public async Task DeleteThreadModelAsync() + { + await this._restContext.DeleteThreadModelAsync(TestThreadId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Delete, 1, this._restContext.GetThreadUrl(TestThreadId)); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Integration/AgentHarness.cs b/dotnet/src/Experimental/Agents.UnitTests/Integration/AgentHarness.cs new file mode 100644 index 000000000000..2308db878e54 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Integration/AgentHarness.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft. All rights reserved. + +#define DISABLEHOST // Comment line to enable +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Experimental.Agents.Models; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.Experimental.Agents.UnitTests.Integration; + +/// +/// Dev harness for manipulating agents. +/// +/// +/// Comment out DISABLEHOST definition to enable tests. +/// Not enabled by default. +/// +[Trait("Category", "Integration Tests")] +[Trait("Feature", "Agent")] +public sealed class AgentHarness +{ +#if DISABLEHOST + private const string SkipReason = "Harness only for local/dev environment"; +#else + private const string SkipReason = null; +#endif + + private readonly ITestOutputHelper _output; + + /// + /// Test constructor. + /// + public AgentHarness(ITestOutputHelper output) + { + this._output = output; + } + + /// + /// Verify creation and retrieval of agent. 
+ /// + [Fact(Skip = SkipReason)] + public async Task VerifyAgentLifecycleAsync() + { + var agent = + await new AgentBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .WithInstructions("say something funny") + .WithName("Fred") + .WithDescription("test agent") + .BuildAsync().ConfigureAwait(true); + + this.DumpAgent(agent); + + var copy = + await new AgentBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .GetAsync(agentId: agent.Id).ConfigureAwait(true); + + this.DumpAgent(copy); + } + + /// + /// Verify creation and retrieval of agent. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyAgentDefinitionAsync() + { + var agent = + await new AgentBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .FromTemplatePath("Templates/PoetAgent.yaml") + .BuildAsync() + .ConfigureAwait(true); + + this.DumpAgent(agent); + + var copy = + await new AgentBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .GetAsync(agentId: agent.Id).ConfigureAwait(true); + + this.DumpAgent(copy); + } + + /// + /// Verify creation and retrieval of agent. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyAgentListAsync() + { + var context = new OpenAIRestContext(AgentBuilder.OpenAIBaseUrl, TestConfig.OpenAIApiKey); + var agents = await context.ListAssistantModelsAsync().ConfigureAwait(true); + foreach (var agent in agents) + { + this.DumpAgent(agent); + } + } + + /// + /// Verify creation and retrieval of agent. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyAgentDeleteAsync() + { + var names = + new HashSet(StringComparer.OrdinalIgnoreCase) + { + "Fred", + "Barney", + "DeleteMe", + "Poet", + "Math Tutor", + }; + + var context = new OpenAIRestContext(AgentBuilder.OpenAIBaseUrl, TestConfig.OpenAIApiKey); + var agents = await context.ListAssistantModelsAsync().ConfigureAwait(true); + foreach (var agent in agents) + { + if (!string.IsNullOrWhiteSpace(agent.Name) && names.Contains(agent.Name)) + { + this._output.WriteLine($"Removing: {agent.Name} - {agent.Id}"); + await context.DeleteAssistantModelAsync(agent.Id).ConfigureAwait(true); + } + } + } + + private void DumpAgent(AssistantModel agent) + { + this._output.WriteLine($"# {agent.Id}"); + this._output.WriteLine($"# {agent.Model}"); + this._output.WriteLine($"# {agent.Instructions}"); + this._output.WriteLine($"# {agent.Name}"); + this._output.WriteLine($"# {agent.Description}{Environment.NewLine}"); + } + + private void DumpAgent(IAgent agent) + { + this._output.WriteLine($"# {agent.Id}"); + this._output.WriteLine($"# {agent.Model}"); + this._output.WriteLine($"# {agent.Instructions}"); + this._output.WriteLine($"# {agent.Name}"); + this._output.WriteLine($"# {agent.Description}{Environment.NewLine}"); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Integration/RunHarness.cs b/dotnet/src/Experimental/Agents.UnitTests/Integration/RunHarness.cs new file mode 100644 index 000000000000..bd901a472c21 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Integration/RunHarness.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +#define DISABLEHOST // Comment line to enable +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Agents; +using Xunit; +using Xunit.Abstractions; + +#pragma warning disable CA1812 // Uninstantiated internal types + +namespace SemanticKernel.Experimental.Agents.UnitTests.Integration; + +/// +/// Dev harness for manipulating runs. +/// +/// +/// Comment out DISABLEHOST definition to enable tests. +/// Not enabled by default. +/// +[Trait("Category", "Integration Tests")] +[Trait("Feature", "Agent")] +public sealed class RunHarness +{ +#if DISABLEHOST + private const string SkipReason = "Harness only for local/dev environment"; +#else + private const string SkipReason = null; +#endif + + private readonly ITestOutputHelper _output; + + /// + /// Test constructor. + /// + public RunHarness(ITestOutputHelper output) + { + this._output = output; + } + + /// + /// Verify creation of run. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyRunLifecycleAsync() + { + var agent = + await new AgentBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .WithInstructions("say something funny") + .WithName("Fred") + .WithDescription("funny agent") + .BuildAsync().ConfigureAwait(true); + + var thread = await agent.NewThreadAsync().ConfigureAwait(true); + + await this.ChatAsync( + thread, + agent, + "I was on my way to the store this morning and...", + "That was great! Tell me another.").ConfigureAwait(true); + } + + /// + /// Verify creation of run. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyRunFromDefinitionAsync() + { + var agent = + await new AgentBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .FromTemplatePath("Templates/PoetAgent.yaml") + .BuildAsync() + .ConfigureAwait(true); + + var thread = await agent.NewThreadAsync().ConfigureAwait(true); + + await this.ChatAsync( + thread, + agent, + "Eggs are yummy and beautiful geometric gems.", + "It rains a lot in Seattle.").ConfigureAwait(true); + } + + /// + /// Verify creation of run. 
+ /// + [Fact(Skip = SkipReason)] + public async Task VerifyFunctionLifecycleAsync() + { + var gamePlugin = KernelPluginFactory.CreateFromType(); + + var agent = + await new AgentBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .FromTemplatePath("Templates/GameAgent.yaml") + .WithPlugin(gamePlugin) + .BuildAsync() + .ConfigureAwait(true); + + var thread = await agent.NewThreadAsync().ConfigureAwait(true); + + await this.ChatAsync( + thread, + agent, + "What is the question for the guessing game?", + "Is it 'RED'?", + "What is the answer?").ConfigureAwait(true); + } + + private async Task ChatAsync(IAgentThread thread, IAgent agent, params string[] messages) + { + foreach (var message in messages) + { + var messageUser = await thread.AddUserMessageAsync(message).ConfigureAwait(true); + this.LogMessage(messageUser); + + var agentMessages = await thread.InvokeAsync(agent).ToArrayAsync().ConfigureAwait(true); + this.LogMessages(agentMessages); + } + } + + private void LogMessages(IEnumerable messages) + { + foreach (var message in messages) + { + this.LogMessage(message); + } + } + + private void LogMessage(IChatMessage message) + { + this._output.WriteLine($"# {message.Id}"); + this._output.WriteLine($"# {message.Content}"); + this._output.WriteLine($"# {message.Role}"); + this._output.WriteLine($"# {message.AgentId}"); + } + + private sealed class GuessingGame + { + /// + /// Get the question + /// + [KernelFunction, Description("Get the guessing game question")] + public string GetQuestion() => "What color am I thinking of?"; + + /// + /// Get the answer + /// + [KernelFunction, Description("Get the answer to the guessing game question.")] + public string GetAnswer() => "Blue"; + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Integration/ThreadHarness.cs b/dotnet/src/Experimental/Agents.UnitTests/Integration/ThreadHarness.cs new file mode 100644 index 000000000000..eabfb3aa5539 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Integration/ThreadHarness.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. + +#define DISABLEHOST // Comment line to enable +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Agents; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.Experimental.Agents.UnitTests.Integration; + +/// +/// Dev harness for manipulating threads. +/// +/// +/// Comment out DISABLEHOST definition to enable tests. +/// Not enabled by default. +/// +[Trait("Category", "Integration Tests")] +[Trait("Feature", "Agent")] +public sealed class ThreadHarness +{ +#if DISABLEHOST + private const string SkipReason = "Harness only for local/dev environment"; +#else + private const string SkipReason = null; +#endif + + private readonly ITestOutputHelper _output; + + /// + /// Test constructor. + /// + public ThreadHarness(ITestOutputHelper output) + { + this._output = output; + } + + /// + /// Verify creation and retrieval of thread. 
+ /// + [Fact(Skip = SkipReason)] + public async Task VerifyThreadLifecycleAsync() + { + var agent = + await new AgentBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .WithName("DeleteMe") + .BuildAsync() + .ConfigureAwait(true); + + var thread = await agent.NewThreadAsync().ConfigureAwait(true); + + Assert.NotNull(thread.Id); + + this._output.WriteLine($"# {thread.Id}"); + + var message = await thread.AddUserMessageAsync("I'm so confused!").ConfigureAwait(true); + Assert.NotNull(message); + + this._output.WriteLine($"# {message.Id}"); + + var context = new OpenAIRestContext(AgentBuilder.OpenAIBaseUrl, TestConfig.OpenAIApiKey); + var copy = await context.GetThreadModelAsync(thread.Id).ConfigureAwait(true); + + await context.DeleteThreadModelAsync(thread.Id).ConfigureAwait(true); + + await Assert.ThrowsAsync(() => context.GetThreadModelAsync(thread.Id)).ConfigureAwait(true); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/MockExtensions.cs b/dotnet/src/Experimental/Agents.UnitTests/MockExtensions.cs new file mode 100644 index 000000000000..d8dc8a8a41ef --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/MockExtensions.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Threading; +using Moq; +using Moq.Protected; + +namespace SemanticKernel.Experimental.Agents.UnitTests; + +internal static class MockExtensions +{ + public static void VerifyMock(this Mock mockHandler, HttpMethod method, int times, string? uri = null) + { + mockHandler.Protected().Verify( + "SendAsync", + Times.Exactly(times), + ItExpr.Is(req => req.Method == method && (uri == null || req.RequestUri == new Uri(uri))), + ItExpr.IsAny()); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Templates/GameAgent.yaml b/dotnet/src/Experimental/Agents.UnitTests/Templates/GameAgent.yaml new file mode 100644 index 000000000000..1f548b665839 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Templates/GameAgent.yaml @@ -0,0 +1,4 @@ +name: Fred +template: | + Run a guessing game where the user tries to guess the answer to a question but don't tell them the answer unless they give up by asking for the answer. + diff --git a/dotnet/src/Experimental/Agents.UnitTests/Templates/PoetAgent.yaml b/dotnet/src/Experimental/Agents.UnitTests/Templates/PoetAgent.yaml new file mode 100644 index 000000000000..6bcec526ee73 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/Templates/PoetAgent.yaml @@ -0,0 +1,4 @@ +name: Poet +template: | + Compose a sonnet inspired by the user input. +description: You are a poet that composes poems based on user input. diff --git a/dotnet/src/Experimental/Agents.UnitTests/TestConfig.cs b/dotnet/src/Experimental/Agents.UnitTests/TestConfig.cs new file mode 100644 index 000000000000..e11087c02285 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/TestConfig.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Reflection; +using Microsoft.Extensions.Configuration; +using Xunit.Sdk; + +namespace SemanticKernel.Experimental.Agents.UnitTests; + +internal static class TestConfig +{ + public const string SupportedGpt35TurboModel = "gpt-3.5-turbo-1106"; + + public static IConfiguration Configuration { get; } = CreateConfiguration(); + + public static string OpenAIApiKey => + TestConfig.Configuration.GetValue("OpenAIApiKey") ?? 
+ throw new TestClassException("Missing OpenAI APIKey."); + + private static IConfiguration CreateConfiguration() + { + return + new ConfigurationBuilder() + .AddEnvironmentVariables() + .AddJsonFile("testsettings.json") + .AddJsonFile("testsettings.development.json", optional: true) + .AddUserSecrets(Assembly.GetExecutingAssembly()) + .Build(); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/testsettings.json b/dotnet/src/Experimental/Agents.UnitTests/testsettings.json new file mode 100644 index 000000000000..d456a389e0f9 --- /dev/null +++ b/dotnet/src/Experimental/Agents.UnitTests/testsettings.json @@ -0,0 +1,3 @@ +{ + "OpenAIApiKey": "" +} diff --git a/dotnet/src/Experimental/Agents/AgentBuilder.cs b/dotnet/src/Experimental/Agents/AgentBuilder.cs new file mode 100644 index 000000000000..563bb1aa51b5 --- /dev/null +++ b/dotnet/src/Experimental/Agents/AgentBuilder.cs @@ -0,0 +1,319 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Fluent builder for initializing an instance. +/// +public partial class AgentBuilder +{ + internal const string OpenAIBaseUrl = "https://api.openai.com/v1"; + + private readonly AssistantModel _model; + private readonly KernelPluginCollection _plugins; + private readonly HashSet _tools; + private readonly List _fileIds; + private string? _apiKey; + private string? _endpoint; + private string? _version; + private Func? _httpClientProvider; + private PromptTemplateConfig? _config; + + /// + /// Initializes a new instance of the class. + /// + public AgentBuilder() + { + this._model = new AssistantModel(); + this._plugins = new KernelPluginCollection(); + this._tools = new HashSet(StringComparer.OrdinalIgnoreCase); + this._fileIds = new List(); + } + + /// + /// Create a instance. + /// + /// A cancellation token + /// A new instance. + public async Task BuildAsync(CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(this._model.Model)) + { + throw new AgentException("Model must be defined for agent."); + } + + if (string.IsNullOrWhiteSpace(this._apiKey)) + { + throw new AgentException("ApiKey must be provided for agent."); + } + + if (string.IsNullOrWhiteSpace(this._endpoint)) + { + throw new AgentException("Endpoint must be provided for agent."); + } + + this._model.Tools.AddRange(this._tools.Select(t => new ToolModel { Type = t })); + this._model.FileIds.AddRange(this._fileIds.Distinct(StringComparer.OrdinalIgnoreCase)); + + return + await Agent.CreateAsync( + new OpenAIRestContext(this._endpoint!, this._apiKey!, this._version, this._httpClientProvider), + this._model, + this._config, + this._plugins, + cancellationToken).ConfigureAwait(false); + } + + /// + /// Create a instance. + /// + /// The agent id to retrieve + /// A cancellation token + /// A new instance. 
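For orientation, a minimal sketch of reaching BuildAsync through the fluent surface above; the API-key source is an assumption (any configuration mechanism works), and the model id matches the one used by the tests in this PR.

// Illustrative only: BuildAsync throws AgentException if the model or API key was never supplied.
string apiKey = Environment.GetEnvironmentVariable("OpenAIApiKey")!; // placeholder key source

IAgent agent =
    await new AgentBuilder()
        .WithOpenAIChatCompletion("gpt-3.5-turbo-1106", apiKey)
        .WithName("Poet")
        .WithInstructions("Compose a sonnet inspired by the user input.")
        .BuildAsync();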
+ public async Task GetAsync(string agentId, CancellationToken cancellationToken = default) + { + Verify.NotNull(agentId, nameof(agentId)); + + if (string.IsNullOrWhiteSpace(this._apiKey)) + { + throw new AgentException("ApiKey must be provided for agent."); + } + + if (string.IsNullOrWhiteSpace(this._endpoint)) + { + throw new AgentException("Endpoint must be provided for agent."); + } + + var restContext = new OpenAIRestContext(this._endpoint!, this._apiKey!, this._version, this._httpClientProvider); + var model = await restContext.GetAssistantModelAsync(agentId, cancellationToken).ConfigureAwait(false); + + return + await Agent.CreateAsync( + restContext, + model, + this._config, + this._plugins, + cancellationToken).ConfigureAwait(false); + } + + /// + /// Define the OpenAI chat completion service (required). + /// + /// instance for fluid expression. + public AgentBuilder WithAzureOpenAIChatCompletion(string endpoint, string model, string apiKey, string? version = null) + { + this._apiKey = apiKey; + this._model.Model = model; + this._endpoint = $"{endpoint}/openai"; + this._version = version ?? "2024-02-15-preview"; + + return this; + } + + /// + /// Define the OpenAI chat completion service (required). + /// + /// instance for fluid expression. + public AgentBuilder WithOpenAIChatCompletion(string model, string apiKey) + { + this._apiKey = apiKey; + this._model.Model = model; + this._endpoint = OpenAIBaseUrl; + + return this; + } + + /// + /// Create a new agent from a yaml formatted string. + /// + /// YAML agent definition. + /// instance for fluid expression. + public AgentBuilder FromTemplate(string template) + { + this._config = KernelFunctionYaml.ToPromptTemplateConfig(template); + + this.WithInstructions(this._config.Template.Trim()); + + if (!string.IsNullOrWhiteSpace(this._config.Name)) + { + this.WithName(this._config.Name?.Trim()); + } + + if (!string.IsNullOrWhiteSpace(this._config.Description)) + { + this.WithDescription(this._config.Description?.Trim()); + } + + return this; + } + + /// + /// Create a new agent from a yaml template. + /// + /// Path to a configuration file. + /// instance for fluid expression. + public AgentBuilder FromTemplatePath(string templatePath) + { + var yamlContent = File.ReadAllText(templatePath); + + return this.FromTemplate(yamlContent); + } + + /// + /// Provide an httpclient (optional). + /// + /// instance for fluid expression. + public AgentBuilder WithHttpClient(HttpClient httpClient) + { + this._httpClientProvider ??= () => httpClient; + + return this; + } + + /// + /// Define the agent description (optional). + /// + /// instance for fluid expression. + public AgentBuilder WithDescription(string? description) + { + this._model.Description = description; + + return this; + } + + /// + /// Define the agent instructions (optional). + /// + /// instance for fluid expression. + public AgentBuilder WithInstructions(string instructions) + { + this._model.Instructions = instructions; + + return this; + } + + /// + /// Define the agent metadata (optional). + /// + /// instance for fluid expression. + public AgentBuilder WithMetadata(string key, object value) + { + this._model.Metadata[key] = value; + + return this; + } + + /// + /// Define the agent metadata (optional). + /// + /// instance for fluid expression. + public AgentBuilder WithMetadata(IDictionary metadata) + { + foreach (var kvp in metadata) + { + this._model.Metadata[kvp.Key] = kvp.Value; + } + + return this; + } + + /// + /// Define the agent name (optional). 
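A small sketch of FromTemplate with an inline YAML string instead of a file path, assuming the same YAML shape as the PoetAgent template added in this PR; apiKey is the placeholder from the earlier sketch.

// Sketch: FromTemplate lifts name, description, and instructions out of the YAML config.
const string template = @"
name: Poet
description: You are a poet that composes poems based on user input.
template: |
  Compose a sonnet inspired by the user input.
";

IAgent poet =
    await new AgentBuilder()
        .WithOpenAIChatCompletion("gpt-3.5-turbo-1106", apiKey) // apiKey: placeholder as above
        .FromTemplate(template)
        .BuildAsync();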
+ /// + /// instance for fluid expression. + public AgentBuilder WithName(string? name) + { + this._model.Name = name; + + return this; + } + + /// + /// Enable the code-interpreter tool with this agent. + /// + /// instance for fluid expression. + public AgentBuilder WithCodeInterpreter() + { + this._tools.Add(Agent.ToolCodeInterpreter); + + return this; + } + + /// + /// Enable the retrieval tool with this agent. + /// + /// Optional set of uploaded file identifiers. + /// instance for fluid expression. + public AgentBuilder WithRetrieval(params string[] fileIds) + { + this._tools.Add(Agent.ToolRetrieval); + + return this.WithFiles(fileIds); + } + + /// + /// Define functions associated with agent instance (optional). + /// + /// instance for fluid expression. + public AgentBuilder WithPlugin(KernelPlugin? plugin) + { + if (plugin != null) + { + this._plugins.Add(plugin); + } + + return this; + } + + /// + /// Define functions associated with agent instance (optional). + /// + /// instance for fluid expression. + public AgentBuilder WithPlugins(IEnumerable plugins) + { + this._plugins.AddRange(plugins); + + return this; + } + + /// + /// Associate an uploaded file with the agent, by identifier. + /// + /// The uploaded file identifier. + /// instance for fluid expression. + public AgentBuilder WithFile(string fileId) + { + if (!string.IsNullOrWhiteSpace(fileId)) + { + this._fileIds.Add(fileId); + } + + return this; + } + + /// + /// Associate uploaded files with the agent, by identifier. + /// + /// The uploaded file identifiers. + /// instance for fluid expression. + public AgentBuilder WithFiles(params string[] fileIds) + { + if (fileIds.Length > 0) + { + this._fileIds.AddRange(fileIds); + } + + return this; + } +} diff --git a/dotnet/src/Experimental/Agents/AgentCapability.cs b/dotnet/src/Experimental/Agents/AgentCapability.cs new file mode 100644 index 000000000000..66c9a815bc53 --- /dev/null +++ b/dotnet/src/Experimental/Agents/AgentCapability.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Description of agent capabilities. +/// +[Flags] +public enum AgentCapability +{ + /// + /// No additional capabilities. + /// + None = 0, + + /// + /// Has function / plugin capability. + /// + Functions, + + /// + /// Has document / data retrieval capability. + /// + Retrieval, + + /// + /// Has code-interpereter capability. + /// + CodeInterpreter, +} diff --git a/dotnet/src/Experimental/Agents/AgentPlugin.cs b/dotnet/src/Experimental/Agents/AgentPlugin.cs new file mode 100644 index 000000000000..b11deeccab6c --- /dev/null +++ b/dotnet/src/Experimental/Agents/AgentPlugin.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Specialization of for +/// +public abstract class AgentPlugin : KernelPlugin +{ + /// + protected AgentPlugin(string name, string? description = null) + : base(name, description) + { + // No specialization... 
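A hedged sketch of wiring the built-in tools and an uploaded file through the builder methods above; the file identifier is a placeholder for a value returned by the OpenAI file upload endpoint, not something produced by this code.

// Sketch: enable code-interpreter and retrieval, attaching an uploaded file by id.
IAgent analyst =
    await new AgentBuilder()
        .WithOpenAIChatCompletion("gpt-3.5-turbo-1106", apiKey)
        .WithInstructions("Answer questions using the attached document.")
        .WithCodeInterpreter()
        .WithRetrieval("file-abc123") // placeholder file id
        .BuildAsync();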
+ } + + internal abstract Agent Agent { get; } + + /// + /// Invoke plugin with user input + /// + /// The user input + /// A cancel token + /// The agent response + public async Task InvokeAsync(string input, CancellationToken cancellationToken = default) + { + return await this.InvokeAsync(input, arguments: null, cancellationToken).ConfigureAwait(false); + } + + /// + /// Invoke plugin with user input + /// + /// The user input + /// The arguments + /// A cancel token + /// The agent response + public async Task InvokeAsync(string input, KernelArguments? arguments, CancellationToken cancellationToken = default) + { + arguments ??= new KernelArguments(); + + arguments["input"] = input; + + var result = await this.First().InvokeAsync(this.Agent.Kernel, arguments, cancellationToken).ConfigureAwait(false); + var response = result.GetValue()!; + + return response.Message; + } +} diff --git a/dotnet/src/Experimental/Agents/AgentResponse.cs b/dotnet/src/Experimental/Agents/AgentResponse.cs new file mode 100644 index 000000000000..658656c7a0d4 --- /dev/null +++ b/dotnet/src/Experimental/Agents/AgentResponse.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Response from agent when called as a . +/// +public class AgentResponse +{ + /// + /// The thread-id for the agent conversation. + /// + [JsonPropertyName("thread_id")] + public string ThreadId { get; set; } = string.Empty; + + /// + /// The agent response. + /// + [JsonPropertyName("response")] + public string Message { get; set; } = string.Empty; + + /// + /// Instructions from agent on next steps. + /// + [JsonPropertyName("system_instructions")] + public string Instructions { get; set; } = string.Empty; +} diff --git a/dotnet/src/Experimental/Agents/AssemblyInfo.cs b/dotnet/src/Experimental/Agents/AssemblyInfo.cs new file mode 100644 index 000000000000..951ee2d58289 --- /dev/null +++ b/dotnet/src/Experimental/Agents/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0101")] diff --git a/dotnet/src/Experimental/Agents/ChatCompletionAgent.cs b/dotnet/src/Experimental/Agents/ChatCompletionAgent.cs new file mode 100644 index 000000000000..5150c0ee218e --- /dev/null +++ b/dotnet/src/Experimental/Agents/ChatCompletionAgent.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Represent an agent that is built around the SK ChatCompletion API and leverages the API's capabilities. +/// +public sealed class ChatCompletionAgent +{ + private readonly Kernel _kernel; + private readonly string _instructions; + private readonly PromptExecutionSettings? _promptExecutionSettings; + + /// + /// Initializes a new instance of the class. + /// + /// The containing services, plugins, and other state for use by the agent. + /// The instructions for the agent. + /// The optional execution settings for the agent. If not provided, default settings will be used. + public ChatCompletionAgent(Kernel kernel, string instructions, PromptExecutionSettings? 
executionSettings = null) + { + Verify.NotNull(kernel, nameof(kernel)); + this._kernel = kernel; + + Verify.NotNullOrWhiteSpace(instructions, nameof(instructions)); + this._instructions = instructions; + + this._promptExecutionSettings = executionSettings; + } + + /// + /// Invokes the agent to process the given messages and generate a response. + /// + /// A list of the messages for the agent to process. + /// An optional to cancel the operation. + /// List of messages representing the agent's response. + public async Task> InvokeAsync(IReadOnlyList messages, CancellationToken cancellationToken = default) + { + var chat = new ChatHistory(this._instructions); + chat.AddRange(messages); + + var chatCompletionService = this.GetChatCompletionService(); + + var chatMessageContent = await chatCompletionService.GetChatMessageContentsAsync( + chat, + this._promptExecutionSettings, + this._kernel, + cancellationToken).ConfigureAwait(false); + + return chatMessageContent; + } + + /// + /// Resolves and returns the chat completion service. + /// + /// An instance of the chat completion service. + private IChatCompletionService GetChatCompletionService() + { + return this._kernel.GetRequiredService(); + } +} diff --git a/dotnet/src/Experimental/Agents/Exceptions/AgentException.cs b/dotnet/src/Experimental/Agents/Exceptions/AgentException.cs new file mode 100644 index 000000000000..25c60b710b11 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Exceptions/AgentException.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Exceptions; + +/// +/// Agent specific . +/// +public class AgentException : KernelException +{ + /// + /// Initializes a new instance of the class. + /// + public AgentException() + { + } + + /// + /// Initializes a new instance of the class with a specified error message. + /// + /// The error message that explains the reason for the exception. + public AgentException(string? message) : base(message) + { + } + + /// + /// Initializes a new instance of the class with a specified error message and a reference to the inner exception that is the cause of this exception. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference if no inner exception is specified. + public AgentException(string? message, Exception? innerException) : base(message, innerException) + { + } +} diff --git a/dotnet/src/Experimental/Agents/Experimental.Agents.csproj b/dotnet/src/Experimental/Agents/Experimental.Agents.csproj new file mode 100644 index 000000000000..3496b3afaf5c --- /dev/null +++ b/dotnet/src/Experimental/Agents/Experimental.Agents.csproj @@ -0,0 +1,32 @@ + + + + Microsoft.SemanticKernel.Experimental.Agents + Microsoft.SemanticKernel.Experimental.Agents + netstandard2.0 + alpha + Latest + + + + + + Semantic Kernel Agents + Semantic Kernel Agents + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelExtensions.cs b/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelExtensions.cs new file mode 100644 index 000000000000..5f2c6596a3ee --- /dev/null +++ b/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelExtensions.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. 
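A minimal sketch of driving the ChatCompletionAgent defined above; the kernel wiring and model id mirror what this PR uses elsewhere, while the instructions and prompt text are illustrative.

// Sketch: ChatCompletionAgent wraps the kernel's chat completion service behind fixed instructions.
Kernel kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion("gpt-3.5-turbo-1106", apiKey)
    .Build();

var chatAgent = new ChatCompletionAgent(kernel, instructions: "You answer concisely.");

var messages = new List<ChatMessageContent>
{
    new(AuthorRole.User, "What is Semantic Kernel?")
};

IReadOnlyList<ChatMessageContent> reply = await chatAgent.InvokeAsync(messages);
Console.WriteLine(reply[0].Content);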
+ +using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Extensions; + +internal static class AssistantsKernelExtensions +{ + /// + /// Retrieve a kernel function based on the tool name. + /// + public static KernelFunction GetAssistantTool(this Kernel kernel, string toolName) + { + string[] nameParts = toolName.Split('-'); + return nameParts.Length switch + { + 2 => kernel.Plugins.GetFunction(nameParts[0], nameParts[1]), + _ => throw new AgentException($"Unknown tool: {toolName}"), + }; + } +} diff --git a/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelFunctionExtensions.cs b/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelFunctionExtensions.cs new file mode 100644 index 000000000000..f26f33e111e4 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelFunctionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Json.More; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +internal static class AssistantsKernelFunctionExtensions +{ + /// + /// Produce a fully qualified toolname. + /// + public static string GetQualifiedName(this KernelFunction function, string pluginName) + { + return $"{pluginName}-{function.Name}"; + } + + /// + /// Convert to an OpenAI tool model. + /// + /// The source function + /// The plugin name + /// An OpenAI tool model + public static ToolModel ToToolModel(this KernelFunction function, string pluginName) + { + var metadata = function.Metadata; + var required = new List(metadata.Parameters.Count); + var properties = + metadata.Parameters.ToDictionary( + p => p.Name, + p => + { + if (p.IsRequired) + { + required.Add(p.Name); + } + + return + new OpenAIParameter + { + Type = ConvertType(p.ParameterType), + Description = p.Description, + }; + }); + + var payload = + new ToolModel + { + Type = "function", + Function = + new() + { + Name = function.GetQualifiedName(pluginName), + Description = function.Description, + Parameters = + new OpenAIParameters + { + Properties = properties, + Required = required, + }, + }, + }; + + return payload; + } + + private static string ConvertType(Type? type) + { + if (type == null || type == typeof(string)) + { + return "string"; + } + + if (type.IsNumber()) + { + return "number"; + } + + if (type.IsEnum) + { + return "enum"; + } + + return type.Name; + } +} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Assistant.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Assistant.cs new file mode 100644 index 000000000000..be19e285d684 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Assistant.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using System.Web; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Supported OpenAI REST API actions for assistants. +/// +internal static partial class OpenAIRestExtensions +{ + /// + /// Create a new assistant. 
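For clarity, a sketch of the tool-naming convention the two extension classes above implement; GetQualifiedName and GetAssistantTool are internal helpers, and the plugin/function names here are hypothetical.

// Illustration only: a function "Add" on a plugin "Math" is exposed to the Assistants API as "Math-Add",
// and GetAssistantTool splits on '-' to resolve it again, which appears to assume neither the plugin
// nor the function name itself contains a '-'.
string qualified = function.GetQualifiedName("Math"); // "Math-Add", assuming function.Name == "Add"
KernelFunction resolved = kernel.GetAssistantTool(qualified); // resolves back through kernel.Plugins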
+ /// + /// A context for accessing OpenAI REST endpoint + /// The assistant definition + /// A cancellation token + /// An assistant definition + public static Task CreateAssistantModelAsync( + this OpenAIRestContext context, + AssistantModel model, + CancellationToken cancellationToken = default) + { + var payload = + new + { + model = model.Model, + name = model.Name, + description = model.Description, + instructions = model.Instructions, + tools = model.Tools, + file_ids = model.FileIds, + metadata = model.Metadata, + }; + + return + context.ExecutePostAsync( + context.GetAssistantsUrl(), + payload, + cancellationToken); + } + + /// + /// Retrieve an assistant by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// The assistant identifier + /// A cancellation token + /// An assistant definition + public static Task GetAssistantModelAsync( + this OpenAIRestContext context, + string assistantId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + context.GetAssistantUrl(assistantId), + cancellationToken); + } + + /// + /// Retrieve all assistants. + /// + /// A context for accessing OpenAI REST endpoint + /// A limit on the number of objects to be returned. + /// Limit can range between 1 and 100, and the default is 20. + /// Set to true to sort by ascending created_at timestamp + /// instead of descending. + /// A cursor for use in pagination. This is an object ID that defines + /// your place in the list. For instance, if you make a list request and receive 100 objects, + /// ending with obj_foo, your subsequent call can include after=obj_foo in order to + /// fetch the next page of the list. + /// A cursor for use in pagination. This is an object ID that defines + /// your place in the list. For instance, if you make a list request and receive 100 objects, + /// ending with obj_foo, your subsequent call can include before=obj_foo in order to + /// fetch the previous page of the list. + /// List of retrieved Assistants + /// A cancellation token + /// An enumeration of assistant definitions + public static async Task> ListAssistantModelsAsync( + this OpenAIRestContext context, + int limit = 20, + bool ascending = false, + string? after = null, + string? before = null, + CancellationToken cancellationToken = default) + { + var query = HttpUtility.ParseQueryString(string.Empty); + query["limit"] = limit.ToString(CultureInfo.InvariantCulture); + query["order"] = ascending ? 
"asc" : "desc"; + if (!string.IsNullOrWhiteSpace(after)) + { + query["after"] = after; + } + if (!string.IsNullOrWhiteSpace(before)) + { + query["before"] = before; + } + + var result = + await context.ExecuteGetAsync( + context.GetAssistantsUrl(), + query.ToString(), + cancellationToken).ConfigureAwait(false); + + return result.Data; + } + + /// + /// Delete an existing assistant + /// + /// A context for accessing OpenAI REST endpoint + /// Identifier of assistant to delete + /// A cancellation token + public static Task DeleteAssistantModelAsync( + this OpenAIRestContext context, + string id, + CancellationToken cancellationToken = default) + { + return context.ExecuteDeleteAsync(context.GetAssistantUrl(id), cancellationToken); + } + + internal static string GetAssistantsUrl(this OpenAIRestContext context) => $"{context.Endpoint}/assistants"; + + internal static string GetAssistantUrl(this OpenAIRestContext context, string assistantId) => $"{context.Endpoint}/assistants/{assistantId}"; +} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.AssistantFiles.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.AssistantFiles.cs new file mode 100644 index 000000000000..bbb31226ff67 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.AssistantFiles.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Supported OpenAI REST API actions for managing assistant files. +/// +internal static partial class OpenAIRestExtensions +{ + /// + /// Associate uploaded file with the assistant, by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// The assistant identifier + /// The identifier of the uploaded file. + /// A cancellation token + /// An assistant definition + public static async Task AddAssistantFileAsync( + this OpenAIRestContext context, + string assistantId, + string fileId, + CancellationToken cancellationToken = default) + { + var payload = + new + { + file_id = fileId + }; + + var result = + await context.ExecutePostAsync( + context.GetAssistantFileUrl(assistantId), + payload, + cancellationToken).ConfigureAwait(false); + + return result.Id; + } + + /// + /// Disassociate uploaded file with from assistant, by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// The assistant identifier + /// The identifier of the uploaded file. 
+ /// A cancellation token + public static Task RemoveAssistantFileAsync( + this OpenAIRestContext context, + string assistantId, + string fileId, + CancellationToken cancellationToken = default) + { + return context.ExecuteDeleteAsync(context.GetAssistantFileUrl(assistantId, fileId), cancellationToken); + } + + private static string GetAssistantFileUrl(this OpenAIRestContext context, string assistantId) + { + return $"{context.GetAssistantUrl(assistantId)}/files"; + } + + private static string GetAssistantFileUrl(this OpenAIRestContext context, string assistantId, string fileId) + { + return $"{context.GetAssistantUrl(assistantId)}/files/{fileId}"; + } +} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Messages.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Messages.cs new file mode 100644 index 000000000000..a94c20de1e43 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Messages.cs @@ -0,0 +1,119 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Supported OpenAI REST API actions for thread messages. +/// +internal static partial class OpenAIRestExtensions +{ + /// + /// Create a new message. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// The message text + /// A cancellation token + /// A message definition + public static Task CreateUserTextMessageAsync( + this OpenAIRestContext context, + string threadId, + string content, + CancellationToken cancellationToken = default) + { + var payload = + new + { + role = AuthorRole.User.Label, + content, + }; + + return + context.ExecutePostAsync( + context.GetMessagesUrl(threadId), + payload, + cancellationToken); + } + + /// + /// Retrieve an message by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// The message identifier + /// A cancellation token + /// A message definition + public static Task GetMessageAsync( + this OpenAIRestContext context, + string threadId, + string messageId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + context.GetMessagesUrl(threadId, messageId), + cancellationToken); + } + + /// + /// Retrieve all thread messages. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// A cancellation token + /// A message list definition + public static Task GetMessagesAsync( + this OpenAIRestContext context, + string threadId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + context.GetMessagesUrl(threadId), + cancellationToken); + } + + /// + /// Retrieve all thread messages. 
+ /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// The set of message identifiers to retrieve + /// A cancellation token + /// A message list definition + public static async Task> GetMessagesAsync( + this OpenAIRestContext context, + string threadId, + IEnumerable messageIds, + CancellationToken cancellationToken = default) + { + var tasks = + messageIds.Select( + id => + context.ExecuteGetAsync( + context.GetMessagesUrl(threadId, id), + cancellationToken)).ToArray(); + + await Task.WhenAll(tasks).ConfigureAwait(false); + + return tasks.Select(t => t.Result).ToArray(); + } + + internal static string GetMessagesUrl(this OpenAIRestContext context, string threadId) + { + return $"{context.GetThreadUrl(threadId)}/messages"; + } + + internal static string GetMessagesUrl(this OpenAIRestContext context, string threadId, string messageId) + { + return $"{context.GetThreadUrl(threadId)}/messages/{messageId}"; + } +} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Run.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Run.cs new file mode 100644 index 000000000000..12d3538ad4de --- /dev/null +++ b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Run.cs @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Supported OpenAI REST API actions for thread runs. +/// +internal static partial class OpenAIRestExtensions +{ + /// + /// Create a new run. + /// + /// A context for accessing OpenAI REST endpoint + /// A thread identifier + /// The assistant identifier + /// Optional instruction override + /// The assistant tools + /// A cancellation token + /// A run definition + public static Task CreateRunAsync( + this OpenAIRestContext context, + string threadId, + string assistantId, + string? instructions = null, + IEnumerable? tools = null, + CancellationToken cancellationToken = default) + { + var payload = + new + { + assistant_id = assistantId, + instructions, + tools, + }; + + return + context.ExecutePostAsync( + context.GetRunsUrl(threadId), + payload, + cancellationToken); + } + + /// + /// Retrieve an run by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// A thread identifier + /// A run identifier + /// A cancellation token + /// A run definition + public static Task GetRunAsync( + this OpenAIRestContext context, + string threadId, + string runId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + context.GetRunUrl(threadId, runId), + cancellationToken); + } + + /// + /// Retrieve run steps by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// A thread identifier + /// A run identifier + /// A cancellation token + /// A set of run steps + public static Task GetRunStepsAsync( + this OpenAIRestContext context, + string threadId, + string runId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + context.GetRunStepsUrl(threadId, runId), + cancellationToken); + } + + /// + /// Add a function result for a run. + /// + /// A context for accessing OpenAI REST endpoint + /// A thread identifier + /// The run identifier + /// The function/tool results. 
+ /// A cancellation token + /// A run definition + public static Task AddToolOutputsAsync( + this OpenAIRestContext context, + string threadId, + string runId, + IEnumerable results, + CancellationToken cancellationToken = default) + { + var payload = + new + { + tool_outputs = results + }; + + return + context.ExecutePostAsync( + context.GetRunToolOutputUrl(threadId, runId), + payload, + cancellationToken); + } + + internal static string GetRunsUrl(this OpenAIRestContext context, string threadId) + { + return $"{context.GetThreadUrl(threadId)}/runs"; + } + + internal static string GetRunUrl(this OpenAIRestContext context, string threadId, string runId) + { + return $"{context.GetThreadUrl(threadId)}/runs/{runId}"; + } + + internal static string GetRunStepsUrl(this OpenAIRestContext context, string threadId, string runId) + { + return $"{context.GetThreadUrl(threadId)}/runs/{runId}/steps"; + } + + internal static string GetRunToolOutputUrl(this OpenAIRestContext context, string threadId, string runId) + { + return $"{context.GetThreadUrl(threadId)}/runs/{runId}/submit_tool_outputs"; + } +} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Thread.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Thread.cs new file mode 100644 index 000000000000..062fbe8ade59 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Thread.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Supported OpenAI REST API actions for threads. +/// +internal static partial class OpenAIRestExtensions +{ + /// + /// Create a new thread. + /// + /// A context for accessing OpenAI REST endpoint + /// A cancellation token + /// A thread definition + public static Task CreateThreadModelAsync( + this OpenAIRestContext context, + CancellationToken cancellationToken = default) + { + return + context.ExecutePostAsync( + context.GetThreadsUrl(), + cancellationToken); + } + + /// + /// Retrieve an thread by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// A cancellation token + /// A thread definition + public static Task GetThreadModelAsync( + this OpenAIRestContext context, + string threadId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + context.GetThreadUrl(threadId), + cancellationToken); + } + + /// + /// Delete an existing thread. 
+ /// + /// A context for accessing OpenAI REST endpoint + /// Identifier of thread to delete + /// A cancellation token + public static Task DeleteThreadModelAsync( + this OpenAIRestContext context, + string id, + CancellationToken cancellationToken = default) + { + return context.ExecuteDeleteAsync(context.GetThreadUrl(id), cancellationToken); + } + + internal static string GetThreadsUrl(this OpenAIRestContext context) => $"{context.Endpoint}/threads"; + + internal static string GetThreadUrl(this OpenAIRestContext context, string threadId) => $"{context.Endpoint}/threads/{threadId}"; +} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs new file mode 100644 index 000000000000..081f935e416b --- /dev/null +++ b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs @@ -0,0 +1,120 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; +using Microsoft.SemanticKernel.Experimental.Agents.Internal; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +internal static partial class OpenAIRestExtensions +{ + private const string HeaderNameOpenAIAssistant = "OpenAI-Beta"; + private const string HeaderNameAuthorization = "Authorization"; + private const string HeaderOpenAIValueAssistant = "assistants=v1"; + + private static Task ExecuteGetAsync( + this OpenAIRestContext context, + string url, + CancellationToken cancellationToken = default) + { + return context.ExecuteGetAsync(url, query: null, cancellationToken); + } + + private static async Task ExecuteGetAsync( + this OpenAIRestContext context, + string url, + string? query = null, + CancellationToken cancellationToken = default) + { + using var request = HttpRequest.CreateGetRequest(context.FormatUrl(url, query)); + + request.AddHeaders(context); + + using var response = await context.GetHttpClient().SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); + + var responseBody = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + + // Common case is for failure exception to be raised by REST invocation. + // Null result is a logical possibility, but unlikely edge case. + // Might occur due to model alignment issues over time. + return + JsonSerializer.Deserialize(responseBody) ?? + throw new AgentException($"Null result processing: {typeof(TResult).Name}"); + } + + private static Task ExecutePostAsync( + this OpenAIRestContext context, + string url, + CancellationToken cancellationToken = default) + { + return context.ExecutePostAsync(url, payload: null, cancellationToken); + } + + private static async Task ExecutePostAsync( + this OpenAIRestContext context, + string url, + object? payload, + CancellationToken cancellationToken = default) + { + using var request = HttpRequest.CreatePostRequest(context.FormatUrl(url), payload); + + request.AddHeaders(context); + + using var response = await context.GetHttpClient().SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); + + var responseBody = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + + return + JsonSerializer.Deserialize(responseBody) ?? 
+ throw new AgentException($"Null result processing: {typeof(TResult).Name}"); + } + + private static async Task ExecuteDeleteAsync( + this OpenAIRestContext context, + string url, + CancellationToken cancellationToken = default) + { + using var request = HttpRequest.CreateDeleteRequest(context.FormatUrl(url)); + + request.AddHeaders(context); + + using var response = await context.GetHttpClient().SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); + } + + private static void AddHeaders(this HttpRequestMessage request, OpenAIRestContext context) + { + if (context.HasVersion) + { + // OpenAI + request.Headers.Add("api-key", context.ApiKey); + } + + // Azure OpenAI + request.Headers.Add(HeaderNameAuthorization, $"Bearer {context.ApiKey}"); + request.Headers.Add(HeaderNameOpenAIAssistant, HeaderOpenAIValueAssistant); + } + + private static string FormatUrl( + this OpenAIRestContext context, + string url, + string? query = null) + { + var hasQuery = !string.IsNullOrWhiteSpace(query); + var delimiter = hasQuery ? "?" : string.Empty; + + if (!context.HasVersion) + { + // OpenAI + return $"{url}{delimiter}{query}"; + } + + // Azure OpenAI + var delimiterB = hasQuery ? "&" : "?"; + + return $"{url}{delimiter}{query}{delimiterB}api-version={context.Version}"; + } +} diff --git a/dotnet/src/Experimental/Agents/IAgent.cs b/dotnet/src/Experimental/Agents/IAgent.cs new file mode 100644 index 000000000000..69a0e9272756 --- /dev/null +++ b/dotnet/src/Experimental/Agents/IAgent.cs @@ -0,0 +1,120 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Represents an agent that can call the model and use tools. +/// +public interface IAgent +{ + /// + /// The agent identifier (which can be referenced in API endpoints). + /// + string Id { get; } + + /// + /// Identifies additional agent capabilities. + /// + AgentCapability Capabilities { get; } + + /// + /// Unix timestamp (in seconds) for when the agent was created + /// + long CreatedAt { get; } + + /// + /// Name of the agent + /// + string? Name { get; } + + /// + /// The description of the agent + /// + string? Description { get; } + + /// + /// ID of the model to use + /// + string Model { get; } + + /// + /// The system instructions that the agent uses + /// + string Instructions { get; } + + /// + /// Identifiers of files associated with agent. + /// + IEnumerable FileIds { get; } + + /// + /// Tools defined for run execution. + /// + KernelPluginCollection Plugins { get; } + + /// + /// A semantic-kernel instance associated with the agent. + /// + internal Kernel Kernel { get; } + + /// + /// Internal tools model. + /// + internal IEnumerable Tools { get; } + + /// + /// Expose the agent as a plugin. + /// + AgentPlugin AsPlugin(); + + /// + /// Expose the agent internally as a prompt-template + /// + internal IPromptTemplate AsPromptTemplate(); + + /// + /// Creates a new agent chat thread. + /// + /// A cancellation token + Task NewThreadAsync(CancellationToken cancellationToken = default); + + /// + /// Gets an existing agent chat thread. + /// + /// The id of the existing chat thread. + /// A cancellation token + Task GetThreadAsync(string id, CancellationToken cancellationToken = default); + + /// + /// Deletes an existing agent chat thread. + /// + /// The id of the existing chat thread. 
Allows for null-fallthrough to simplify caller patterns. + /// A cancellation token + Task DeleteThreadAsync(string? id, CancellationToken cancellationToken = default); + + /// + /// Associate uploaded file with the agent, by identifier. + /// + /// The identifier of the uploaded file. + /// A cancellation token + Task AddFileAsync(string fileId, CancellationToken cancellationToken = default); + + /// + /// Remove association of uploaded file with the agent, by identifier. + /// + /// The identifier of the uploaded file. + /// A cancellation token + Task RemoveFileAsync(string fileId, CancellationToken cancellationToken = default); + + /// + /// Delete current agent. Terminal state - Unable to perform any + /// subsequent actions. + /// + /// A cancellation token + Task DeleteAsync(CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Experimental/Agents/IAgentExtensions.cs b/dotnet/src/Experimental/Agents/IAgentExtensions.cs new file mode 100644 index 000000000000..5488fc91f663 --- /dev/null +++ b/dotnet/src/Experimental/Agents/IAgentExtensions.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Convenience actions for . +/// +public static class IAgentExtensions +{ + /// + /// Invoke agent with user input + /// + /// the agent + /// the user input + /// Optional arguments for parameterized instructions + /// Optional cancellation token + /// Chat messages + public static async IAsyncEnumerable InvokeAsync( + this IAgent agent, + string input, + KernelArguments? arguments = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + IAgentThread thread = await agent.NewThreadAsync(cancellationToken).ConfigureAwait(false); + try + { + await foreach (var message in thread.InvokeAsync(agent, input, arguments, cancellationToken)) + { + yield return message; + } + } + finally + { + await thread.DeleteAsync(cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/dotnet/src/Experimental/Agents/IAgentThread.cs b/dotnet/src/Experimental/Agents/IAgentThread.cs new file mode 100644 index 000000000000..a61b46e62b8f --- /dev/null +++ b/dotnet/src/Experimental/Agents/IAgentThread.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Represents a thread that contains messages. +/// +public interface IAgentThread +{ + /// + /// The thread identifier (which can be referenced in API endpoints). + /// + string Id { get; } + + /// + /// Add a textual user message to the thread. + /// + /// The user message + /// A cancellation token + /// + Task AddUserMessageAsync(string message, CancellationToken cancellationToken = default); + + /// + /// Advance the thread with the specified agent. + /// + /// An agent instance. + /// Optional arguments for parameterized instructions + /// A cancellation token + /// The resulting agent message(s) + IAsyncEnumerable InvokeAsync(IAgent agent, KernelArguments? arguments = null, CancellationToken cancellationToken = default); + + /// + /// Advance the thread with the specified agent. + /// + /// An agent instance. 
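A short sketch of the one-shot extension above: InvokeAsync creates a temporary thread, streams the agent's reply, and deletes the thread afterwards; agent is assumed to be an IAgent built elsewhere.

// Sketch: single-turn invocation with automatic thread cleanup.
await foreach (IChatMessage message in agent.InvokeAsync("Eggs are yummy and beautiful geometric gems."))
{
    Console.WriteLine(message.Content);
}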
+ /// The user message + /// Optional arguments for parameterized instructions + /// A cancellation token + /// The resulting agent message(s) + IAsyncEnumerable InvokeAsync(IAgent agent, string userMessage, KernelArguments? arguments = null, CancellationToken cancellationToken = default); + + /// + /// Delete current thread. Terminal state - Unable to perform any + /// subsequent actions. + /// + /// A cancellation token + Task DeleteAsync(CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Experimental/Agents/IChatMessage.cs b/dotnet/src/Experimental/Agents/IChatMessage.cs new file mode 100644 index 000000000000..e722f73df3ec --- /dev/null +++ b/dotnet/src/Experimental/Agents/IChatMessage.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Collections.ObjectModel; + +namespace Microsoft.SemanticKernel.Experimental.Agents; + +/// +/// Represents a message that is part of an agent thread. +/// +public interface IChatMessage +{ + /// + /// The message identifier (which can be referenced in API endpoints). + /// + string Id { get; } + + /// + /// The id of the agent associated with the a message where role = "agent", otherwise null. + /// + string? AgentId { get; } + + /// + /// The chat message content. + /// + string Content { get; } + + /// + /// The role associated with the chat message. + /// + string Role { get; } + + /// + /// Annotations associated with the message. + /// + IList Annotations { get; } + + /// + /// Properties associated with the message. + /// + ReadOnlyDictionary Properties { get; } + + /// + /// Defines message annotation. + /// + interface IAnnotation + { + /// + /// The file identifier. + /// + string FileId { get; } + + /// + /// The text in the message content that needs to be replaced. + /// + string Label { get; } + + /// + /// The citation. + /// + string? Quote { get; } + + /// + /// Start index of the citation. + /// + int StartIndex { get; } + + /// + /// End index of the citation. + /// + int EndIndex { get; } + } +} diff --git a/dotnet/src/Experimental/Agents/Internal/Agent.cs b/dotnet/src/Experimental/Agents/Internal/Agent.cs new file mode 100644 index 000000000000..ea1230689743 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Internal/Agent.cs @@ -0,0 +1,312 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; +using Microsoft.SemanticKernel.Experimental.Agents.Models; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; + +/// +/// Represents an agent that can call the model and use tools. +/// +internal sealed class Agent : IAgent +{ + public const string ToolCodeInterpreter = "code_interpreter"; + public const string ToolRetrieval = "retrieval"; + + /// + public string Id => this._model.Id; + + /// + public Kernel Kernel { get; } + + /// + public KernelPluginCollection Plugins => this.Kernel.Plugins; + + /// + public AgentCapability Capabilities { get; } + + /// + public long CreatedAt => this._model.CreatedAt; + + /// + public string? Name => this._model.Name; + + /// + public string? 
Description => this._model.Description; + + /// + public string Model => this._model.Model; + + /// + public string Instructions => this._model.Instructions; + + /// + public IEnumerable Tools => this._tools; + + /// + public IEnumerable FileIds => this._fileIds.AsEnumerable(); + + private static readonly Regex s_removeInvalidCharsRegex = new("[^0-9A-Za-z-]"); + private static readonly Dictionary s_templateFactories = + new(StringComparer.OrdinalIgnoreCase) + { + { PromptTemplateConfig.SemanticKernelTemplateFormat, new KernelPromptTemplateFactory() }, + { HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, new HandlebarsPromptTemplateFactory() }, + }; + + private readonly OpenAIRestContext _restContext; + private readonly AssistantModel _model; + private readonly IPromptTemplate _promptTemplate; + private readonly ToolModel[] _tools; + private readonly HashSet _fileIds; + + private AgentPlugin? _agentPlugin; + private bool _isDeleted; + + /// + /// Create a new agent. + /// + /// A context for accessing OpenAI REST endpoint + /// The assistant definition + /// The template config + /// Plugins to initialize as agent tools + /// A cancellation token + /// An initialized instance. + public static async Task CreateAsync( + OpenAIRestContext restContext, + AssistantModel assistantModel, + PromptTemplateConfig? config, + IEnumerable? plugins = null, + CancellationToken cancellationToken = default) + { + var resultModel = await restContext.CreateAssistantModelAsync(assistantModel, cancellationToken).ConfigureAwait(false); + + return new Agent(resultModel, config, restContext, plugins); + } + + /// + /// Initializes a new instance of the class. + /// + internal Agent( + AssistantModel assistantModel, + PromptTemplateConfig? config, + OpenAIRestContext restContext, + IEnumerable? plugins = null) + { + config ??= + new PromptTemplateConfig + { + Name = assistantModel.Name, + Description = assistantModel.Description, + Template = assistantModel.Instructions, + }; + + this._model = assistantModel; + this._restContext = restContext; + this._promptTemplate = this.DefinePromptTemplate(config); + this._fileIds = new HashSet(assistantModel.FileIds, StringComparer.OrdinalIgnoreCase); + + IKernelBuilder builder = Kernel.CreateBuilder(); + + this.Kernel = + Kernel + .CreateBuilder() + .AddOpenAIChatCompletion(this._model.Model, this._restContext.ApiKey) + .Build(); + + if (plugins is not null) + { + this.Kernel.Plugins.AddRange(plugins); + } + + this.Capabilities = + (this.Kernel.Plugins.Count > 0 ? AgentCapability.Functions : AgentCapability.None) | + (this._model.Tools.Any(t => string.Equals(t.Type, ToolRetrieval, StringComparison.OrdinalIgnoreCase)) ? AgentCapability.Retrieval : AgentCapability.None) | + (this._model.Tools.Any(t => string.Equals(t.Type, ToolCodeInterpreter, StringComparison.OrdinalIgnoreCase)) ? 
AgentCapability.CodeInterpreter : AgentCapability.None); + + this._tools = this._model.Tools.Concat(this.Kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolModel(p.Name)))).ToArray(); + } + + public AgentPlugin AsPlugin() => this._agentPlugin ??= this.DefinePlugin(); + + public IPromptTemplate AsPromptTemplate() => this._promptTemplate; + + /// + public Task NewThreadAsync(CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + return ChatThread.CreateAsync(this._restContext, cancellationToken); + } + + /// + public Task GetThreadAsync(string id, CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + return ChatThread.GetAsync(this._restContext, id, cancellationToken); + } + + /// + public async Task DeleteThreadAsync(string? id, CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(id)) + { + return; + } + + await this._restContext.DeleteThreadModelAsync(id!, cancellationToken).ConfigureAwait(false); + } + + /// + public async Task AddFileAsync(string fileId, CancellationToken cancellationToken = default) + { + if (this._isDeleted) + { + return; + } + + if (this._fileIds.Contains(fileId)) + { + return; + } + + await this._restContext.AddAssistantFileAsync(this.Id, fileId, cancellationToken).ConfigureAwait(false); + + this._fileIds.Add(fileId); + } + + /// + public async Task RemoveFileAsync(string fileId, CancellationToken cancellationToken = default) + { + if (this._isDeleted) + { + return; + } + + if (!this._fileIds.Contains(fileId)) + { + return; + } + + await this._restContext.RemoveAssistantFileAsync(this.Id, fileId, cancellationToken).ConfigureAwait(false); + + this._fileIds.Remove(fileId); + } + + /// + public async Task DeleteAsync(CancellationToken cancellationToken = default) + { + if (this._isDeleted) + { + return; + } + + await this._restContext.DeleteAssistantModelAsync(this.Id, cancellationToken).ConfigureAwait(false); + this._isDeleted = true; + } + + /// + /// Marshal thread run through interface. + /// + /// The user input + /// Arguments for parameterized instructions + /// A cancellation token. 
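A hedged sketch of the agent-as-plugin path exposed by AsPlugin above; poetAgent is assumed to be an existing IAgent, and the coordinator instructions are illustrative.

// Sketch: expose one agent as a plugin so it can be invoked directly or handed to another agent as a tool.
AgentPlugin poetPlugin = poetAgent.AsPlugin();

// Direct call: runs AskAsync on a throwaway thread and returns the joined response text.
string poem = await poetPlugin.InvokeAsync("Write about the rain in Seattle.");

// Or register it with a second agent so the model can decide when to call it.
IAgent coordinator =
    await new AgentBuilder()
        .WithOpenAIChatCompletion("gpt-3.5-turbo-1106", apiKey)
        .WithInstructions("Use the poet plugin whenever a poem is requested.")
        .WithPlugin(poetPlugin)
        .BuildAsync();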
+ /// An agent response ( + private async Task AskAsync( + [Description("The user message provided to the agent.")] + string input, + KernelArguments arguments, + CancellationToken cancellationToken = default) + { + var thread = await this.NewThreadAsync(cancellationToken).ConfigureAwait(false); + try + { + await thread.AddUserMessageAsync(input, cancellationToken).ConfigureAwait(false); + + var messages = await thread.InvokeAsync(this, input, arguments, cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + var response = + new AgentResponse + { + ThreadId = thread.Id, + Message = string.Join(Environment.NewLine, messages.Select(m => m.Content)), + }; + + return response; + } + finally + { + await thread.DeleteAsync(cancellationToken).ConfigureAwait(false); + } + } + + private AgentPluginImpl DefinePlugin() + { + var functionAsk = KernelFunctionFactory.CreateFromMethod(this.AskAsync, description: this.Description); + + return new AgentPluginImpl(this, functionAsk); + } + + private IPromptTemplate DefinePromptTemplate(PromptTemplateConfig config) + { + if (!s_templateFactories.TryGetValue(config.TemplateFormat, out var factory)) + { + factory = new KernelPromptTemplateFactory(); + } + + return factory.Create(config); + } + + private void ThrowIfDeleted() + { + if (this._isDeleted) + { + throw new AgentException($"{nameof(Agent)}: {this.Id} has been deleted."); + } + } + + private sealed class AgentPluginImpl : AgentPlugin + { + public KernelFunction FunctionAsk { get; } + + internal override Agent Agent { get; } + + public override int FunctionCount => 1; + + private static readonly string s_functionName = nameof(Agent.AskAsync).Substring(0, nameof(AgentPluginImpl.Agent.AskAsync).Length - 5); + + public AgentPluginImpl(Agent agent, KernelFunction functionAsk) + : base(s_removeInvalidCharsRegex.Replace(agent.Name ?? agent.Id, string.Empty), + agent.Description ?? agent.Instructions) + { + this.Agent = agent; + this.FunctionAsk = functionAsk; + } + + public override IEnumerator GetEnumerator() + { + yield return this.FunctionAsk; + } + + public override bool TryGetFunction(string name, [NotNullWhen(true)] out KernelFunction? function) + { + function = null; + + if (s_functionName.Equals(name, StringComparison.OrdinalIgnoreCase)) + { + function = this.FunctionAsk; + } + + return function != null; + } + } +} diff --git a/dotnet/src/Experimental/Agents/Internal/ChatMessage.cs b/dotnet/src/Experimental/Agents/Internal/ChatMessage.cs new file mode 100644 index 000000000000..78f2256053a2 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Internal/ChatMessage.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using Microsoft.SemanticKernel.Experimental.Agents.Models; +using static Microsoft.SemanticKernel.Experimental.Agents.IChatMessage; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; + +/// +/// Represents a message that is part of an agent thread. +/// +internal sealed class ChatMessage : IChatMessage +{ + /// + public string Id { get; } + + /// + public string? AgentId { get; } + + /// + public string Content { get; } + + /// + public string Role { get; } + + /// + public ReadOnlyDictionary Properties { get; } + + public IList Annotations { get; } + + /// + /// Initializes a new instance of the class. + /// + internal ChatMessage(ThreadMessageModel model) + { + var content = model.Content.First(); + var text = content.Text?.Value ?? 
string.Empty; + this.Annotations = content.Text!.Annotations.Select(a => new Annotation(a.Text, a.StartIndex, a.EndIndex, a.FileCitation?.FileId ?? a.FilePath!.FileId, a.FileCitation?.Quote)).ToArray(); + + this.Id = model.Id; + this.AgentId = string.IsNullOrWhiteSpace(model.AgentId) ? null : model.AgentId; + this.Role = model.Role; + this.Content = text; + this.Properties = new ReadOnlyDictionary(model.Metadata); + } + + private class Annotation : IAnnotation + { + public Annotation(string label, int startIndex, int endIndex, string fileId, string? quote) + { + this.FileId = fileId; + this.Label = label; + this.Quote = quote; + this.StartIndex = startIndex; + this.EndIndex = endIndex; + } + + /// + public string FileId { get; } + + /// + public string Label { get; } + + /// + public string? Quote { get; } + + /// + public int StartIndex { get; } + + /// + public int EndIndex { get; } + } +} diff --git a/dotnet/src/Experimental/Agents/Internal/ChatRun.cs b/dotnet/src/Experimental/Agents/Internal/ChatRun.cs new file mode 100644 index 000000000000..23a54ca7e4ce --- /dev/null +++ b/dotnet/src/Experimental/Agents/Internal/ChatRun.cs @@ -0,0 +1,192 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; +using Microsoft.SemanticKernel.Experimental.Agents.Extensions; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; + +/// +/// Represents an execution run on a thread. +/// +internal sealed class ChatRun +{ + /// + public string Id => this._model.Id; + + /// + public string AgentId => this._model.AgentId; + + /// + public string ThreadId => this._model.ThreadId; + + private const string ActionState = "requires_action"; + private const string FailedState = "failed"; + private const string CompletedState = "completed"; + private static readonly TimeSpan s_pollingInterval = TimeSpan.FromMilliseconds(500); + private static readonly TimeSpan s_pollingBackoff = TimeSpan.FromSeconds(1); + + private static readonly HashSet s_pollingStates = + new(StringComparer.OrdinalIgnoreCase) + { + "queued", + "in_progress", + }; + + private readonly OpenAIRestContext _restContext; + private readonly Kernel _kernel; + + private ThreadRunModel _model; + + /// + public async IAsyncEnumerable GetResultAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + // Poll until actionable + await PollRunStatus().ConfigureAwait(false); + + // Retrieve steps + var processedMessageIds = new HashSet(); + var steps = await this._restContext.GetRunStepsAsync(this.ThreadId, this.Id, cancellationToken).ConfigureAwait(false); + + do + { + // Is tool action required? + if (ActionState.Equals(this._model.Status, StringComparison.OrdinalIgnoreCase)) + { + // Execute functions in parallel and post results at once. + var tasks = steps.Data.SelectMany(step => this.ExecuteStep(step, cancellationToken)).ToArray(); + await Task.WhenAll(tasks).ConfigureAwait(false); + + var results = tasks.Select(t => t.Result).ToArray(); + await this._restContext.AddToolOutputsAsync(this.ThreadId, this.Id, results, cancellationToken).ConfigureAwait(false); + + // Refresh run as it goes back into pending state after posting function results. 
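The run loop in ChatRun polls the run status on a fixed schedule through the nested PollRunStatus helper and force-refreshes after tool outputs are posted. Purely as an illustration of that polling pattern in isolation, here is a conventional short-then-longer variant; the delegate, names, and delay choices are illustrative and not the actual implementation.

using System;
using System.Threading;
using System.Threading.Tasks;

internal static class PollingSketch
{
    // Poll an async probe until its reported state is no longer "pending",
    // using a short delay at first and a longer one after two attempts.
    public static async Task<string> PollUntilSettledAsync(
        Func<CancellationToken, Task<string>> probeAsync,
        Func<string, bool> isPending,
        CancellationToken cancellationToken = default)
    {
        TimeSpan shortDelay = TimeSpan.FromMilliseconds(500);
        TimeSpan longDelay = TimeSpan.FromSeconds(1);

        int attempts = 0;
        string state = await probeAsync(cancellationToken).ConfigureAwait(false);

        while (isPending(state))
        {
            await Task.Delay(attempts >= 2 ? longDelay : shortDelay, cancellationToken).ConfigureAwait(false);
            ++attempts;
            state = await probeAsync(cancellationToken).ConfigureAwait(false);
        }

        return state;
    }
}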
+ await PollRunStatus(force: true).ConfigureAwait(false); + + // Refresh steps to retrieve additional messages. + steps = await this._restContext.GetRunStepsAsync(this.ThreadId, this.Id, cancellationToken).ConfigureAwait(false); + } + + // Did fail? + if (FailedState.Equals(this._model.Status, StringComparison.OrdinalIgnoreCase)) + { + throw new AgentException($"Unexpected failure processing run: {this.Id}: {this._model.LastError?.Message ?? "Unknown"}"); + } + + var newMessageIds = + steps.Data + .Where(s => s.StepDetails.MessageCreation != null) + .Select(s => (s.StepDetails.MessageCreation!.MessageId, s.CompletedAt)) + .Where(t => !processedMessageIds.Contains(t.MessageId)) + .OrderBy(t => t.CompletedAt) + .Select(t => t.MessageId); + + foreach (var messageId in newMessageIds) + { + processedMessageIds.Add(messageId); + yield return messageId; + } + } + while (!CompletedState.Equals(this._model.Status, StringComparison.OrdinalIgnoreCase)); + + async Task PollRunStatus(bool force = false) + { + int count = 0; + + // Ignore model status when forced. + while (force || s_pollingStates.Contains(this._model.Status)) + { + if (!force) + { + // Reduce polling frequency after a couple attempts + await Task.Delay(count >= 2 ? s_pollingInterval : s_pollingBackoff, cancellationToken).ConfigureAwait(false); + ++count; + } + + force = false; + + try + { + this._model = await this._restContext.GetRunAsync(this.ThreadId, this.Id, cancellationToken).ConfigureAwait(false); + } + catch (Exception exception) when (!exception.IsCriticalException()) + { + // Retry anyway.. + } + } + } + } + + /// + /// Initializes a new instance of the class. + /// + internal ChatRun( + ThreadRunModel model, + Kernel kernel, + OpenAIRestContext restContext) + { + this._model = model; + this._kernel = kernel; + this._restContext = restContext; + } + + private IEnumerable> ExecuteStep(ThreadRunStepModel step, CancellationToken cancellationToken) + { + // Process all of the steps that require action + if (step.Status == "in_progress" && step.StepDetails.Type == "tool_calls") + { + foreach (var toolCall in step.StepDetails.ToolCalls) + { + // Run function + yield return this.ProcessFunctionStepAsync(toolCall.Id, toolCall.Function, cancellationToken); + } + } + } + + private async Task ProcessFunctionStepAsync(string callId, ThreadRunStepModel.FunctionDetailsModel functionDetails, CancellationToken cancellationToken) + { + var result = await InvokeFunctionCallAsync().ConfigureAwait(false); + var toolResult = result as string; + if (toolResult == null) + { + toolResult = JsonSerializer.Serialize(result); + } + + return + new ToolResultModel + { + CallId = callId, + Output = toolResult!, + }; + + async Task InvokeFunctionCallAsync() + { + var function = this._kernel.GetAssistantTool(functionDetails.Name); + + var functionArguments = new KernelArguments(); + if (!string.IsNullOrWhiteSpace(functionDetails.Arguments)) + { + var arguments = JsonSerializer.Deserialize>(functionDetails.Arguments)!; + foreach (var argument in arguments) + { + functionArguments[argument.Key] = argument.Value.ToString(); + } + } + + var result = await function.InvokeAsync(this._kernel, functionArguments, cancellationToken).ConfigureAwait(false); + if (result.ValueType == typeof(AgentResponse)) + { + return result.GetValue()!; + } + + return result.GetValue() ?? 
string.Empty; + } + } +} diff --git a/dotnet/src/Experimental/Agents/Internal/ChatThread.cs b/dotnet/src/Experimental/Agents/Internal/ChatThread.cs new file mode 100644 index 000000000000..48ffdfa8b74c --- /dev/null +++ b/dotnet/src/Experimental/Agents/Internal/ChatThread.cs @@ -0,0 +1,130 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; +using Microsoft.SemanticKernel.Experimental.Agents.Models; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; + +/// +/// Represents a thread that contains messages. +/// +internal sealed class ChatThread : IAgentThread +{ + /// + public string Id { get; private set; } + + private readonly OpenAIRestContext _restContext; + private bool _isDeleted; + + /// + /// Create a new thread. + /// + /// A context for accessing OpenAI REST endpoint + /// A cancellation token + /// An initialized instance. + public static async Task CreateAsync(OpenAIRestContext restContext, CancellationToken cancellationToken = default) + { + // Common case is for failure exception to be raised by REST invocation. Null result is a logical possibility, but unlikely edge case. + var threadModel = await restContext.CreateThreadModelAsync(cancellationToken).ConfigureAwait(false); + + return new ChatThread(threadModel, messageListModel: null, restContext); + } + + /// + /// Retrieve an existing thread. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// A cancellation token + /// An initialized instance. + public static async Task GetAsync(OpenAIRestContext restContext, string threadId, CancellationToken cancellationToken = default) + { + var threadModel = await restContext.GetThreadModelAsync(threadId, cancellationToken).ConfigureAwait(false); + var messageListModel = await restContext.GetMessagesAsync(threadId, cancellationToken).ConfigureAwait(false); + + return new ChatThread(threadModel, messageListModel, restContext); + } + + /// + public async Task AddUserMessageAsync(string message, CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + var messageModel = + await this._restContext.CreateUserTextMessageAsync( + this.Id, + message, + cancellationToken).ConfigureAwait(false); + + return new ChatMessage(messageModel); + } + + /// + public IAsyncEnumerable InvokeAsync(IAgent agent, KernelArguments? arguments = null, CancellationToken cancellationToken = default) + { + return this.InvokeAsync(agent, string.Empty, arguments, cancellationToken); + } + + /// + public async IAsyncEnumerable InvokeAsync(IAgent agent, string userMessage, KernelArguments? arguments = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + if (!string.IsNullOrWhiteSpace(userMessage)) + { + yield return await this.AddUserMessageAsync(userMessage, cancellationToken).ConfigureAwait(false); + } + + // Finalize prompt / agent instructions using provided parameters. 
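ProcessFunctionStepAsync in ChatRun above converts the model-supplied JSON argument payload into KernelArguments before invoking the resolved function, and serializes non-string results before they are posted back. A stripped-down sketch of that conversion using the public Semantic Kernel types (the method name and JSON payload are illustrative):

using System.Collections.Generic;
using System.Text.Json;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

internal static class ToolDispatchSketch
{
    // Sketch: turn a tool-call argument payload into KernelArguments, invoke,
    // and normalize the result to a string for the tool-output submission.
    public static async Task<string> InvokeToolAsync(Kernel kernel, KernelFunction function, string? argumentJson)
    {
        var arguments = new KernelArguments();

        if (!string.IsNullOrWhiteSpace(argumentJson))
        {
            var parsed = JsonSerializer.Deserialize<Dictionary<string, object>>(argumentJson!)!;
            foreach (var pair in parsed)
            {
                arguments[pair.Key] = pair.Value?.ToString();
            }
        }

        var result = await function.InvokeAsync(kernel, arguments).ConfigureAwait(false);
        var value = result.GetValue<object>();

        return value as string ?? JsonSerializer.Serialize(value);
    }
}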
+ var instructions = await agent.AsPromptTemplate().RenderAsync(agent.Kernel, arguments, cancellationToken).ConfigureAwait(false); + + // Create run using templated prompt + var runModel = await this._restContext.CreateRunAsync(this.Id, agent.Id, instructions, agent.Tools, cancellationToken).ConfigureAwait(false); + var run = new ChatRun(runModel, agent.Kernel, this._restContext); + + await foreach (var messageId in run.GetResultAsync(cancellationToken).ConfigureAwait(false)) + { + var message = await this._restContext.GetMessageAsync(this.Id, messageId, cancellationToken).ConfigureAwait(false); + yield return new ChatMessage(message); + } + } + + /// + /// Delete an existing thread. + /// + /// A cancellation token + public async Task DeleteAsync(CancellationToken cancellationToken) + { + if (this._isDeleted) + { + return; + } + + await this._restContext.DeleteThreadModelAsync(this.Id, cancellationToken).ConfigureAwait(false); + this._isDeleted = true; + } + + /// + /// Initializes a new instance of the class. + /// + private ChatThread( + ThreadModel threadModel, + ThreadMessageListModel? messageListModel, + OpenAIRestContext restContext) + { + this.Id = threadModel.Id; + this._restContext = restContext; + } + + private void ThrowIfDeleted() + { + if (this._isDeleted) + { + throw new AgentException($"{nameof(ChatThread)}: {this.Id} has been deleted."); + } + } +} diff --git a/dotnet/src/Experimental/Agents/Internal/OpenAIRestContext.cs b/dotnet/src/Experimental/Agents/Internal/OpenAIRestContext.cs new file mode 100644 index 000000000000..343c8c90a1ab --- /dev/null +++ b/dotnet/src/Experimental/Agents/Internal/OpenAIRestContext.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; + +/// +/// Placeholder context. +/// +internal sealed class OpenAIRestContext +{ + private static readonly HttpClient s_defaultOpenAIClient = new(); + + /// + /// The service API key. + /// + public string ApiKey { get; } + + /// + /// The service endpoint. + /// + public string Endpoint { get; } + + /// + /// Is the version defined? + /// + public bool HasVersion { get; } + + /// + /// The optional API version. + /// + public string? Version { get; } + + /// + /// Accessor for the http client. + /// + public HttpClient GetHttpClient() => this._clientFactory.Invoke(); + + private readonly Func _clientFactory; + + /// + /// Initializes a new instance of the class. + /// + public OpenAIRestContext(string endpoint, string apiKey, Func? clientFactory = null) + : this(endpoint, apiKey, version: null, clientFactory) + { + // Nothing to do... + } + + /// + /// Initializes a new instance of the class. + /// + public OpenAIRestContext(string endpoint, string apiKey, string? version, Func? clientFactory = null) + { + this._clientFactory = clientFactory ??= () => s_defaultOpenAIClient; + + this.ApiKey = apiKey; + this.Endpoint = endpoint; + this.HasVersion = !string.IsNullOrEmpty(version); + this.Version = version; + } +} diff --git a/dotnet/src/Experimental/Agents/Models/AssistantModel.cs b/dotnet/src/Experimental/Agents/Models/AssistantModel.cs new file mode 100644 index 000000000000..b7320433dcca --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/AssistantModel.cs @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft. All rights reserved. 
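Because ChatThread.InvokeAsync above returns an IAsyncEnumerable, callers can observe agent messages as the run produces them instead of waiting for the whole run to finish. A hedged consumer sketch, assuming an agent and thread obtained from the factories shown earlier in this change:

using System;

// Sketch: stream messages from a run as they are created.
await thread.AddUserMessageAsync("Write a haiku about rain.");

await foreach (var message in thread.InvokeAsync(agent))
{
    Console.WriteLine($"{message.Role}: {message.Content}");
}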
+#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +/// +/// list of run steps belonging to a run. +/// +internal sealed class AssistantListModel : OpenAIListModel +{ + // No specialization +} + +/// +/// Model of Assistant data returned from OpenAI +/// +internal sealed record AssistantModel +{ + /// + /// Identifier, which can be referenced in API endpoints + /// + [JsonPropertyName("id")] + public string Id { get; init; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the assistant was created + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; init; } + + /// + /// Name of the assistant + /// + [JsonPropertyName("name")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Name { get; set; } + + /// + /// The description of the assistant + /// + [JsonPropertyName("description")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Description { get; set; } + + /// + /// ID of the model to use + /// + [JsonPropertyName("model")] + public string Model { get; set; } = string.Empty; + + /// + /// The system instructions that the assistant uses + /// + [JsonPropertyName("instructions")] + public string Instructions { get; set; } = string.Empty; + + /// + /// A list of tool enabled on the assistant + /// There can be a maximum of 128 tools per assistant. + /// + [JsonPropertyName("tools")] + public List Tools { get; init; } = new List(); + + /// + /// A list of file IDs attached to this assistant. + /// There can be a maximum of 20 files attached to the assistant. + /// + [JsonPropertyName("file_ids")] + public List FileIds { get; init; } = new List(); + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the + /// object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a + /// maximum of 512 characters long. + /// + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; init; } = new Dictionary(); + + /// + /// Assistant file model. + /// + public sealed class FileModel + { + /// + /// ID of the assistant. + /// + [JsonPropertyName("assistant_id")] + public string AssistantId { get; set; } = string.Empty; + + /// + /// ID of the uploaded file. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the assistant was created + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; init; } + } +} diff --git a/dotnet/src/Experimental/Agents/Models/OpenAIListModel.cs b/dotnet/src/Experimental/Agents/Models/OpenAIListModel.cs new file mode 100644 index 000000000000..1425bb3543d2 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/OpenAIListModel.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +/// +/// list of run steps belonging to a run. +/// +internal abstract class OpenAIListModel +{ + /// + /// List of steps. + /// + [JsonPropertyName("data")] + public List Data { get; set; } = new List(); + + /// + /// The identifier of the first data record. 
+ /// + [JsonPropertyName("first_id")] + public string FirstId { get; set; } = string.Empty; + + /// + /// The identifier of the last data record. + /// + [JsonPropertyName("last_id")] + public string LastId { get; set; } = string.Empty; + + /// + /// Indicates of more pages of data exist. + /// + [JsonPropertyName("has_more")] + public bool HasMore { get; set; } +} diff --git a/dotnet/src/Experimental/Agents/Models/OpenAIParameters.cs b/dotnet/src/Experimental/Agents/Models/OpenAIParameters.cs new file mode 100644 index 000000000000..f87f3aec84c1 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/OpenAIParameters.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +/// +/// Wrapper for parameter map. +/// +internal sealed class OpenAIParameters +{ + /// + /// Empty parameter set. + /// + public static readonly OpenAIParameters Empty = new(); + + /// + /// Always "object" + /// + [JsonPropertyName("type")] + public string Type { get; set; } = "object"; + + /// + /// Set of parameters. + /// + [JsonPropertyName("properties")] + public Dictionary Properties { get; set; } = new(); + + /// + /// Set of parameters. + /// + [JsonPropertyName("required")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public List? Required { get; set; } +} + +/// +/// Wrapper for parameter definition. +/// +internal sealed class OpenAIParameter +{ + /// + /// The parameter type. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = "object"; + + /// + /// The parameter description. + /// + [JsonPropertyName("description")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Description { get; set; } +} diff --git a/dotnet/src/Experimental/Agents/Models/ThreadMessageModel.cs b/dotnet/src/Experimental/Agents/Models/ThreadMessageModel.cs new file mode 100644 index 000000000000..de6a650d463c --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/ThreadMessageModel.cs @@ -0,0 +1,178 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 +#pragma warning disable CA1852 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +/// +/// list of run steps belonging to a run. +/// +internal sealed class ThreadMessageListModel : OpenAIListModel +{ + // No specialization +} + +/// +/// Represents a message within a thread. +/// +internal sealed class ThreadMessageModel +{ + /// + /// Identifier, which can be referenced in API endpoints. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the message was created. + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; set; } + + /// + /// The thread ID that this message belongs to. + /// + [JsonPropertyName("thread_id")] + public string ThreadId { get; set; } = string.Empty; + + /// + /// The entity that produced the message. One of "user" or "agent". + /// + [JsonPropertyName("role")] + public string Role { get; set; } = string.Empty; + + /// + /// The content of the message in array of text and/or images. + /// + [JsonPropertyName("content")] + public List Content { get; set; } = new List(); + + /// + /// A list of file IDs that the agent should use. 
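OpenAIParameters and OpenAIParameter above mirror the JSON-schema-like parameters object of a function tool. As a small illustration of the wire shape they serialize to (the parameter name and description are made up, Properties is assumed to be keyed by parameter name, and the property names come from the JsonPropertyName attributes above):

using System;
using System.Collections.Generic;
using System.Text.Json;

var parameters = new OpenAIParameters
{
    Required = new List<string> { "city" },
};
parameters.Properties["city"] = new OpenAIParameter { Type = "string", Description = "City to look up" };

// Roughly: {"type":"object","properties":{"city":{"type":"string","description":"City to look up"}},"required":["city"]}
Console.WriteLine(JsonSerializer.Serialize(parameters));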
+ /// + [JsonPropertyName("file_ids")] + public List FileIds { get; set; } = new List(); + + /// + /// If applicable, the ID of the agent that authored this message. + /// + [JsonPropertyName("agent_id")] + public string AgentId { get; set; } = string.Empty; + + /// + /// If applicable, the ID of the run associated with the authoring of this message. + /// + [JsonPropertyName("run_id")] + public string RunId { get; set; } = string.Empty; + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the + /// object in a structured format. Keys can be a maximum of 64 + /// characters long and values can be a maximum of 512 characters long. + /// + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; set; } = new Dictionary(); + + /// + /// Representa contents within a message. + /// + public sealed class ContentModel + { + /// + /// Type of content. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// Text context. + /// + [JsonPropertyName("text")] + public TextContentModel? Text { get; set; } + } + + /// + /// Text content. + /// + public sealed class TextContentModel + { + /// + /// The text itself. + /// + [JsonPropertyName("value")] + public string Value { get; set; } = string.Empty; + + /// + /// Any annotations on the text. + /// + [JsonPropertyName("annotations")] + public List Annotations { get; set; } = new List(); + } + + public sealed class TextAnnotationModel + { + /// + /// Type of content. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// The text of the citation-label text in the message content that can be replaced/reformatted. + /// + [JsonPropertyName("text")] + public string Text { get; set; } = string.Empty; + + /// + /// Annotation when type == "file_citation" + /// + [JsonPropertyName("file_citation")] + public TextFileCitationModel? FileCitation { get; set; } + + /// + /// Annotation when type == "file_path" + /// + [JsonPropertyName("file_path")] + public TextFilePathModel? FilePath { get; set; } + + /// + /// Start index of the citation. + /// + [JsonPropertyName("start_index")] + public int StartIndex { get; set; } + + /// + /// End index of the citation. + /// + [JsonPropertyName("end_index")] + public int EndIndex { get; set; } + } + + public sealed class TextFileCitationModel + { + /// + /// The file identifier. + /// + [JsonPropertyName("file_id")] + public string FileId { get; set; } = string.Empty; + + /// + /// The citation. + /// + [JsonPropertyName("quote")] + public string Quote { get; set; } = string.Empty; + } + + public sealed class TextFilePathModel + { + /// + /// The file identifier. + /// + [JsonPropertyName("file_id")] + public string FileId { get; set; } = string.Empty; + } +} diff --git a/dotnet/src/Experimental/Agents/Models/ThreadModel.cs b/dotnet/src/Experimental/Agents/Models/ThreadModel.cs new file mode 100644 index 000000000000..85570cb76d36 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/ThreadModel.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +/// +/// Model of Thread data returned from OpenAI +/// +internal sealed class ThreadModel +{ + /// + /// Identifier, which can be referenced in API endpoints. 
+ /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// The Unix timestamp (in seconds) for when the thread was created. + /// + [JsonPropertyName("created_at")] + public int CreatedAt { get; set; } + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the + /// object in a structured format. Keys can be a maximum of 64 + /// characters long and values can be a maximum of 512 characters long. + /// + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; set; } = new Dictionary(); +} diff --git a/dotnet/src/Experimental/Agents/Models/ThreadRunModel.cs b/dotnet/src/Experimental/Agents/Models/ThreadRunModel.cs new file mode 100644 index 000000000000..6312f9c1b4b6 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/ThreadRunModel.cs @@ -0,0 +1,131 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +/// +/// Represents an execution run on a thread. +/// +internal sealed class ThreadRunModel +{ + /// + /// Identifier, which can be referenced in API endpoints. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the run was created. + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; set; } + + /// + /// ID of the agent used for execution of this run. + /// + [JsonPropertyName("agent_id")] + public string AgentId { get; set; } = string.Empty; + + /// + /// ID of the thread that was executed on as a part of this run. + /// + [JsonPropertyName("thread_id")] + public string ThreadId { get; set; } = string.Empty; + + /// + /// The status of the run, which can be one of: + /// queued, in_progress, requires_action, cancelling, cancelled, failed, completed, or expired. + /// + [JsonPropertyName("status")] + public string Status { get; set; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the run was started. + /// + [JsonPropertyName("started_at")] + public long? StartedAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run will expire. + /// + [JsonPropertyName("expires_at")] + public long? ExpiresAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run was cancelled. + /// + [JsonPropertyName("cancelled_at")] + public long? CancelledAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run failed. + /// + [JsonPropertyName("failed_at")] + public long? FailedAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run was completed. + /// + [JsonPropertyName("completed_at")] + public long? CompletedAt { get; set; } + + /// + /// The last error associated with this run. Will be null if there are no errors. + /// + [JsonPropertyName("last_error")] + public ErrorModel? LastError { get; set; } + + /// + /// The model that the agent used for this run. + /// + [JsonPropertyName("model")] + public string Model { get; set; } = string.Empty; + + /// + /// The instructions that the agent used for this run. + /// + [JsonPropertyName("instructions")] + public string Instructions { get; set; } = string.Empty; + + /// + /// The list of tools that the agent used for this run. 
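ThreadRunModel above is populated directly from the REST payload, and its status field is what drives the polling loop in ChatRun. A hedged sketch of deserializing a truncated, made-up payload and checking for a terminal state (the set of terminal statuses is taken from the doc comment above):

using System;
using System.Text.Json;

const string json = "{\"id\":\"run_123\",\"thread_id\":\"thread_456\",\"status\":\"completed\",\"created_at\":1700000000}";

var run = JsonSerializer.Deserialize<ThreadRunModel>(json)!;

bool isTerminal = run.Status is "completed" or "failed" or "cancelled" or "expired";
Console.WriteLine($"{run.Id}: {run.Status} (terminal: {isTerminal})");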
+ /// + [JsonPropertyName("tools")] + public List Tools { get; set; } = new List(); + + /// + /// The list of File IDs the agent used for this run. + /// + [JsonPropertyName("file_ids")] + public List FileIds { get; set; } = new List(); + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the + /// object in a structured format. Keys can be a maximum of 64 + /// characters long and values can be a maximum of 512 characters long. + /// + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; set; } = new Dictionary(); + + /// + /// Run error information. + /// + public sealed class ErrorModel + { + /// + /// Error code. + /// + [JsonPropertyName("code")] + public string Code { get; set; } = string.Empty; + + /// + /// Error message. + /// + [JsonPropertyName("message")] + public string Message { get; set; } = string.Empty; + } +} diff --git a/dotnet/src/Experimental/Agents/Models/ThreadRunStepModel.cs b/dotnet/src/Experimental/Agents/Models/ThreadRunStepModel.cs new file mode 100644 index 000000000000..5ad9418f55e3 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/ThreadRunStepModel.cs @@ -0,0 +1,196 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +/// +/// list of run steps belonging to a run. +/// +internal sealed class ThreadRunStepListModel : OpenAIListModel +{ + // No specialization +} + +/// +/// Step in a run on a thread. +/// +internal sealed class ThreadRunStepModel +{ + /// + /// Identifier of the run step, which can be referenced in API endpoints. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the run step was created. + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; set; } + + /// + /// The ID of the run to which the run step belongs. + /// + [JsonPropertyName("run_id")] + public string RunId { get; set; } = string.Empty; + + /// + /// ID of the agent associated with the run step. + /// + [JsonPropertyName("agent_id")] + public string AgentId { get; set; } = string.Empty; + + /// + /// The ID of the thread to which the run and run step belongs. + /// + [JsonPropertyName("thread_id")] + public string ThreadId { get; set; } = string.Empty; + + /// + /// The type of run step, which can be either message_creation or tool_calls. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// The status of the run step, which can be one of: + /// in_progress, cancelled, failed, completed, or expired. + /// + [JsonPropertyName("status")] + public string Status { get; set; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the run step was cancelled. + /// + [JsonPropertyName("cancelled_at")] + public long? CancelledAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run step completed. + /// + [JsonPropertyName("completed_at")] + public long? CompletedAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run step expired. + /// A step is considered expired if the parent run is expired. + /// + [JsonPropertyName("expired_at")] + public long? ExpiredAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run step failed. + /// + [JsonPropertyName("failed_at")] + public long? 
FailedAt { get; set; } + + /// + /// The last error associated with this run step. Will be null if there are no errors. + /// + [JsonPropertyName("last_error")] + public string LastError { get; set; } = string.Empty; + + /// + /// The details of the run step. + /// + [JsonPropertyName("step_details")] + public StepDetailsModel StepDetails { get; set; } = StepDetailsModel.Empty; + + /// + /// Details of a run step. + /// + public sealed class StepDetailsModel + { + /// + /// Empty definition + /// + public static StepDetailsModel Empty = new(); + + /// + /// Type of detail. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// Details of the message creation by the run step. + /// + [JsonPropertyName("message_creation")] + public MessageCreationDetailsModel? MessageCreation { get; set; } + + /// + /// Details of tool calls. + /// + [JsonPropertyName("tool_calls")] + public ToolCallsDetailsModel[] ToolCalls { get; set; } = Array.Empty(); + } + + /// + /// Message creation details. + /// + public sealed class MessageCreationDetailsModel + { + /// + /// ID of the message that was created by this run step. + /// + [JsonPropertyName("message_id")] + public string MessageId { get; set; } = string.Empty; + } + + /// + /// Tool call details. + /// + public sealed class ToolCallsDetailsModel + { + /// + /// ID of the tool call. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// The type of tool call. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// The definition of the function that was called. + /// + [JsonPropertyName("function")] + public FunctionDetailsModel Function { get; set; } = FunctionDetailsModel.Empty; + } + + /// + /// Function call details. + /// + public sealed class FunctionDetailsModel + { + /// + /// Empty definition + /// + public static FunctionDetailsModel Empty = new(); + + /// + /// The name of the function. + /// + [JsonPropertyName("name")] + public string Name { get; set; } = string.Empty; + + /// + /// The arguments passed to the function. + /// + [JsonPropertyName("arguments")] + public string Arguments { get; set; } = string.Empty; + + /// + /// The output of the function. + /// This will be null if the outputs have not been submitted yet. + /// + [JsonPropertyName("output")] + public string Output { get; set; } = string.Empty; + } +} diff --git a/dotnet/src/Experimental/Agents/Models/ToolModel.cs b/dotnet/src/Experimental/Agents/Models/ToolModel.cs new file mode 100644 index 000000000000..d23dd0bf647d --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/ToolModel.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +/// +/// Tool entry +/// +internal sealed record ToolModel +{ + /// + /// Type of tool to have at agent's disposition + /// + [JsonPropertyName("type")] + public string Type { get; init; } = string.Empty; + + /// + /// The function definition for Type = 'function'. + /// + [JsonPropertyName("function")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public FunctionModel? Function { get; init; } + + /// + /// Defines the function when ToolModel.Type == 'function'. + /// + public sealed record FunctionModel + { + /// + /// The function name. + /// + [JsonPropertyName("name")] + public string Name { get; init; } = string.Empty; + + /// + /// The function description. 
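ToolModel above is the wire shape for a tool entry; for kernel functions, the ToToolModel extension used in the Agent constructor builds one of these with a plugin-qualified name. A hand-assembled sketch of the same shape (the "Plugin-Function" naming convention is the one exercised by the unit tests later in this change; the names themselves are illustrative):

// Sketch: a function tool definition for a function "Bogus" in a plugin "Fake".
var tool = new ToolModel
{
    Type = "function",
    Function = new ToolModel.FunctionModel
    {
        Name = "Fake-Bogus",
        Description = "Bogus description",
        Parameters = OpenAIParameters.Empty,
    },
};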
+ /// + [JsonPropertyName("description")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Description { get; init; } + + /// + /// The function description. + /// + [JsonPropertyName("parameters")] + public OpenAIParameters Parameters { get; init; } = OpenAIParameters.Empty; + } +} diff --git a/dotnet/src/Experimental/Agents/Models/ToolResultModel.cs b/dotnet/src/Experimental/Agents/Models/ToolResultModel.cs new file mode 100644 index 000000000000..3c4ef062cf70 --- /dev/null +++ b/dotnet/src/Experimental/Agents/Models/ToolResultModel.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 +#pragma warning disable CA1852 + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Agents.Models; + +internal sealed class ToolResultModel +{ + private static readonly object s_placeholder = new(); + + /// + /// The tool call identifier. + /// + [JsonPropertyName("tool_call_id")] + public string CallId { get; set; } = string.Empty; + + /// + /// The tool output + /// + [JsonPropertyName("output")] + public object Output { get; set; } = s_placeholder; +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Experimental.Assistants.UnitTests.csproj b/dotnet/src/Experimental/Assistants.UnitTests/Experimental.Assistants.UnitTests.csproj new file mode 100644 index 000000000000..bdd40950b402 --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Experimental.Assistants.UnitTests.csproj @@ -0,0 +1,49 @@ + + + SemanticKernel.Experimental.Assistants.UnitTests + SemanticKernel.Experimental.Assistants.UnitTests + net6.0 + LatestMajor + true + enable + disable + false + CS1591;SKEXP0101 + + + + + + + + + + + + + + + + + + all + + + all + + + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + PreserveNewest + + + \ No newline at end of file diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Extensions/KernelExtensionTests.cs b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/KernelExtensionTests.cs new file mode 100644 index 000000000000..d30e849edbcc --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/KernelExtensionTests.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. 
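ToolResultModel above pairs a tool_call_id with its output, and ChatRun posts an array of these through AddToolOutputsAsync once the pending calls have been executed. A minimal sketch of assembling that array (call ids and outputs are made up; the commented call mirrors the usage in ChatRun):

var outputs = new[]
{
    new ToolResultModel { CallId = "call_abc", Output = "42" },
    new ToolResultModel { CallId = "call_def", Output = "{\"answer\":\"Blue\"}" },
};

// Posted back to the run, e.g.:
// await restContext.AddToolOutputsAsync(threadId, runId, outputs, cancellationToken);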
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Assistants.Exceptions; +using Microsoft.SemanticKernel.Experimental.Assistants.Extensions; +using Xunit; + +namespace SemanticKernel.Experimental.Assistants.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Assistant")] +public sealed class KernelExtensionTests +{ + private const string TwoPartToolName = "Fake-Bogus"; + + [Fact] + public static void InvokeTwoPartTool() + { + //Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => { }, functionName: "Bogus"); + + var kernel = new Kernel(); + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("Fake", "Fake functions", new[] { function })); + + //Act + var tool = kernel.GetAssistantTool(TwoPartToolName); + + //Assert + Assert.NotNull(tool); + Assert.Equal("Bogus", tool.Name); + } + + [Theory] + [InlineData("Bogus")] + [InlineData("i-am-not-valid")] + public static void InvokeInvalidSinglePartTool(string toolName) + { + //Arrange + var kernel = new Kernel(); + + //Act & Assert + Assert.Throws(() => kernel.GetAssistantTool(toolName)); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Extensions/KernelFunctionExtensionTests.cs b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/KernelFunctionExtensionTests.cs new file mode 100644 index 000000000000..576ce977667a --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/KernelFunctionExtensionTests.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Assistants; +using Xunit; + +namespace SemanticKernel.Experimental.Assistants.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Assistant")] +public sealed class KernelFunctionExtensionTests +{ + private const string ToolName = "Bogus"; + private const string PluginName = "Fake"; + + [Fact] + public static void GetTwoPartName() + { + var function = KernelFunctionFactory.CreateFromMethod(() => true, ToolName); + + string qualifiedName = function.GetQualifiedName(PluginName); + + Assert.Equal($"{PluginName}-{ToolName}", qualifiedName); + } + + [Fact] + public static void GetToolModelFromFunction() + { + const string FunctionDescription = "Bogus description"; + const string RequiredParamName = "required"; + const string OptionalParamName = "optional"; + + var requiredParam = new KernelParameterMetadata("required") { IsRequired = true }; + var optionalParam = new KernelParameterMetadata("optional"); + var parameters = new List { requiredParam, optionalParam }; + var function = KernelFunctionFactory.CreateFromMethod(() => true, ToolName, FunctionDescription, parameters); + + var toolModel = function.ToToolModel(PluginName); + var properties = toolModel.Function?.Parameters.Properties; + var required = toolModel.Function?.Parameters.Required; + + Assert.Equal("function", toolModel.Type); + Assert.Equal($"{PluginName}-{ToolName}", toolModel.Function?.Name); + Assert.Equal(FunctionDescription, toolModel.Function?.Description); + Assert.Equal(2, properties?.Count); + Assert.True(properties?.ContainsKey(RequiredParamName)); + Assert.True(properties?.ContainsKey(OptionalParamName)); + Assert.Equal(1, required?.Count ?? 0); + Assert.True(required?.Contains(RequiredParamName) ?? 
false); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs new file mode 100644 index 000000000000..87a12e538646 --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; +using Moq; +using Moq.Protected; +using Xunit; + +namespace SemanticKernel.Experimental.Assistants.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Assistant")] +public sealed class OpenAIRestExtensionsAssistantTests +{ + private const string BogusApiKey = "bogus"; + private const string TestAssistantId = "assistantId"; + + private readonly AssistantModel _assistantModel = new(); + private readonly OpenAIRestContext _restContext; + private readonly Mock _mockHttpMessageHandler = new(); + + public OpenAIRestExtensionsAssistantTests() + { + this._mockHttpMessageHandler + .Protected() + .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); + this._restContext = new(BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); + } + + [Fact] + public async Task CreateAssistantModelAsync() + { + await this._restContext.CreateAssistantModelAsync(this._assistantModel).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, OpenAIRestExtensions.BaseAssistantUrl); + } + + [Fact] + public async Task GetAssistantModelAsync() + { + await this._restContext.GetAssistantModelAsync(TestAssistantId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, OpenAIRestExtensions.GetAssistantUrl(TestAssistantId)); + } + + [Fact] + public async Task ListAssistantModelsAsync() + { + await this._restContext.ListAssistantModelsAsync(10, false, "20").ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, $"{OpenAIRestExtensions.BaseAssistantUrl}?limit=10&order=desc&after=20"); + } + + [Fact] + public async Task DeleteAssistantModelAsync() + { + await this._restContext.DeleteAssistantModelAsync(TestAssistantId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Delete, 1, OpenAIRestExtensions.GetAssistantUrl(TestAssistantId)); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs new file mode 100644 index 000000000000..4168daace491 --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. 
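The REST-extension tests above and below verify traffic through a mocked HttpMessageHandler via a VerifyMock helper that is defined elsewhere in this change and not visible here. Purely as an assumption-labelled sketch, a helper with that shape could check the HTTP method, call count, and request URI like this (the signature and behavior are guesses from the call sites, not the actual helper):

using System;
using System.Net.Http;
using System.Threading;
using Moq;
using Moq.Protected;

internal static class MockVerificationSketch
{
    // Hypothetical helper: assert the mocked handler saw `times` requests with the
    // given method and, when provided, the given absolute request URI.
    public static void VerifyMock(this Mock<HttpMessageHandler> handler, HttpMethod method, int times, string? requestUri = null)
    {
        handler.Protected().Verify(
            "SendAsync",
            Times.Exactly(times),
            ItExpr.Is<HttpRequestMessage>(m => m.Method == method && (requestUri == null || m.RequestUri == new Uri(requestUri))),
            ItExpr.IsAny<CancellationToken>());
    }
}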
+ +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Moq; +using Moq.Protected; +using Xunit; + +namespace SemanticKernel.Experimental.Assistants.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Assistant")] +public sealed class OpenAIRestExtensionsMessagesTests +{ + private const string BogusApiKey = "bogus"; + private const string TestThreadId = "threadId"; + private const string TestMessageId = "msgId"; + private const string TestContent = "Blah blah"; + + private readonly OpenAIRestContext _restContext; + private readonly Mock _mockHttpMessageHandler = new(); + + public OpenAIRestExtensionsMessagesTests() + { + this._mockHttpMessageHandler + .Protected() + .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); + this._restContext = new(BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); + } + + [Fact] + public async Task CreateMessageModelAsync() + { + await this._restContext.CreateUserTextMessageAsync(TestThreadId, TestContent).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, OpenAIRestExtensions.GetMessagesUrl(TestThreadId)); + } + + [Fact] + public async Task GetMessageModelAsync() + { + await this._restContext.GetMessageAsync(TestThreadId, TestMessageId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, OpenAIRestExtensions.GetMessagesUrl(TestThreadId, TestMessageId)); + } + + [Fact] + public async Task GetMessageModelsAsync() + { + await this._restContext.GetMessagesAsync(TestThreadId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, OpenAIRestExtensions.GetMessagesUrl(TestThreadId)); + } + + [Fact] + public async Task GetSpecificMessageModelsAsync() + { + var messageIDs = new string[] { "1", "2", "3" }; + + await this._restContext.GetMessagesAsync(TestThreadId, messageIDs).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, messageIDs.Length); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs new file mode 100644 index 000000000000..41ef1ed510ad --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; +using Moq; +using Moq.Protected; +using Xunit; + +namespace SemanticKernel.Experimental.Assistants.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Assistant")] +public sealed class OpenAIRestExtensionsRunTests +{ + private const string BogusApiKey = "bogus"; + private const string TestAssistantId = "assistantId"; + private const string TestThreadId = "threadId"; + private const string TestRunId = "runId"; + + private readonly OpenAIRestContext _restContext; + private readonly Mock _mockHttpMessageHandler = new(); + + public OpenAIRestExtensionsRunTests() + { + this._mockHttpMessageHandler + .Protected() + .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); + this._restContext = new(BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); + } + + [Fact] + public async Task CreateRunAsync() + { + await this._restContext.CreateRunAsync(TestThreadId, TestAssistantId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, OpenAIRestExtensions.GetRunUrl(TestThreadId)); + } + + [Fact] + public async Task GetRunAsync() + { + await this._restContext.GetRunAsync(TestThreadId, TestRunId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, OpenAIRestExtensions.GetRunUrl(TestThreadId, TestRunId)); + } + + [Fact] + public async Task GetRunStepsAsync() + { + await this._restContext.GetRunStepsAsync(TestThreadId, TestRunId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, OpenAIRestExtensions.GetRunStepsUrl(TestThreadId, TestRunId)); + } + + [Fact] + public async Task AddToolOutputsAsync() + { + var toolResults = Array.Empty(); + + await this._restContext.AddToolOutputsAsync(TestThreadId, TestRunId, toolResults).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, OpenAIRestExtensions.GetRunToolOutput(TestThreadId, TestRunId)); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs new file mode 100644 index 000000000000..042eafee48aa --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Moq; +using Moq.Protected; +using Xunit; + +namespace SemanticKernel.Experimental.Assistants.UnitTests; + +[Trait("Category", "Unit Tests")] +[Trait("Feature", "Assistant")] +public sealed class OpenAIRestExtensionsThreadTests +{ + private const string BogusApiKey = "bogus"; + private const string TestThreadId = "threadId"; + + private readonly OpenAIRestContext _restContext; + private readonly Mock _mockHttpMessageHandler = new(); + + public OpenAIRestExtensionsThreadTests() + { + this._mockHttpMessageHandler + .Protected() + .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); + this._restContext = new(BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); + } + + [Fact] + public async Task CreateThreadModelAsync() + { + await this._restContext.CreateThreadModelAsync().ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, OpenAIRestExtensions.BaseThreadUrl); + } + + [Fact] + public async Task GetThreadModelAsync() + { + await this._restContext.GetThreadModelAsync(TestThreadId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, OpenAIRestExtensions.GetThreadUrl(TestThreadId)); + } + + [Fact] + public async Task DeleteThreadModelAsync() + { + await this._restContext.DeleteThreadModelAsync(TestThreadId).ConfigureAwait(true); + + this._mockHttpMessageHandler.VerifyMock(HttpMethod.Delete, 1, OpenAIRestExtensions.GetThreadUrl(TestThreadId)); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Integration/AssistantHarness.cs b/dotnet/src/Experimental/Assistants.UnitTests/Integration/AssistantHarness.cs new file mode 100644 index 000000000000..11e32b85effe --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Integration/AssistantHarness.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft. All rights reserved. + +#define DISABLEHOST // Comment line to enable +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.Experimental.Assistants.UnitTests.Integration; + +/// +/// Dev harness for manipulating assistants. +/// +/// +/// Comment out DISABLEHOST definition to enable tests. +/// Not enabled by default. +/// +[Trait("Category", "Integration Tests")] +[Trait("Feature", "Assistant")] +public sealed class AssistantHarness +{ +#if DISABLEHOST + private const string SkipReason = "Harness only for local/dev environment"; +#else + private const string SkipReason = null; +#endif + + private readonly ITestOutputHelper _output; + + /// + /// Test constructor. + /// + public AssistantHarness(ITestOutputHelper output) + { + this._output = output; + } + + /// + /// Verify creation and retrieval of assistant. 
+ /// + [Fact(Skip = SkipReason)] + public async Task VerifyAssistantLifecycleAsync() + { + var assistant = + await AssistantBuilder.NewAsync( + apiKey: TestConfig.OpenAIApiKey, + model: TestConfig.SupportedGpt35TurboModel, + instructions: "say something funny", + name: "Fred", + description: "test assistant").ConfigureAwait(true); + + this.DumpAssistant(assistant); + + var copy = + await AssistantBuilder.GetAssistantAsync( + apiKey: TestConfig.OpenAIApiKey, + assistantId: assistant.Id).ConfigureAwait(true); + + this.DumpAssistant(copy); + } + + /// + /// Verify creation and retrieval of assistant. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyAssistantDefinitionAsync() + { + var assistant = + await new AssistantBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .FromTemplatePath("Templates/PoetAssistant.yaml") + .BuildAsync() + .ConfigureAwait(true); + + this.DumpAssistant(assistant); + + var copy = + await AssistantBuilder.GetAssistantAsync( + apiKey: TestConfig.OpenAIApiKey, + assistantId: assistant.Id).ConfigureAwait(true); + + this.DumpAssistant(copy); + } + + /// + /// Verify creation and retrieval of assistant. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyAssistantListAsync() + { + var context = new OpenAIRestContext(TestConfig.OpenAIApiKey); + var assistants = await context.ListAssistantModelsAsync().ConfigureAwait(true); + foreach (var assistant in assistants) + { + this.DumpAssistant(assistant); + } + } + + /// + /// Verify creation and retrieval of assistant. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyAssistantDeleteAsync() + { + var names = + new HashSet(StringComparer.OrdinalIgnoreCase) + { + "Fred", + "Barney", + "DeleteMe", + "Poet", + "Math Tutor", + }; + + var context = new OpenAIRestContext(TestConfig.OpenAIApiKey); + var assistants = await context.ListAssistantModelsAsync().ConfigureAwait(true); + foreach (var assistant in assistants) + { + if (!string.IsNullOrWhiteSpace(assistant.Name) && names.Contains(assistant.Name)) + { + this._output.WriteLine($"Removing: {assistant.Name} - {assistant.Id}"); + await context.DeleteAssistantModelAsync(assistant.Id).ConfigureAwait(true); + } + } + } + + private void DumpAssistant(AssistantModel assistant) + { + this._output.WriteLine($"# {assistant.Id}"); + this._output.WriteLine($"# {assistant.Model}"); + this._output.WriteLine($"# {assistant.Instructions}"); + this._output.WriteLine($"# {assistant.Name}"); + this._output.WriteLine($"# {assistant.Description}{Environment.NewLine}"); + } + + private void DumpAssistant(IAssistant assistant) + { + this._output.WriteLine($"# {assistant.Id}"); + this._output.WriteLine($"# {assistant.Model}"); + this._output.WriteLine($"# {assistant.Instructions}"); + this._output.WriteLine($"# {assistant.Name}"); + this._output.WriteLine($"# {assistant.Description}{Environment.NewLine}"); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Integration/RunHarness.cs b/dotnet/src/Experimental/Assistants.UnitTests/Integration/RunHarness.cs new file mode 100644 index 000000000000..b2d9c1403b8d --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Integration/RunHarness.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +#define DISABLEHOST // Comment line to enable +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Assistants; +using Xunit; +using Xunit.Abstractions; + +#pragma warning disable CA1812 // Uninstantiated internal types + +namespace SemanticKernel.Experimental.Assistants.UnitTests.Integration; + +/// +/// Dev harness for manipulating runs. +/// +/// +/// Comment out DISABLEHOST definition to enable tests. +/// Not enabled by default. +/// +[Trait("Category", "Integration Tests")] +[Trait("Feature", "Assistant")] +public sealed class RunHarness +{ +#if DISABLEHOST + private const string SkipReason = "Harness only for local/dev environment"; +#else + private const string SkipReason = null; +#endif + + private readonly ITestOutputHelper _output; + + /// + /// Test constructor. + /// + public RunHarness(ITestOutputHelper output) + { + this._output = output; + } + + /// + /// Verify creation of run. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyRunLifecycleAsync() + { + var assistant = + await AssistantBuilder.NewAsync( + apiKey: TestConfig.OpenAIApiKey, + model: TestConfig.SupportedGpt35TurboModel, + instructions: "say something funny", + name: "Fred", + description: "funny assistant").ConfigureAwait(true); + + var thread = await assistant.NewThreadAsync().ConfigureAwait(true); + + await this.ChatAsync( + thread, + assistant, + "I was on my way to the store this morning and...", + "That was great! Tell me another.").ConfigureAwait(true); + } + + /// + /// Verify creation of run. + /// + [Fact(Skip = SkipReason)] + public async Task VerifyRunFromDefinitionAsync() + { + var assistant = + await new AssistantBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .FromTemplatePath("Templates/PoetAssistant.yaml") + .BuildAsync() + .ConfigureAwait(true); + + var thread = await assistant.NewThreadAsync().ConfigureAwait(true); + + await this.ChatAsync( + thread, + assistant, + "Eggs are yummy and beautiful geometric gems.", + "It rains a lot in Seattle.").ConfigureAwait(true); + } + + /// + /// Verify creation of run. 
+ /// + [Fact(Skip = SkipReason)] + public async Task VerifyFunctionLifecycleAsync() + { + var gamePlugin = KernelPluginFactory.CreateFromType(); + + var assistant = + await new AssistantBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .FromTemplatePath("Templates/GameAssistant.yaml") + .WithPlugin(gamePlugin) + .BuildAsync() + .ConfigureAwait(true); + + var thread = await assistant.NewThreadAsync().ConfigureAwait(true); + + await this.ChatAsync( + thread, + assistant, + "What is the question for the guessing game?", + "Is it 'RED'?", + "What is the answer?").ConfigureAwait(true); + } + + private async Task ChatAsync(IChatThread thread, IAssistant assistant, params string[] messages) + { + foreach (var message in messages) + { + var messageUser = await thread.AddUserMessageAsync(message).ConfigureAwait(true); + this.LogMessage(messageUser); + + var assistantMessages = await thread.InvokeAsync(assistant).ToArrayAsync().ConfigureAwait(true); + this.LogMessages(assistantMessages); + } + } + + private void LogMessages(IEnumerable messages) + { + foreach (var message in messages) + { + this.LogMessage(message); + } + } + + private void LogMessage(IChatMessage message) + { + this._output.WriteLine($"# {message.Id}"); + this._output.WriteLine($"# {message.Content}"); + this._output.WriteLine($"# {message.Role}"); + this._output.WriteLine($"# {message.AssistantId}"); + } + + private sealed class GuessingGame + { + /// + /// Get the question + /// + [KernelFunction, Description("Get the guessing game question")] + public string GetQuestion() => "What color am I thinking of?"; + + /// + /// Get the answer + /// + [KernelFunction, Description("Get the answer to the guessing game question.")] + public string GetAnswer() => "Blue"; + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Integration/ThreadHarness.cs b/dotnet/src/Experimental/Assistants.UnitTests/Integration/ThreadHarness.cs new file mode 100644 index 000000000000..0546bbd43a30 --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Integration/ThreadHarness.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. + +#define DISABLEHOST // Comment line to enable +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Assistants; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.Experimental.Assistants.UnitTests.Integration; + +/// +/// Dev harness for manipulating threads. +/// +/// +/// Comment out DISABLEHOST definition to enable tests. +/// Not enabled by default. +/// +[Trait("Category", "Integration Tests")] +[Trait("Feature", "Assistant")] +public sealed class ThreadHarness +{ +#if DISABLEHOST + private const string SkipReason = "Harness only for local/dev environment"; +#else + private const string SkipReason = null; +#endif + + private readonly ITestOutputHelper _output; + + /// + /// Test constructor. + /// + public ThreadHarness(ITestOutputHelper output) + { + this._output = output; + } + + /// + /// Verify creation and retrieval of thread. 
+ /// + [Fact(Skip = SkipReason)] + public async Task VerifyThreadLifecycleAsync() + { + var assistant = + await new AssistantBuilder() + .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) + .WithName("DeleteMe") + .BuildAsync() + .ConfigureAwait(true); + + var thread = await assistant.NewThreadAsync().ConfigureAwait(true); + + Assert.NotNull(thread.Id); + + this._output.WriteLine($"# {thread.Id}"); + + var message = await thread.AddUserMessageAsync("I'm so confused!").ConfigureAwait(true); + Assert.NotNull(message); + + this._output.WriteLine($"# {message.Id}"); + + var context = new OpenAIRestContext(TestConfig.OpenAIApiKey); + var copy = await context.GetThreadModelAsync(thread.Id).ConfigureAwait(true); + + await context.DeleteThreadModelAsync(thread.Id).ConfigureAwait(true); + + await Assert.ThrowsAsync(() => context.GetThreadModelAsync(thread.Id)).ConfigureAwait(true); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/MockExtensions.cs b/dotnet/src/Experimental/Assistants.UnitTests/MockExtensions.cs new file mode 100644 index 000000000000..5f2e147e7375 --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/MockExtensions.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Threading; +using Moq; +using Moq.Protected; + +namespace SemanticKernel.Experimental.Assistants.UnitTests; + +internal static class MockExtensions +{ + public static void VerifyMock(this Mock mockHandler, HttpMethod method, int times, string? uri = null) + { + mockHandler.Protected().Verify( + "SendAsync", + Times.Exactly(times), + ItExpr.Is(req => req.Method == method && (uri == null || req.RequestUri == new Uri(uri))), + ItExpr.IsAny()); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Templates/GameAssistant.yaml b/dotnet/src/Experimental/Assistants.UnitTests/Templates/GameAssistant.yaml new file mode 100644 index 000000000000..dce10488e8a2 --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Templates/GameAssistant.yaml @@ -0,0 +1,4 @@ +name: Fred +instructions: | + Run a guessing game where the user tries to guess the answer to a question but don't tell them the answer unless they give up by asking for the answer. + diff --git a/dotnet/src/Experimental/Assistants.UnitTests/Templates/PoetAssistant.yaml b/dotnet/src/Experimental/Assistants.UnitTests/Templates/PoetAssistant.yaml new file mode 100644 index 000000000000..7e356ddd61f7 --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/Templates/PoetAssistant.yaml @@ -0,0 +1,4 @@ +name: Poet +instructions: | + Compose a sonnet inspired by the user input. +description: You are a poet that composes poems based on user input. diff --git a/dotnet/src/Experimental/Assistants.UnitTests/TestConfig.cs b/dotnet/src/Experimental/Assistants.UnitTests/TestConfig.cs new file mode 100644 index 000000000000..30263aa7737c --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/TestConfig.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Reflection; +using Microsoft.Extensions.Configuration; +using Xunit.Sdk; + +namespace SemanticKernel.Experimental.Assistants.UnitTests; + +internal static class TestConfig +{ + public const string SupportedGpt35TurboModel = "gpt-3.5-turbo-1106"; + + public static IConfiguration Configuration { get; } = CreateConfiguration(); + + public static string OpenAIApiKey => + TestConfig.Configuration.GetValue("OpenAIApiKey") ?? 
+ throw new TestClassException("Missing OpenAI APIKey."); + + private static IConfiguration CreateConfiguration() + { + return + new ConfigurationBuilder() + .AddEnvironmentVariables() + .AddJsonFile("testsettings.json") + .AddJsonFile("testsettings.development.json", optional: true) + .AddUserSecrets(Assembly.GetExecutingAssembly()) + .Build(); + } +} diff --git a/dotnet/src/Experimental/Assistants.UnitTests/testsettings.json b/dotnet/src/Experimental/Assistants.UnitTests/testsettings.json new file mode 100644 index 000000000000..d456a389e0f9 --- /dev/null +++ b/dotnet/src/Experimental/Assistants.UnitTests/testsettings.json @@ -0,0 +1,3 @@ +{ + "OpenAIApiKey": "" +} diff --git a/dotnet/src/Experimental/Assistants/AssemblyInfo.cs b/dotnet/src/Experimental/Assistants/AssemblyInfo.cs new file mode 100644 index 000000000000..951ee2d58289 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0101")] diff --git a/dotnet/src/Experimental/Assistants/AssistantBuilder.Static.cs b/dotnet/src/Experimental/Assistants/AssistantBuilder.Static.cs new file mode 100644 index 000000000000..8b52b43bb5c9 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/AssistantBuilder.Static.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Context for interacting with OpenAI REST API. +/// +public partial class AssistantBuilder +{ + /// + /// Create a new assistant. + /// + /// The OpenAI API key + /// The assistant chat model (required) + /// The assistant instructions (required) + /// The assistant name (optional) + /// The assistant description(optional) + /// The requested . + public static async Task NewAsync( + string apiKey, + string model, + string instructions, + string? name = null, + string? description = null) + { + return + await new AssistantBuilder() + .WithOpenAIChatCompletion(model, apiKey) + .WithInstructions(instructions) + .WithName(name) + .WithDescription(description) + .BuildAsync().ConfigureAwait(false); + } + + /// + /// Retrieve an existing assistant, by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// The assistant identifier + /// Plugins to initialize as assistant tools + /// A cancellation token + /// An initialized instance. + public static async Task GetAssistantAsync( + string apiKey, + string assistantId, + IEnumerable? plugins = null, + CancellationToken cancellationToken = default) + { + var restContext = new OpenAIRestContext(apiKey); + var resultModel = await restContext.GetAssistantModelAsync(assistantId, cancellationToken).ConfigureAwait(false); + + return new Assistant(resultModel, restContext, plugins); + } +} diff --git a/dotnet/src/Experimental/Assistants/AssistantBuilder.cs b/dotnet/src/Experimental/Assistants/AssistantBuilder.cs new file mode 100644 index 000000000000..1dc37b4ac126 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/AssistantBuilder.cs @@ -0,0 +1,196 @@ +// Copyright (c) Microsoft. All rights reserved. 
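A brief usage sketch for the static factory surface defined in AssistantBuilder.Static.cs above; the API key and instructions are hypothetical placeholders, and only NewAsync, GetAssistantAsync, and the Id property come from the source:

using Microsoft.SemanticKernel.Experimental.Assistants;

// Hypothetical key and prompt text; the model name mirrors the test configuration.
var assistant =
    await AssistantBuilder.NewAsync(
        apiKey: "sk-...",
        model: "gpt-3.5-turbo-1106",
        instructions: "Answer questions about the weather.",
        name: "WeatherBot");

// Re-attach to the same assistant later by identifier.
var copy =
    await AssistantBuilder.GetAssistantAsync(
        apiKey: "sk-...",
        assistantId: assistant.Id);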
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Exceptions; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Fluent builder for initializing an instance. +/// +public partial class AssistantBuilder +{ + private readonly AssistantModel _model; + private readonly KernelPluginCollection _plugins; + + private string? _apiKey; + private Func? _httpClientProvider; + + /// + /// Initializes a new instance of the class. + /// + public AssistantBuilder() + { + this._model = new AssistantModel(); + this._plugins = new KernelPluginCollection(); + } + + /// + /// Create a instance. + /// + /// A cancellation token + /// A new instance. + public async Task BuildAsync(CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(this._model.Model)) + { + throw new AssistantException("Model must be defined for assistant."); + } + + if (string.IsNullOrWhiteSpace(this._apiKey)) + { + throw new AssistantException("ApiKey must be provided for assistant."); + } + + return + await Assistant.CreateAsync( + new OpenAIRestContext(this._apiKey!, this._httpClientProvider), + this._model, + this._plugins, + cancellationToken).ConfigureAwait(false); + } + + /// + /// Define the OpenAI chat completion service (required). + /// + /// instance for fluid expression. + public AssistantBuilder WithOpenAIChatCompletion(string model, string apiKey) + { + this._apiKey = apiKey; + this._model.Model = model; + + return this; + } + + /// + /// Create a new assistant from a yaml formatted string. + /// + /// YAML assistant definition. + /// instance for fluid expression. + public AssistantBuilder FromTemplate(string template) + { + var deserializer = new DeserializerBuilder().Build(); + + var assistantKernelModel = deserializer.Deserialize(template); + + return + this + .WithInstructions(assistantKernelModel.Instructions.Trim()) + .WithName(assistantKernelModel.Name.Trim()) + .WithDescription(assistantKernelModel.Description.Trim()); + } + + /// + /// Create a new assistant from a yaml template. + /// + /// Path to a configuration file. + /// instance for fluid expression. + public AssistantBuilder FromTemplatePath(string templatePath) + { + var yamlContent = File.ReadAllText(templatePath); + + return this.FromTemplate(yamlContent); + } + + /// + /// Provide an httpclient (optional). + /// + /// instance for fluid expression. + public AssistantBuilder WithHttpClient(HttpClient httpClient) + { + this._httpClientProvider ??= () => httpClient; + + return this; + } + + /// + /// Define the assistant description (optional). + /// + /// instance for fluid expression. + public AssistantBuilder WithDescription(string? description) + { + this._model.Description = description; + + return this; + } + + /// + /// Define the assistant instructions (optional). + /// + /// instance for fluid expression. + public AssistantBuilder WithInstructions(string instructions) + { + this._model.Instructions = instructions; + + return this; + } + + /// + /// Define the assistant metadata (optional). + /// + /// instance for fluid expression. 
+ public AssistantBuilder WithMetadata(string key, object value) + { + this._model.Metadata[key] = value; + + return this; + } + + /// + /// Define the assistant metadata (optional). + /// + /// instance for fluid expression. + public AssistantBuilder WithMetadata(IDictionary metadata) + { + foreach (var kvp in metadata) + { + this._model.Metadata[kvp.Key] = kvp.Value; + } + + return this; + } + + /// + /// Define the assistant name (optional). + /// + /// instance for fluid expression. + public AssistantBuilder WithName(string? name) + { + this._model.Name = name; + + return this; + } + + /// + /// Define functions associated with assistant instance (optional). + /// + /// instance for fluid expression. + public AssistantBuilder WithPlugin(KernelPlugin? plugin) + { + if (plugin != null) + { + this._plugins.Add(plugin); + } + + return this; + } + + /// + /// Define functions associated with assistant instance (optional). + /// + /// instance for fluid expression. + public AssistantBuilder WithPlugins(IEnumerable plugins) + { + this._plugins.AddRange(plugins); + + return this; + } +} diff --git a/dotnet/src/Experimental/Assistants/AssistantPlugin.cs b/dotnet/src/Experimental/Assistants/AssistantPlugin.cs new file mode 100644 index 000000000000..26d9cfae88f7 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/AssistantPlugin.cs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Specialization of for +/// +public abstract class AssistantPlugin : KernelPlugin +{ + /// + protected AssistantPlugin(string name, string? description = null) + : base(name, description) + { + // No specialization... + } + + internal abstract Assistant Assistant { get; } + + /// + /// Invoke plugin with user input + /// + /// The user input + /// A cancel token + /// The assistant response + public async Task InvokeAsync(string input, CancellationToken cancellationToken = default) + { + var args = new KernelArguments { { "input", input } }; + var result = await this.First().InvokeAsync(this.Assistant.Kernel, args, cancellationToken).ConfigureAwait(false); + var response = result.GetValue()!; + + return response.Message; + } +} diff --git a/dotnet/src/Experimental/Assistants/AssistantResponse.cs b/dotnet/src/Experimental/Assistants/AssistantResponse.cs new file mode 100644 index 000000000000..c92ce3662fb2 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/AssistantResponse.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Response from assistant when called as a . +/// +public class AssistantResponse +{ + /// + /// The thread-id for the assistant conversation. + /// + [JsonPropertyName("thread_id")] + public string ThreadId { get; set; } = string.Empty; + + /// + /// The assistant response. + /// + [JsonPropertyName("response")] + public string Message { get; set; } = string.Empty; + + /// + /// Instructions from assistant on next steps. 
+ /// + [JsonPropertyName("system_instructions")] + public string Instructions { get; set; } = string.Empty; +} diff --git a/dotnet/src/Experimental/Assistants/Exceptions/AssistantException.cs b/dotnet/src/Experimental/Assistants/Exceptions/AssistantException.cs new file mode 100644 index 000000000000..2f3057b11543 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Exceptions/AssistantException.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Exceptions; + +/// +/// Assistant specific . +/// +public class AssistantException : KernelException +{ + /// + /// Initializes a new instance of the class. + /// + public AssistantException() + { + } + + /// + /// Initializes a new instance of the class with a specified error message. + /// + /// The error message that explains the reason for the exception. + public AssistantException(string? message) : base(message) + { + } + + /// + /// Initializes a new instance of the class with a specified error message and a reference to the inner exception that is the cause of this exception. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference if no inner exception is specified. + public AssistantException(string? message, Exception? innerException) : base(message, innerException) + { + } +} diff --git a/dotnet/src/Experimental/Assistants/Experimental.Assistants.csproj b/dotnet/src/Experimental/Assistants/Experimental.Assistants.csproj new file mode 100644 index 000000000000..2edcf111ad0f --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Experimental.Assistants.csproj @@ -0,0 +1,31 @@ + + + + Microsoft.SemanticKernel.Experimental.Assistants + Microsoft.SemanticKernel.Experimental.Assistants + netstandard2.0 + alpha + Latest + + + + + + Semantic Kernel Assistants + Semantic Kernel Assistants + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Experimental/Assistants/Extensions/AssistantsKernelExtensions.cs b/dotnet/src/Experimental/Assistants/Extensions/AssistantsKernelExtensions.cs new file mode 100644 index 000000000000..d508b01f9517 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Extensions/AssistantsKernelExtensions.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.Experimental.Assistants.Exceptions; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Extensions; + +internal static class AssistantsKernelExtensions +{ + /// + /// Retrieve a kernel function based on the tool name. + /// + public static KernelFunction GetAssistantTool(this Kernel kernel, string toolName) + { + string[] nameParts = toolName.Split('-'); + return nameParts.Length switch + { + 2 => kernel.Plugins.GetFunction(nameParts[0], nameParts[1]), + _ => throw new AssistantException($"Unknown tool: {toolName}"), + }; + } +} diff --git a/dotnet/src/Experimental/Assistants/Extensions/AssistantsKernelFunctionExtensions.cs b/dotnet/src/Experimental/Assistants/Extensions/AssistantsKernelFunctionExtensions.cs new file mode 100644 index 000000000000..cc56d692896a --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Extensions/AssistantsKernelFunctionExtensions.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft. All rights reserved. 
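GetAssistantTool above resolves an OpenAI tool call back to a kernel function by splitting the tool name on '-', the counterpart of the GetQualifiedName helper in the file that follows. A small sketch of the round trip, assuming a hypothetical MathPlugin with an Add function:

using Microsoft.SemanticKernel;

// MathPlugin/Add are hypothetical; the "{pluginName}-{functionName}" convention is from the source.
var add = KernelFunctionFactory.CreateFromMethod((int a, int b) => a + b, "Add");
var mathPlugin = KernelPluginFactory.CreateFromFunctions("MathPlugin", new[] { add });

var kernel = Kernel.CreateBuilder().Build();
kernel.Plugins.Add(mathPlugin);

string qualified = $"{mathPlugin.Name}-{add.Name}"; // "MathPlugin-Add", as GetQualifiedName would produce
string[] parts = qualified.Split('-');              // GetAssistantTool splits here
KernelFunction resolved = kernel.Plugins.GetFunction(parts[0], parts[1]); // back to the original function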
+ +using System; +using System.Collections.Generic; +using System.Linq; +using Json.More; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +internal static class AssistantsKernelFunctionExtensions +{ + /// + /// Produce a fully qualified toolname. + /// + public static string GetQualifiedName(this KernelFunction function, string pluginName) + { + return $"{pluginName}-{function.Name}"; + } + + /// + /// Convert to an OpenAI tool model. + /// + /// The source function + /// The plugin name + /// An OpenAI tool model + public static ToolModel ToToolModel(this KernelFunction function, string pluginName) + { + var metadata = function.Metadata; + var required = new List(metadata.Parameters.Count); + var properties = + metadata.Parameters.ToDictionary( + p => p.Name, + p => + { + if (p.IsRequired) + { + required.Add(p.Name); + } + + return + new OpenAIParameter + { + Type = ConvertType(p.ParameterType), + Description = p.Description, + }; + }); + + var payload = + new ToolModel + { + Type = "function", + Function = + new() + { + Name = function.GetQualifiedName(pluginName), + Description = function.Description, + Parameters = + new OpenAIParameters + { + Properties = properties, + Required = required, + }, + }, + }; + + return payload; + } + + private static string ConvertType(Type? type) + { + if (type == null || type == typeof(string)) + { + return "string"; + } + + if (type.IsNumber()) + { + return "number"; + } + + if (type.IsEnum) + { + return "enum"; + } + + return type.Name; + } +} diff --git a/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Assistant.cs b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Assistant.cs new file mode 100644 index 000000000000..361f1249f0c6 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Assistant.cs @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using System.Web; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Supported OpenAI REST API actions for assistants. +/// +internal static partial class OpenAIRestExtensions +{ + internal const string BaseAssistantUrl = $"{BaseUrl}/assistants"; + + /// + /// Create a new assistant. + /// + /// A context for accessing OpenAI REST endpoint + /// The assistant definition + /// A cancellation token + /// An assistant definition + public static Task CreateAssistantModelAsync( + this OpenAIRestContext context, + AssistantModel model, + CancellationToken cancellationToken = default) + { + var payload = + new + { + model = model.Model, + name = model.Name, + description = model.Description, + instructions = model.Instructions, + tools = model.Tools, + file_ids = model.FileIds, + metadata = model.Metadata, + }; + + return + context.ExecutePostAsync( + BaseAssistantUrl, + payload, + cancellationToken); + } + + /// + /// Retrieve an assistant by identifier. 
+ /// + /// A context for accessing OpenAI REST endpoint + /// The assistant identifier + /// A cancellation token + /// An assistant definition + public static Task GetAssistantModelAsync( + this OpenAIRestContext context, + string assistantId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + GetAssistantUrl(assistantId), + cancellationToken); + } + + /// + /// Retrieve all assistants. + /// + /// A context for accessing OpenAI REST endpoint + /// A limit on the number of objects to be returned. + /// Limit can range between 1 and 100, and the default is 20. + /// Set to true to sort by ascending created_at timestamp + /// instead of descending. + /// A cursor for use in pagination. This is an object ID that defines + /// your place in the list. For instance, if you make a list request and receive 100 objects, + /// ending with obj_foo, your subsequent call can include after=obj_foo in order to + /// fetch the next page of the list. + /// A cursor for use in pagination. This is an object ID that defines + /// your place in the list. For instance, if you make a list request and receive 100 objects, + /// ending with obj_foo, your subsequent call can include before=obj_foo in order to + /// fetch the previous page of the list. + /// List of retrieved Assistants + /// A cancellation token + /// An enumeration of assistant definitions + public static async Task> ListAssistantModelsAsync( + this OpenAIRestContext context, + int limit = 20, + bool ascending = false, + string? after = null, + string? before = null, + CancellationToken cancellationToken = default) + { + var query = HttpUtility.ParseQueryString(string.Empty); + query["limit"] = limit.ToString(CultureInfo.InvariantCulture); + query["order"] = ascending ? "asc" : "desc"; + if (!string.IsNullOrWhiteSpace(after)) + { + query["after"] = after; + } + if (!string.IsNullOrWhiteSpace(before)) + { + query["before"] = before; + } + + string requestUrl = string.Join("?", BaseAssistantUrl, query.ToString()); + + var result = + await context.ExecuteGetAsync( + requestUrl, + cancellationToken).ConfigureAwait(false); + + return result.Data; + } + + /// + /// Delete an existing assistant + /// + /// A context for accessing OpenAI REST endpoint + /// Identifier of assistant to delete + /// A cancellation token + public static Task DeleteAssistantModelAsync( + this OpenAIRestContext context, + string id, + CancellationToken cancellationToken = default) + { + return context.ExecuteDeleteAsync(GetAssistantUrl(id), cancellationToken); + } + + internal static string GetAssistantUrl(string assistantId) + { + return $"{BaseAssistantUrl}/{assistantId}"; + } +} diff --git a/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Messages.cs b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Messages.cs new file mode 100644 index 000000000000..61d426907ba8 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Messages.cs @@ -0,0 +1,119 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Supported OpenAI REST API actions for thread messages. 
+/// +internal static partial class OpenAIRestExtensions +{ + /// + /// Create a new message. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// The message text + /// A cancellation token + /// A message definition + public static Task CreateUserTextMessageAsync( + this OpenAIRestContext context, + string threadId, + string content, + CancellationToken cancellationToken = default) + { + var payload = + new + { + role = AuthorRole.User.Label, + content, + }; + + return + context.ExecutePostAsync( + GetMessagesUrl(threadId), + payload, + cancellationToken); + } + + /// + /// Retrieve an message by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// The message identifier + /// A cancellation token + /// A message definition + public static Task GetMessageAsync( + this OpenAIRestContext context, + string threadId, + string messageId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + GetMessagesUrl(threadId, messageId), + cancellationToken); + } + + /// + /// Retrieve all thread messages. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// A cancellation token + /// A message list definition + public static Task GetMessagesAsync( + this OpenAIRestContext context, + string threadId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + GetMessagesUrl(threadId), + cancellationToken); + } + + /// + /// Retrieve all thread messages. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// The set of message identifiers to retrieve + /// A cancellation token + /// A message list definition + public static async Task> GetMessagesAsync( + this OpenAIRestContext context, + string threadId, + IEnumerable messageIds, + CancellationToken cancellationToken = default) + { + var tasks = + messageIds.Select( + id => + context.ExecuteGetAsync( + GetMessagesUrl(threadId, id), + cancellationToken)).ToArray(); + + await Task.WhenAll(tasks).ConfigureAwait(false); + + return tasks.Select(t => t.Result).ToArray(); + } + + internal static string GetMessagesUrl(string threadId) + { + return $"{BaseThreadUrl}/{threadId}/messages"; + } + + internal static string GetMessagesUrl(string threadId, string messageId) + { + return $"{BaseThreadUrl}/{threadId}/messages/{messageId}"; + } +} diff --git a/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Run.cs b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Run.cs new file mode 100644 index 000000000000..989ca93a8ae0 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Run.cs @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Supported OpenAI REST API actions for thread runs. +/// +internal static partial class OpenAIRestExtensions +{ + /// + /// Create a new run. 
+ /// + /// A context for accessing OpenAI REST endpoint + /// A thread identifier + /// The assistant identifier + /// Optional instruction override + /// The assistant tools + /// A cancellation token + /// A run definition + public static Task CreateRunAsync( + this OpenAIRestContext context, + string threadId, + string assistantId, + string? instructions = null, + IEnumerable? tools = null, + CancellationToken cancellationToken = default) + { + var payload = + new + { + assistant_id = assistantId, + instructions, + tools, + }; + + return + context.ExecutePostAsync( + GetRunUrl(threadId), + payload, + cancellationToken); + } + + /// + /// Retrieve an run by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// A thread identifier + /// A run identifier + /// A cancellation token + /// A run definition + public static Task GetRunAsync( + this OpenAIRestContext context, + string threadId, + string runId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + GetRunUrl(threadId, runId), + cancellationToken); + } + + /// + /// Retrieve run steps by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// A thread identifier + /// A run identifier + /// A cancellation token + /// A set of run steps + public static Task GetRunStepsAsync( + this OpenAIRestContext context, + string threadId, + string runId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + GetRunStepsUrl(threadId, runId), + cancellationToken); + } + + /// + /// Add a function result for a run. + /// + /// A context for accessing OpenAI REST endpoint + /// A thread identifier + /// The run identifier + /// The function/tool results. + /// A cancellation token + /// A run definition + public static Task AddToolOutputsAsync( + this OpenAIRestContext context, + string threadId, + string runId, + IEnumerable results, + CancellationToken cancellationToken = default) + { + var payload = + new + { + tool_outputs = results + }; + + return + context.ExecutePostAsync( + GetRunToolOutput(threadId, runId), + payload, + cancellationToken); + } + + internal static string GetRunUrl(string threadId) + { + return $"{BaseThreadUrl}/{threadId}/runs"; + } + + internal static string GetRunUrl(string threadId, string runId) + { + return $"{BaseThreadUrl}/{threadId}/runs/{runId}"; + } + + internal static string GetRunStepsUrl(string threadId, string runId) + { + return $"{BaseThreadUrl}/{threadId}/runs/{runId}/steps"; + } + + internal static string GetRunToolOutput(string threadId, string runId) + { + return $"{BaseThreadUrl}/{threadId}/runs/{runId}/submit_tool_outputs"; + } +} diff --git a/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Thread.cs b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Thread.cs new file mode 100644 index 000000000000..711b0e25e881 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.Thread.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Supported OpenAI REST API actions for threads. +/// +internal static partial class OpenAIRestExtensions +{ + internal const string BaseThreadUrl = $"{BaseUrl}/threads"; + + /// + /// Create a new thread. 
+ /// + /// A context for accessing OpenAI REST endpoint + /// A cancellation token + /// A thread definition + public static Task CreateThreadModelAsync( + this OpenAIRestContext context, + CancellationToken cancellationToken = default) + { + return + context.ExecutePostAsync( + BaseThreadUrl, + cancellationToken); + } + + /// + /// Retrieve an thread by identifier. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// A cancellation token + /// A thread definition + public static Task GetThreadModelAsync( + this OpenAIRestContext context, + string threadId, + CancellationToken cancellationToken = default) + { + return + context.ExecuteGetAsync( + GetThreadUrl(threadId), + cancellationToken); + } + + /// + /// Delete an existing thread. + /// + /// A context for accessing OpenAI REST endpoint + /// Identifier of thread to delete + /// A cancellation token + public static Task DeleteThreadModelAsync( + this OpenAIRestContext context, + string id, + CancellationToken cancellationToken = default) + { + return context.ExecuteDeleteAsync(GetThreadUrl(id), cancellationToken); + } + + internal static string GetThreadUrl(string threadId) + { + return $"{BaseThreadUrl}/{threadId}"; + } +} diff --git a/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.cs b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.cs new file mode 100644 index 000000000000..e98198311a6b --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Extensions/OpenAIRestExtensions.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Exceptions; +using Microsoft.SemanticKernel.Experimental.Assistants.Internal; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +internal static partial class OpenAIRestExtensions +{ + private const string BaseUrl = "https://api.openai.com/v1"; + private const string HeaderNameOpenAIAssistant = "OpenAI-Beta"; + private const string HeaderNameAuthorization = "Authorization"; + private const string HeaderOpenAIValueAssistant = "assistants=v1"; + + private static async Task ExecuteGetAsync( + this OpenAIRestContext context, + string url, + CancellationToken cancellationToken = default) + { + using var request = HttpRequest.CreateGetRequest(url); + + request.Headers.Add(HeaderNameAuthorization, $"Bearer {context.ApiKey}"); + request.Headers.Add(HeaderNameOpenAIAssistant, HeaderOpenAIValueAssistant); + + using var response = await context.GetHttpClient().SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + throw new AssistantException($"Unexpected failure: {response.StatusCode} [{url}]"); + } + + string responseBody = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + + // Common case is for failure exception to be raised by REST invocation. + // Null result is a logical possibility, but unlikely edge case. + // Might occur due to model alignment issues over time. + return + JsonSerializer.Deserialize(responseBody) ?? 
+ throw new AssistantException($"Null result processing: {typeof(TResult).Name}"); + } + + private static Task ExecutePostAsync( + this OpenAIRestContext context, + string url, + CancellationToken cancellationToken = default) + { + return context.ExecutePostAsync(url, payload: null, cancellationToken); + } + + private static async Task ExecutePostAsync( + this OpenAIRestContext context, + string url, + object? payload, + CancellationToken cancellationToken = default) + { + using var request = HttpRequest.CreatePostRequest(url, payload); + + request.Headers.Add(HeaderNameAuthorization, $"Bearer {context.ApiKey}"); + request.Headers.Add(HeaderNameOpenAIAssistant, HeaderOpenAIValueAssistant); + + using var response = await context.GetHttpClient().SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + throw new AssistantException($"Unexpected failure: {response.StatusCode} [{url}]"); + } + + string responseBody = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + return + JsonSerializer.Deserialize(responseBody) ?? + throw new AssistantException($"Null result processing: {typeof(TResult).Name}"); + } + + private static async Task ExecuteDeleteAsync( + this OpenAIRestContext context, + string url, + CancellationToken cancellationToken = default) + { + using var request = HttpRequest.CreateDeleteRequest(url); + + request.Headers.Add(HeaderNameAuthorization, $"Bearer {context.ApiKey}"); + request.Headers.Add(HeaderNameOpenAIAssistant, HeaderOpenAIValueAssistant); + + using var response = await context.GetHttpClient().SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + throw new AssistantException($"Unexpected failure: {response.StatusCode} [{url}]"); + } + } +} diff --git a/dotnet/src/Experimental/Assistants/IAssistant.cs b/dotnet/src/Experimental/Assistants/IAssistant.cs new file mode 100644 index 000000000000..5530e33ec887 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/IAssistant.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Represents an assistant that can call the model and use tools. +/// +public interface IAssistant +{ + /// + /// The assistant identifier (which can be referenced in API endpoints). + /// + string Id { get; } + + /// + /// Always "assistant" + /// +#pragma warning disable CA1720 // Identifier contains type name - We don't control the schema +#pragma warning disable CA1716 // Identifiers should not match keywords + string Object { get; } +#pragma warning restore CA1716 // Identifiers should not match keywords +#pragma warning restore CA1720 // Identifier contains type name + + /// + /// Unix timestamp (in seconds) for when the assistant was created + /// + long CreatedAt { get; } + + /// + /// Name of the assistant + /// + string? Name { get; } + + /// + /// The description of the assistant + /// + string? Description { get; } + + /// + /// ID of the model to use + /// + string Model { get; } + + /// + /// The system instructions that the assistant uses + /// + string Instructions { get; } + + /// + /// A semantic-kernel instance associated with the assistant. + /// + internal Kernel Kernel { get; } + + /// + /// Tools defined for run execution. + /// + public KernelPluginCollection Plugins { get; } + + /// + /// Expose the assistant as a plugin. 
+ /// + public AssistantPlugin AsPlugin(); + + /// + /// Creates a new assistant chat thread. + /// + /// A cancellation token + Task NewThreadAsync(CancellationToken cancellationToken = default); + + /// + /// Gets an existing assistant chat thread. + /// + /// The id of the existing chat thread. + /// A cancellation token + Task GetThreadAsync(string id, CancellationToken cancellationToken = default); + + /// + /// Deletes an existing assistant chat thread. + /// + /// The id of the existing chat thread. Allows for null-fallthrough to simplify caller patterns. + /// A cancellation token + Task DeleteThreadAsync(string? id, CancellationToken cancellationToken = default); + + /// + /// Delete current assistant. Terminal state - Unable to perform any + /// subsequent actions. + /// + /// A cancellation token + Task DeleteAsync(CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Experimental/Assistants/IAssistantExtensions.cs b/dotnet/src/Experimental/Assistants/IAssistantExtensions.cs new file mode 100644 index 000000000000..26ebecc32df2 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/IAssistantExtensions.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Convenience actions for . +/// +public static class IAssistantExtensions +{ + /// + /// Invoke assistant with user input + /// + /// the assistant + /// the user input + /// a cancel token + /// chat messages + public static async IAsyncEnumerable InvokeAsync( + this IAssistant assistant, + string input, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + IChatThread thread = await assistant.NewThreadAsync(cancellationToken).ConfigureAwait(false); + try + { + await foreach (var message in thread.InvokeAsync(assistant, input, cancellationToken)) + { + yield return message; + } + } + finally + { + await thread.DeleteAsync(cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/dotnet/src/Experimental/Assistants/IChatMessage.cs b/dotnet/src/Experimental/Assistants/IChatMessage.cs new file mode 100644 index 000000000000..05e0e51c60b0 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/IChatMessage.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.ObjectModel; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Represents a message that is part of an assistant thread. +/// +public interface IChatMessage +{ + /// + /// The message identifier (which can be referenced in API endpoints). + /// + string Id { get; } + + /// + /// The id of the assistant associated with the a message where role = "assistant", otherwise null. + /// + string? AssistantId { get; } + + /// + /// The chat message content. + /// + string Content { get; } + + /// + /// The role associated with the chat message. + /// + string Role { get; } + + /// + /// Properties associated with the message. + /// + ReadOnlyDictionary Properties { get; } +} diff --git a/dotnet/src/Experimental/Assistants/IChatThread.cs b/dotnet/src/Experimental/Assistants/IChatThread.cs new file mode 100644 index 000000000000..cd29324db872 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/IChatThread.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. 
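A usage sketch for the InvokeAsync convenience extension defined above, which creates a temporary thread, streams the assistant's reply messages, and deletes the thread when the enumeration completes; the assistant instance and the prompt are assumed:

using System;
using Microsoft.SemanticKernel.Experimental.Assistants;

// 'assistant' is assumed to be an IAssistant created earlier via AssistantBuilder.
await foreach (IChatMessage message in assistant.InvokeAsync("Write a haiku about threads."))
{
    Console.WriteLine($"{message.Role}: {message.Content}");
}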
+ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Experimental.Assistants; + +/// +/// Represents a thread that contains messages. +/// +public interface IChatThread +{ + /// + /// The thread identifier (which can be referenced in API endpoints). + /// + string Id { get; } + + /// + /// Add a textual user message to the thread. + /// + /// The user message + /// A cancellation token + /// + Task AddUserMessageAsync(string message, CancellationToken cancellationToken = default); + + /// + /// Advance the thread with the specified assistant. + /// + /// An assistant instance. + /// A cancellation token + /// The resulting assistant message(s) + IAsyncEnumerable InvokeAsync(IAssistant assistant, CancellationToken cancellationToken = default); + + /// + /// Advance the thread with the specified assistant. + /// + /// An assistant instance. + /// The user message + /// A cancellation token + /// The resulting assistant message(s) + IAsyncEnumerable InvokeAsync(IAssistant assistant, string userMessage, CancellationToken cancellationToken = default); + + /// + /// Delete current thread. Terminal state - Unable to perform any + /// subsequent actions. + /// + /// A cancellation token + Task DeleteAsync(CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Experimental/Assistants/Internal/Assistant.cs b/dotnet/src/Experimental/Assistants/Internal/Assistant.cs new file mode 100644 index 000000000000..5d8b1585e39d --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Internal/Assistant.cs @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Exceptions; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Internal; + +/// +/// Represents an assistant that can call the model and use tools. +/// +internal sealed class Assistant : IAssistant +{ + /// + public string Id => this._model.Id; + + /// + public Kernel Kernel { get; } + + /// + public KernelPluginCollection Plugins => this.Kernel.Plugins; + + /// +#pragma warning disable CA1720 // Identifier contains type name - We don't control the schema +#pragma warning disable CA1716 // Identifiers should not match keywords + public string Object => this._model.Object; +#pragma warning restore CA1720 // Identifier contains type name - We don't control the schema +#pragma warning restore CA1716 // Identifiers should not match keywords + + /// + public long CreatedAt => this._model.CreatedAt; + + /// + public string? Name => this._model.Name; + + /// + public string? Description => this._model.Description; + + /// + public string Model => this._model.Model; + + /// + public string Instructions => this._model.Instructions; + + private static readonly Regex s_removeInvalidCharsRegex = new("[^0-9A-Za-z-]"); + + private readonly OpenAIRestContext _restContext; + private readonly AssistantModel _model; + + private AssistantPlugin? _assistantPlugin; + private bool _isDeleted; + + /// + /// Create a new assistant. 
+ ///
+ /// A context for accessing OpenAI REST endpoint
+ /// The assistant definition
+ /// Plugins to initialize as assistant tools
+ /// A cancellation token
+ /// An initialized instance.
+ public static async Task CreateAsync(
+ OpenAIRestContext restContext,
+ AssistantModel assistantModel,
+ IEnumerable? plugins = null,
+ CancellationToken cancellationToken = default)
+ {
+ var resultModel = await restContext.CreateAssistantModelAsync(assistantModel, cancellationToken).ConfigureAwait(false);
+
+ return new Assistant(resultModel, restContext, plugins);
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ internal Assistant(
+ AssistantModel model,
+ OpenAIRestContext restContext,
+ IEnumerable? plugins = null)
+ {
+ this._model = model;
+ this._restContext = restContext;
+
+ this.Kernel =
+ Kernel
+ .CreateBuilder()
+ .AddOpenAIChatCompletion(this._model.Model, this._restContext.ApiKey)
+ .Build();
+
+ if (plugins is not null)
+ {
+ this.Kernel.Plugins.AddRange(plugins);
+ }
+ }
+
+ public AssistantPlugin AsPlugin() => this._assistantPlugin ??= this.DefinePlugin();
+
+ ///
+ public Task NewThreadAsync(CancellationToken cancellationToken = default)
+ {
+ this.ThrowIfDeleted();
+
+ return ChatThread.CreateAsync(this._restContext, cancellationToken);
+ }
+
+ ///
+ public Task GetThreadAsync(string id, CancellationToken cancellationToken = default)
+ {
+ this.ThrowIfDeleted();
+
+ return ChatThread.GetAsync(this._restContext, id, cancellationToken);
+ }
+
+ ///
+ public async Task DeleteThreadAsync(string? id, CancellationToken cancellationToken = default)
+ {
+ if (string.IsNullOrWhiteSpace(id))
+ {
+ return;
+ }
+
+ await this._restContext.DeleteThreadModelAsync(id!, cancellationToken).ConfigureAwait(false);
+ }
+
+ ///
+ public async Task DeleteAsync(CancellationToken cancellationToken = default)
+ {
+ if (this._isDeleted)
+ {
+ return;
+ }
+
+ await this._restContext.DeleteAssistantModelAsync(this.Id, cancellationToken).ConfigureAwait(false);
+ this._isDeleted = true;
+ }
+
+ ///
+ /// Marshal thread run through interface.
+ ///
+ /// The user input
+ /// A cancellation token.
+ /// An assistant response ( + private async Task AskAsync( + [Description("The user message provided to the assistant.")] + string input, + CancellationToken cancellationToken = default) + { + var thread = await this.NewThreadAsync(cancellationToken).ConfigureAwait(false); + try + { + await thread.AddUserMessageAsync(input, cancellationToken).ConfigureAwait(false); + + var messages = await thread.InvokeAsync(this, cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + var response = + new AssistantResponse + { + ThreadId = thread.Id, + Message = string.Concat(messages.Select(m => m.Content)), + }; + + return response; + } + finally + { + await thread.DeleteAsync(cancellationToken).ConfigureAwait(false); + } + } + + private AssistantPluginImpl DefinePlugin() + { + var functionAsk = KernelFunctionFactory.CreateFromMethod(this.AskAsync, description: this.Description); + + return new AssistantPluginImpl(this, functionAsk); + } + + private void ThrowIfDeleted() + { + if (this._isDeleted) + { + throw new AssistantException($"{nameof(Assistant)}: {this.Id} has been deleted."); + } + } + + private sealed class AssistantPluginImpl : AssistantPlugin + { + public KernelFunction FunctionAsk { get; } + + internal override Assistant Assistant { get; } + + public override int FunctionCount => 1; + + private static readonly string s_functionName = nameof(Assistant.AskAsync).Substring(0, nameof(Assistant.AskAsync).Length - 5); + + public AssistantPluginImpl(Assistant assistant, KernelFunction functionAsk) + : base(s_removeInvalidCharsRegex.Replace(assistant.Name ?? assistant.Id, string.Empty), + assistant.Description ?? assistant.Instructions) + { + this.Assistant = assistant; + this.FunctionAsk = functionAsk; + } + + public override IEnumerator GetEnumerator() + { + yield return this.FunctionAsk; + } + + public override bool TryGetFunction(string name, [NotNullWhen(true)] out KernelFunction? function) + { + function = null; + + if (s_functionName.Equals(name, StringComparison.OrdinalIgnoreCase)) + { + function = this.FunctionAsk; + } + + return function != null; + } + } +} diff --git a/dotnet/src/Experimental/Assistants/Internal/ChatMessage.cs b/dotnet/src/Experimental/Assistants/Internal/ChatMessage.cs new file mode 100644 index 000000000000..8842f4a916a4 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Internal/ChatMessage.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Internal; + +/// +/// Represents a message that is part of an assistant thread. +/// +internal sealed class ChatMessage : IChatMessage +{ + /// + public string Id { get; } + + /// + public string? AssistantId { get; } + + /// + public string Content { get; } + + /// + public string Role { get; } + + /// + public ReadOnlyDictionary Properties { get; } + + /// + /// Initializes a new instance of the class. + /// + internal ChatMessage(ThreadMessageModel model) + { + var content = (IEnumerable)model.Content; + var text = content.First().Text?.Value ?? string.Empty; + + this.Id = model.Id; + this.AssistantId = string.IsNullOrWhiteSpace(model.AssistantId) ? 
null : model.AssistantId; + this.Role = model.Role; + this.Content = text; + this.Properties = new ReadOnlyDictionary(model.Metadata); + } +} diff --git a/dotnet/src/Experimental/Assistants/Internal/ChatRun.cs b/dotnet/src/Experimental/Assistants/Internal/ChatRun.cs new file mode 100644 index 000000000000..1c7e9db0609c --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Internal/ChatRun.cs @@ -0,0 +1,184 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Exceptions; +using Microsoft.SemanticKernel.Experimental.Assistants.Extensions; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Internal; + +/// +/// Represents an execution run on a thread. +/// +internal sealed class ChatRun +{ + /// + public string Id => this._model.Id; + + /// + public string AssistantId => this._model.AssistantId; + + /// + public string ThreadId => this._model.ThreadId; + + private const string ActionState = "requires_action"; + private const string FailedState = "failed"; + private const string CompletedState = "completed"; + private static readonly TimeSpan s_pollingInterval = TimeSpan.FromMilliseconds(500); + private static readonly TimeSpan s_pollingBackoff = TimeSpan.FromSeconds(1); + + private static readonly HashSet s_pollingStates = + new(StringComparer.OrdinalIgnoreCase) + { + "queued", + "in_progress", + }; + + private readonly OpenAIRestContext _restContext; + private readonly Kernel _kernel; + + private ThreadRunModel _model; + + /// + public async Task> GetResultAsync(CancellationToken cancellationToken = default) + { + // Poll until actionable + await PollRunStatus().ConfigureAwait(false); + + // Retrieve steps + var steps = await this._restContext.GetRunStepsAsync(this.ThreadId, this.Id, cancellationToken).ConfigureAwait(false); + + do + { + // Is tool action required? + if (ActionState.Equals(this._model.Status, StringComparison.OrdinalIgnoreCase)) + { + // Execute functions in parallel and post results at once. + var tasks = steps.Data.SelectMany(step => this.ExecuteStep(step, cancellationToken)).ToArray(); + await Task.WhenAll(tasks).ConfigureAwait(false); + + var results = tasks.Select(t => t.Result).ToArray(); + await this._restContext.AddToolOutputsAsync(this.ThreadId, this.Id, results, cancellationToken).ConfigureAwait(false); + + // Refresh run as it goes back into pending state after posting function results. + await PollRunStatus(force: true).ConfigureAwait(false); + + // Refresh steps to retrieve additional messages. + steps = await this._restContext.GetRunStepsAsync(this.ThreadId, this.Id, cancellationToken).ConfigureAwait(false); + } + + // Did fail? + if (FailedState.Equals(this._model.Status, StringComparison.OrdinalIgnoreCase)) + { + throw new AssistantException($"Unexpected failure processing run: {this.Id}: {this._model.LastError?.Message ?? "Unknown"}"); + } + } + while (!CompletedState.Equals(this._model.Status, StringComparison.OrdinalIgnoreCase)); + + var messageIds = + steps.Data + .Where(s => s.StepDetails.MessageCreation != null) + .Select(s => s.StepDetails.MessageCreation!.MessageId) + .ToArray(); + + return messageIds; + + async Task PollRunStatus(bool force = false) + { + int count = 0; + + // Ignore model status when forced. 
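+ // When forced, the loop is entered once regardless of the cached status and the delay is skipped,
+ // so the run is re-fetched immediately after tool outputs are posted; normal polling then resumes.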
+ while (force || s_pollingStates.Contains(this._model.Status)) + { + if (!force) + { + // Reduce polling frequency after a couple attempts + await Task.Delay(count >= 2 ? s_pollingInterval : s_pollingBackoff, cancellationToken).ConfigureAwait(false); + ++count; + } + + force = false; + + try + { + this._model = await this._restContext.GetRunAsync(this.ThreadId, this.Id, cancellationToken).ConfigureAwait(false); + } + catch (Exception exception) when (!exception.IsCriticalException()) + { + // Retry anyway.. + } + } + } + } + + /// + /// Initializes a new instance of the class. + /// + internal ChatRun( + ThreadRunModel model, + Kernel kernel, + OpenAIRestContext restContext) + { + this._model = model; + this._kernel = kernel; + this._restContext = restContext; + } + + private IEnumerable> ExecuteStep(ThreadRunStepModel step, CancellationToken cancellationToken) + { + // Process all of the steps that require action + if (step.Status == "in_progress" && step.StepDetails.Type == "tool_calls") + { + foreach (var toolCall in step.StepDetails.ToolCalls) + { + // Run function + yield return this.ProcessFunctionStepAsync(toolCall.Id, toolCall.Function, cancellationToken); + } + } + } + + private async Task ProcessFunctionStepAsync(string callId, ThreadRunStepModel.FunctionDetailsModel functionDetails, CancellationToken cancellationToken) + { + var result = await InvokeFunctionCallAsync().ConfigureAwait(false); + var toolResult = result as string; + if (toolResult == null) + { + toolResult = JsonSerializer.Serialize(result); + } + + return + new ToolResultModel + { + CallId = callId, + Output = toolResult!, + }; + + async Task InvokeFunctionCallAsync() + { + var function = this._kernel.GetAssistantTool(functionDetails.Name); + + var functionArguments = new KernelArguments(); + if (!string.IsNullOrWhiteSpace(functionDetails.Arguments)) + { + var arguments = JsonSerializer.Deserialize>(functionDetails.Arguments)!; + foreach (var argument in arguments) + { + functionArguments[argument.Key] = argument.Value.ToString(); + } + } + + var result = await function.InvokeAsync(this._kernel, functionArguments, cancellationToken).ConfigureAwait(false); + if (result.ValueType == typeof(AssistantResponse)) + { + return result.GetValue()!; + } + + return result.GetValue() ?? string.Empty; + } + } +} diff --git a/dotnet/src/Experimental/Assistants/Internal/ChatThread.cs b/dotnet/src/Experimental/Assistants/Internal/ChatThread.cs new file mode 100644 index 000000000000..5a32741e561d --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Internal/ChatThread.cs @@ -0,0 +1,130 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Assistants.Exceptions; +using Microsoft.SemanticKernel.Experimental.Assistants.Models; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Internal; + +/// +/// Represents a thread that contains messages. +/// +internal sealed class ChatThread : IChatThread +{ + /// + public string Id { get; private set; } + + private readonly OpenAIRestContext _restContext; + private bool _isDeleted; + + /// + /// Create a new thread. + /// + /// A context for accessing OpenAI REST endpoint + /// A cancellation token + /// An initialized instance. 
+ public static async Task CreateAsync(OpenAIRestContext restContext, CancellationToken cancellationToken = default) + { + // Common case is for failure exception to be raised by REST invocation. Null result is a logical possibility, but unlikely edge case. + var threadModel = await restContext.CreateThreadModelAsync(cancellationToken).ConfigureAwait(false); + + return new ChatThread(threadModel, messageListModel: null, restContext); + } + + /// + /// Retrieve an existing thread. + /// + /// A context for accessing OpenAI REST endpoint + /// The thread identifier + /// A cancellation token + /// An initialized instance. + public static async Task GetAsync(OpenAIRestContext restContext, string threadId, CancellationToken cancellationToken = default) + { + var threadModel = await restContext.GetThreadModelAsync(threadId, cancellationToken).ConfigureAwait(false); + var messageListModel = await restContext.GetMessagesAsync(threadId, cancellationToken).ConfigureAwait(false); + + return new ChatThread(threadModel, messageListModel, restContext); + } + + /// + public async Task AddUserMessageAsync(string message, CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + var messageModel = + await this._restContext.CreateUserTextMessageAsync( + this.Id, + message, + cancellationToken).ConfigureAwait(false); + + return new ChatMessage(messageModel); + } + + /// + public IAsyncEnumerable InvokeAsync(IAssistant assistant, CancellationToken cancellationToken) + { + return this.InvokeAsync(assistant, string.Empty, cancellationToken); + } + + /// + public async IAsyncEnumerable InvokeAsync(IAssistant assistant, string userMessage, [EnumeratorCancellation] CancellationToken cancellationToken) + { + this.ThrowIfDeleted(); + + if (!string.IsNullOrWhiteSpace(userMessage)) + { + yield return await this.AddUserMessageAsync(userMessage, cancellationToken).ConfigureAwait(false); + } + + var tools = assistant.Plugins.SelectMany(p => p.Select(f => f.ToToolModel(p.Name))); + var runModel = await this._restContext.CreateRunAsync(this.Id, assistant.Id, assistant.Instructions, tools, cancellationToken).ConfigureAwait(false); + + var run = new ChatRun(runModel, assistant.Kernel, this._restContext); + var results = await run.GetResultAsync(cancellationToken).ConfigureAwait(false); + + var messages = await this._restContext.GetMessagesAsync(this.Id, results, cancellationToken).ConfigureAwait(false); + foreach (var message in messages) + { + yield return new ChatMessage(message); + } + } + + /// + /// Delete an existing thread. + /// + /// A cancellation token + public async Task DeleteAsync(CancellationToken cancellationToken) + { + if (this._isDeleted) + { + return; + } + + await this._restContext.DeleteThreadModelAsync(this.Id, cancellationToken).ConfigureAwait(false); + this._isDeleted = true; + } + + /// + /// Initializes a new instance of the class. + /// + private ChatThread( + ThreadModel threadModel, + ThreadMessageListModel? 
messageListModel, + OpenAIRestContext restContext) + { + this.Id = threadModel.Id; + this._restContext = restContext; + } + + private void ThrowIfDeleted() + { + if (this._isDeleted) + { + throw new AssistantException($"{nameof(ChatThread)}: {this.Id} has been deleted."); + } + } +} diff --git a/dotnet/src/Experimental/Assistants/Internal/OpenAIRestContext.cs b/dotnet/src/Experimental/Assistants/Internal/OpenAIRestContext.cs new file mode 100644 index 000000000000..4474eab49c39 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Internal/OpenAIRestContext.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Internal; + +/// +/// Placeholder context. +/// +internal sealed class OpenAIRestContext +{ + private static readonly HttpClient s_defaultOpenAIClient = new(); + /// + public string ApiKey { get; } + + /// + public HttpClient GetHttpClient() => this._clientFactory.Invoke(); + + private readonly Func _clientFactory; + + /// + /// Initializes a new instance of the class. + /// + public OpenAIRestContext(string apiKey, Func? clientFactory = null) + { + this._clientFactory = clientFactory ??= () => s_defaultOpenAIClient; + + this.ApiKey = apiKey; + } +} diff --git a/dotnet/src/Experimental/Assistants/Models/AssistantConfigurationModel.cs b/dotnet/src/Experimental/Assistants/Models/AssistantConfigurationModel.cs new file mode 100644 index 000000000000..0eaa23d4260a --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/AssistantConfigurationModel.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// Represents a yaml configuration file for an assistant. +/// +internal sealed class AssistantConfigurationModel +{ + /// + /// The assistant name + /// + [YamlMember(Alias = "name")] + public string Name { get; set; } = string.Empty; + + /// + /// The assistant description + /// + [YamlMember(Alias = "description")] + public string Description { get; set; } = string.Empty; + + /// + /// The assistant instructions template + /// + [YamlMember(Alias = "instructions")] + public string Instructions { get; set; } = string.Empty; + + ///// + ///// The assistant instructions template + ///// + //[YamlMember(Alias = "template")] + //public string Template { get; set; } = string.Empty; + + ///// + ///// The assistant instruction template format. + ///// + //[YamlMember(Alias = "template_format")] + //public string TemplateFormat { get; set; } = string.Empty; + + ///// + ///// Describes the input variables for the template. + ///// + //[YamlMember(Alias = "input_variables")] + //public List InputVariables { get; set; } + + ///// + ///// Describes known valid models. + ///// + //[YamlMember(Alias = "execution_settings")] + //public List ExecutionSettings { get; set; } +} diff --git a/dotnet/src/Experimental/Assistants/Models/AssistantModel.cs b/dotnet/src/Experimental/Assistants/Models/AssistantModel.cs new file mode 100644 index 000000000000..afeec4612bd4 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/AssistantModel.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft. All rights reserved. 
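+// Note (illustrative, not part of the REST schema below): AssistantConfigurationModel above maps the
+// "name", "description", and "instructions" keys of an assistant YAML definition. A minimal,
+// hypothetical definition file might look like:
+//
+//   name: TestAssistant
+//   description: Answers questions about the repository.
+//   instructions: You are a helpful assistant. Answer concisely.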
+#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// list of run steps belonging to a run. +/// +internal sealed class AssistantListModel : OpenAIListModel +{ + // No specialization +} + +/// +/// Model of Assistant data returned from OpenAI +/// +internal sealed record AssistantModel +{ + /// + /// Identifier, which can be referenced in API endpoints + /// + [JsonPropertyName("id")] + public string Id { get; init; } = string.Empty; + + /// + /// Always "assistant" + /// + [JsonPropertyName("object")] +#pragma warning disable CA1720 // Identifier contains type name - We don't control the schema + public string Object { get; init; } = "assistant"; +#pragma warning restore CA1720 // Identifier contains type name + + /// + /// Unix timestamp (in seconds) for when the assistant was created + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; init; } + + /// + /// Name of the assistant + /// + [JsonPropertyName("name")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Name { get; set; } + + /// + /// The description of the assistant + /// + [JsonPropertyName("description")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Description { get; set; } + + /// + /// ID of the model to use + /// + [JsonPropertyName("model")] + public string Model { get; set; } = string.Empty; + + /// + /// The system instructions that the assistant uses + /// + [JsonPropertyName("instructions")] + public string Instructions { get; set; } = string.Empty; + + /// + /// A list of tool enabled on the assistant + /// There can be a maximum of 128 tools per assistant. + /// + [JsonPropertyName("tools")] + public List Tools { get; init; } = new List(); + + /// + /// A list of file IDs attached to this assistant. + /// There can be a maximum of 20 files attached to the assistant. + /// + [JsonPropertyName("file_ids")] + public List FileIds { get; init; } = new List(); + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the + /// object in a structured format. + /// Keys can be a maximum of 64 characters long and values can be a + /// maximum of 512 characters long. + /// + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; init; } = new Dictionary(); +} diff --git a/dotnet/src/Experimental/Assistants/Models/OpenAIListModel.cs b/dotnet/src/Experimental/Assistants/Models/OpenAIListModel.cs new file mode 100644 index 000000000000..8c6c70eb9441 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/OpenAIListModel.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// list of run steps belonging to a run. +/// +internal abstract class OpenAIListModel +{ + /// + /// Always "list" + /// + [JsonPropertyName("object")] +#pragma warning disable CA1720 // Identifier contains type name - We don't control the schema + public string Object { get; set; } = "list"; +#pragma warning restore CA1720 // Identifier contains type name + + /// + /// List of steps. + /// + [JsonPropertyName("data")] + public List Data { get; set; } = new List(); + + /// + /// The identifier of the first data record. 
+ /// + [JsonPropertyName("first_id")] + public string FirstId { get; set; } = string.Empty; + + /// + /// The identifier of the last data record. + /// + [JsonPropertyName("last_id")] + public string LastId { get; set; } = string.Empty; + + /// + /// Indicates of more pages of data exist. + /// + [JsonPropertyName("has_more")] + public bool HasMore { get; set; } +} diff --git a/dotnet/src/Experimental/Assistants/Models/OpenAIParameters.cs b/dotnet/src/Experimental/Assistants/Models/OpenAIParameters.cs new file mode 100644 index 000000000000..36d3892e2a87 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/OpenAIParameters.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// Wrapper for parameter map. +/// +internal sealed class OpenAIParameters +{ + /// + /// Empty parameter set. + /// + public static readonly OpenAIParameters Empty = new(); + + /// + /// Always "object" + /// + [JsonPropertyName("type")] + public string Type { get; set; } = "object"; + + /// + /// Set of parameters. + /// + [JsonPropertyName("properties")] + public Dictionary Properties { get; set; } = new(); + + /// + /// Set of parameters. + /// + [JsonPropertyName("required")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public List? Required { get; set; } +} + +/// +/// Wrapper for parameter definition. +/// +internal sealed class OpenAIParameter +{ + /// + /// The parameter type. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = "object"; + + /// + /// The parameter description. + /// + [JsonPropertyName("description")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Description { get; set; } +} diff --git a/dotnet/src/Experimental/Assistants/Models/ThreadMessageModel.cs b/dotnet/src/Experimental/Assistants/Models/ThreadMessageModel.cs new file mode 100644 index 000000000000..05efab92ed87 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/ThreadMessageModel.cs @@ -0,0 +1,123 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 +#pragma warning disable CA1852 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// list of run steps belonging to a run. +/// +internal sealed class ThreadMessageListModel : OpenAIListModel +{ + // No specialization +} + +/// +/// Represents a message within a thread. +/// +internal sealed class ThreadMessageModel +{ + /// + /// Identifier, which can be referenced in API endpoints. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// Always "thread.message" + /// + [JsonPropertyName("object")] +#pragma warning disable CA1720 // Identifier contains type name - We don't control the schema + public string Object { get; set; } = "thread.message"; +#pragma warning restore CA1720 // Identifier contains type name + + /// + /// Unix timestamp (in seconds) for when the message was created. + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; set; } + + /// + /// The thread ID that this message belongs to. + /// + [JsonPropertyName("thread_id")] + public string ThreadId { get; set; } = string.Empty; + + /// + /// The entity that produced the message. One of "user" or "assistant". 
+ /// + [JsonPropertyName("role")] + public string Role { get; set; } = string.Empty; + + /// + /// The content of the message in array of text and/or images. + /// + [JsonPropertyName("content")] + public List Content { get; set; } = new List(); + + /// + /// A list of file IDs that the assistant should use. + /// + [JsonPropertyName("file_ids")] + public List FileIds { get; set; } = new List(); + + /// + /// If applicable, the ID of the assistant that authored this message. + /// + [JsonPropertyName("assistant_id")] + public string AssistantId { get; set; } = string.Empty; + + /// + /// If applicable, the ID of the run associated with the authoring of this message. + /// + [JsonPropertyName("run_id")] + public string RunId { get; set; } = string.Empty; + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the + /// object in a structured format. Keys can be a maximum of 64 + /// characters long and values can be a maximum of 512 characters long. + /// + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; set; } = new Dictionary(); + + /// + /// Representa contents within a message. + /// + public sealed class ContentModel + { + /// + /// Type of content. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// Text context. + /// + [JsonPropertyName("text")] + public TextContentModel? Text { get; set; } + } + + /// + /// Text content. + /// + public sealed class TextContentModel + { + /// + /// The text itself. + /// + [JsonPropertyName("value")] + public string Value { get; set; } = string.Empty; + + /// + /// Any annotations on the text. + /// + [JsonPropertyName("annotations")] + public List Annotations { get; set; } = new List(); + } +} diff --git a/dotnet/src/Experimental/Assistants/Models/ThreadModel.cs b/dotnet/src/Experimental/Assistants/Models/ThreadModel.cs new file mode 100644 index 000000000000..1dd73daa4f02 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/ThreadModel.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// Model of Thread data returned from OpenAI +/// +internal sealed class ThreadModel +{ + /// + /// Identifier, which can be referenced in API endpoints. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// Always "thread" + /// + [JsonPropertyName("object")] +#pragma warning disable CA1720 // Identifier contains type name - We don't control the schema + public string Object { get; set; } = "thread"; +#pragma warning restore CA1720 // Identifier contains type name + + /// + /// The Unix timestamp (in seconds) for when the thread was created. + /// + [JsonPropertyName("created_at")] + public int CreatedAt { get; set; } + + /// + /// Set of 16 key-value pairs that can be attached to an object. + /// This can be useful for storing additional information about the + /// object in a structured format. Keys can be a maximum of 64 + /// characters long and values can be a maximum of 512 characters long. 
+ /// + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; set; } = new Dictionary(); +} diff --git a/dotnet/src/Experimental/Assistants/Models/ThreadRunModel.cs b/dotnet/src/Experimental/Assistants/Models/ThreadRunModel.cs new file mode 100644 index 000000000000..062963a029ad --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/ThreadRunModel.cs @@ -0,0 +1,139 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// Represents an execution run on a thread. +/// +internal sealed class ThreadRunModel +{ + /// + /// Identifier, which can be referenced in API endpoints. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// Always "thread.run" + /// + [JsonPropertyName("object")] +#pragma warning disable CA1720 // Identifier contains type name - We don't control the schema + public string Object { get; set; } = "thread.run"; +#pragma warning restore CA1720 // Identifier contains type name + + /// + /// Unix timestamp (in seconds) for when the run was created. + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; set; } + + /// + /// ID of the assistant used for execution of this run. + /// + [JsonPropertyName("assistant_id")] + public string AssistantId { get; set; } = string.Empty; + + /// + /// ID of the thread that was executed on as a part of this run. + /// + [JsonPropertyName("thread_id")] + public string ThreadId { get; set; } = string.Empty; + + /// + /// The status of the run, which can be one of: + /// queued, in_progress, requires_action, cancelling, cancelled, failed, completed, or expired. + /// + [JsonPropertyName("status")] + public string Status { get; set; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the run was started. + /// + [JsonPropertyName("started_at")] + public long? StartedAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run will expire. + /// + [JsonPropertyName("expires_at")] + public long? ExpiresAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run was cancelled. + /// + [JsonPropertyName("cancelled_at")] + public long? CancelledAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run failed. + /// + [JsonPropertyName("failed_at")] + public long? FailedAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run was completed. + /// + [JsonPropertyName("completed_at")] + public long? CompletedAt { get; set; } + + /// + /// The last error associated with this run. Will be null if there are no errors. + /// + [JsonPropertyName("last_error")] + public ErrorModel? LastError { get; set; } + + /// + /// The model that the assistant used for this run. + /// + [JsonPropertyName("model")] + public string Model { get; set; } = string.Empty; + + /// + /// The instructions that the assistant used for this run. + /// + [JsonPropertyName("instructions")] + public string Instructions { get; set; } = string.Empty; + + /// + /// The list of tools that the assistant used for this run. + /// + [JsonPropertyName("tools")] + public List Tools { get; set; } = new List(); + + /// + /// The list of File IDs the assistant used for this run. + /// + [JsonPropertyName("file_ids")] + public List FileIds { get; set; } = new List(); + + /// + /// Set of 16 key-value pairs that can be attached to an object. 
+ /// This can be useful for storing additional information about the + /// object in a structured format. Keys can be a maximum of 64 + /// characters long and values can be a maximum of 512 characters long. + /// + [JsonPropertyName("metadata")] + public Dictionary Metadata { get; set; } = new Dictionary(); + + /// + /// Run error information. + /// + public sealed class ErrorModel + { + /// + /// Error code. + /// + [JsonPropertyName("code")] + public string Code { get; set; } = string.Empty; + + /// + /// Error message. + /// + [JsonPropertyName("message")] + public string Message { get; set; } = string.Empty; + } +} diff --git a/dotnet/src/Experimental/Assistants/Models/ThreadRunStepModel.cs b/dotnet/src/Experimental/Assistants/Models/ThreadRunStepModel.cs new file mode 100644 index 000000000000..c94b65632733 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/ThreadRunStepModel.cs @@ -0,0 +1,204 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 + +using System; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// list of run steps belonging to a run. +/// +internal sealed class ThreadRunStepListModel : OpenAIListModel +{ + // No specialization +} + +/// +/// Step in a run on a thread. +/// +internal sealed class ThreadRunStepModel +{ + /// + /// Identifier of the run step, which can be referenced in API endpoints. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// Always "thread.run.step" + /// + [JsonPropertyName("object")] +#pragma warning disable CA1720 // Identifier contains type name - We don't control the schema + public string Object { get; set; } = "thread.run.step"; +#pragma warning restore CA1720 // Identifier contains type name + + /// + /// Unix timestamp (in seconds) for when the run step was created. + /// + [JsonPropertyName("created_at")] + public long CreatedAt { get; set; } + + /// + /// The ID of the run to which the run step belongs. + /// + [JsonPropertyName("run_id")] + public string RunId { get; set; } = string.Empty; + + /// + /// ID of the assistant associated with the run step. + /// + [JsonPropertyName("assistant_id")] + public string AssistantId { get; set; } = string.Empty; + + /// + /// The ID of the thread to which the run and run step belongs. + /// + [JsonPropertyName("thread_id")] + public string ThreadId { get; set; } = string.Empty; + + /// + /// The type of run step, which can be either message_creation or tool_calls. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// The status of the run step, which can be one of: + /// in_progress, cancelled, failed, completed, or expired. + /// + [JsonPropertyName("status")] + public string Status { get; set; } = string.Empty; + + /// + /// Unix timestamp (in seconds) for when the run step was cancelled. + /// + [JsonPropertyName("cancelled_at")] + public long? CancelledAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run step completed. + /// + [JsonPropertyName("completed_at")] + public long? CompletedAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run step expired. + /// A step is considered expired if the parent run is expired. + /// + [JsonPropertyName("expired_at")] + public long? ExpiredAt { get; set; } + + /// + /// Unix timestamp (in seconds) for when the run step failed. + /// + [JsonPropertyName("failed_at")] + public long? 
FailedAt { get; set; } + + /// + /// The last error associated with this run step. Will be null if there are no errors. + /// + [JsonPropertyName("last_error")] + public string LastError { get; set; } = string.Empty; + + /// + /// The details of the run step. + /// + [JsonPropertyName("step_details")] + public StepDetailsModel StepDetails { get; set; } = StepDetailsModel.Empty; + + /// + /// Details of a run step. + /// + public sealed class StepDetailsModel + { + /// + /// Empty definition + /// + public static StepDetailsModel Empty = new(); + + /// + /// Type of detail. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// Details of the message creation by the run step. + /// + [JsonPropertyName("message_creation")] + public MessageCreationDetailsModel? MessageCreation { get; set; } + + /// + /// Details of tool calls. + /// + [JsonPropertyName("tool_calls")] + public ToolCallsDetailsModel[] ToolCalls { get; set; } = Array.Empty(); + } + + /// + /// Message creation details. + /// + public sealed class MessageCreationDetailsModel + { + /// + /// ID of the message that was created by this run step. + /// + [JsonPropertyName("message_id")] + public string MessageId { get; set; } = string.Empty; + } + + /// + /// Tool call details. + /// + public sealed class ToolCallsDetailsModel + { + /// + /// ID of the tool call. + /// + [JsonPropertyName("id")] + public string Id { get; set; } = string.Empty; + + /// + /// The type of tool call. + /// + [JsonPropertyName("type")] + public string Type { get; set; } = string.Empty; + + /// + /// The definition of the function that was called. + /// + [JsonPropertyName("function")] + public FunctionDetailsModel Function { get; set; } = FunctionDetailsModel.Empty; + } + + /// + /// Function call details. + /// + public sealed class FunctionDetailsModel + { + /// + /// Empty definition + /// + public static FunctionDetailsModel Empty = new(); + + /// + /// The name of the function. + /// + [JsonPropertyName("name")] + public string Name { get; set; } = string.Empty; + + /// + /// The arguments passed to the function. + /// + [JsonPropertyName("arguments")] + public string Arguments { get; set; } = string.Empty; + + /// + /// The output of the function. + /// This will be null if the outputs have not been submitted yet. + /// + [JsonPropertyName("output")] + public string Output { get; set; } = string.Empty; + } +} diff --git a/dotnet/src/Experimental/Assistants/Models/ToolModel.cs b/dotnet/src/Experimental/Assistants/Models/ToolModel.cs new file mode 100644 index 000000000000..6e13c0e9b5fd --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/ToolModel.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +/// +/// Tool entry +/// +internal sealed record ToolModel +{ + /// + /// Type of tool to have at assistant's disposition + /// + [JsonPropertyName("type")] + public string Type { get; init; } = string.Empty; + + /// + /// The function definition for Type = 'function'. + /// + [JsonPropertyName("function")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public FunctionModel? Function { get; init; } + + /// + /// Defines the function when ToolModel.Type == 'function'. + /// + public sealed record FunctionModel + { + /// + /// The function name. 
+ /// + [JsonPropertyName("name")] + public string Name { get; init; } = string.Empty; + + /// + /// The function description. + /// + [JsonPropertyName("description")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Description { get; init; } + + /// + /// The function description. + /// + [JsonPropertyName("parameters")] + public OpenAIParameters Parameters { get; init; } = OpenAIParameters.Empty; + } +} diff --git a/dotnet/src/Experimental/Assistants/Models/ToolResultModel.cs b/dotnet/src/Experimental/Assistants/Models/ToolResultModel.cs new file mode 100644 index 000000000000..f7f9e4a0c310 --- /dev/null +++ b/dotnet/src/Experimental/Assistants/Models/ToolResultModel.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable CA1812 +#pragma warning disable CA1852 + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Assistants.Models; + +internal sealed class ToolResultModel +{ + private static readonly object s_placeholder = new(); + + /// + /// The tool call identifier. + /// + [JsonPropertyName("tool_call_id")] + public string CallId { get; set; } = string.Empty; + + /// + /// The tool output + /// + [JsonPropertyName("output")] + public object Output { get; set; } = s_placeholder; +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/.editorconfig b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/CollectEmailPlugin.cs b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/CollectEmailPlugin.cs new file mode 100644 index 000000000000..9fee46ea2bd7 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/CollectEmailPlugin.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.ComponentModel; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Experimental.Orchestration; + +namespace SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests; + +public sealed class CollectEmailPlugin +{ + private const string Goal = "Collect email from user"; + + private const string EmailRegex = @"^([\w\.\-]+)@([\w\-]+)((\.(\w){2,3})+)$"; + + private const string SystemPrompt = + $@"I am AI assistant and will only answer questions related to collect email. +The email should conform the regex: {EmailRegex} + +If I cannot answer, say that I don't know. +Do not expose the regex unless asked. 
+"; + + private readonly IChatCompletionService _chat; + + private int MaxTokens { get; set; } = 256; + + private readonly PromptExecutionSettings _chatRequestSettings; + + public CollectEmailPlugin(Kernel kernel) + { + this._chat = kernel.GetRequiredService(); + this._chatRequestSettings = new OpenAIPromptExecutionSettings + { + MaxTokens = this.MaxTokens, + StopSequences = new List() { "Observation:" }, + Temperature = 0 + }; + } + + [KernelFunction("ConfigureEmailAddress")] + [Description("Useful to assist in configuration of email address, must be called after email provided")] + public async Task CollectEmailAsync( + [Description("The email address provided by the user, pass no matter what the value is")] + // ReSharper disable once InconsistentNaming +#pragma warning disable CA1707 // Identifiers should not contain underscores + string email_address, +#pragma warning restore CA1707 // Identifiers should not contain underscores + KernelArguments arguments) + { + var chat = new ChatHistory(SystemPrompt); + chat.AddUserMessage(Goal); + + ChatHistory? chatHistory = arguments.GetChatHistory(); + if (chatHistory?.Count > 0) + { + chat.AddRange(chatHistory); + } + + if (!string.IsNullOrEmpty(email_address) && IsValidEmail(email_address)) + { + return "Thanks for providing the info, the following email would be used in subsequent steps: " + email_address; + } + + // invalid email, prompt user to provide a valid email + arguments["email_address"] = string.Empty; + arguments.PromptInput(); + + var response = await this._chat.GetChatMessageContentAsync(chat).ConfigureAwait(false); + + return response.Content ?? string.Empty; + } + + private static bool IsValidEmail(string email) + { + // check using regex + var regex = new Regex(EmailRegex); + return regex.IsMatch(email); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj new file mode 100644 index 000000000000..d28f7036a6b5 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj @@ -0,0 +1,45 @@ + + + SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests + SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests + net6.0 + LatestMajor + true + false + CA2007,VSTHRD111,SKEXP0102,SKEXP0052,SKEXP0054 + b7762d10-e29b-4bb1-8b74-b6d69a667dd4 + + + + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + Always + + + Always + + + \ No newline at end of file diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/FlowOrchestratorTests.cs b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/FlowOrchestratorTests.cs new file mode 100644 index 000000000000..f4be196ac805 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/FlowOrchestratorTests.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft. All rights reserved. 
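+// Sketch for illustration only (model name and credentials below are hypothetical): outside of the flow
+// orchestrator, CollectEmailPlugin above could also be registered with a kernel directly, e.g.:
+//
+//   var kernel = Kernel.CreateBuilder()
+//       .AddOpenAIChatCompletion(modelId: "gpt-4", apiKey: "<your key>")
+//       .Build();
+//   kernel.ImportPluginFromObject(new CollectEmailPlugin(kernel), "CollectEmailPlugin");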
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Experimental.Orchestration; +using Microsoft.SemanticKernel.Memory; +using Microsoft.SemanticKernel.Plugins.Web; +using Microsoft.SemanticKernel.Plugins.Web.Bing; +using SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests.TestSettings; +using xRetry; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests; + +public sealed class FlowOrchestratorTests : IDisposable +{ + private readonly string _bingApiKey; + + public FlowOrchestratorTests(ITestOutputHelper output) + { + this._logger = new XunitLogger(output); + this._testOutputHelper = new RedirectOutput(output); + + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + string? bingApiKeyCandidate = this._configuration["Bing:ApiKey"]; + Assert.NotNull(bingApiKeyCandidate); + this._bingApiKey = bingApiKeyCandidate; + } + + [RetryFact(maxRetries: 3)] + public async Task CanExecuteFlowAsync() + { + // Arrange + IKernelBuilder builder = this.InitializeKernelBuilder(); + var bingConnector = new BingConnector(this._bingApiKey); + var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); + var sessionId = Guid.NewGuid().ToString(); + string dummyAddress = "abc@xyz.com"; + + Dictionary plugins = new() + { + { webSearchEnginePlugin, "WebSearch" } + }; + + Microsoft.SemanticKernel.Experimental.Orchestration.Flow flow = FlowSerializer.DeserializeFromYaml(@" +goal: answer question and sent email +steps: + - goal: What is the tallest mountain in Asia? How tall is it divided by 2? + plugins: + - WebSearchEnginePlugin + provides: + - answer + - goal: Collect email address + plugins: + - CollectEmailPlugin + provides: + - email_address + - goal: Send email + plugins: + - SendEmailPlugin + requires: + - email_address + - answer + provides: + - email +"); + + var flowOrchestrator = new FlowOrchestrator( + builder, + await FlowStatusProvider.ConnectAsync(new VolatileMemoryStore()), + plugins, + config: new FlowOrchestratorConfig() { MaxStepIterations = 20 }); + + // Act + var result = await flowOrchestrator.ExecuteFlowAsync(flow, sessionId, "What is the tallest mountain in Asia? How tall is it divided by 2?"); + + // Assert + // Loose assertion -- make sure that the plan was executed and pause when it needs interact with user to get more input + var response = result.GetValue>()!.First(); + Assert.Contains("email", response, StringComparison.InvariantCultureIgnoreCase); + + // Act + result = await flowOrchestrator.ExecuteFlowAsync(flow, sessionId, $"my email is {dummyAddress}"); + + // Assert + var emailPayload = result.Metadata!["email"] as string; + Assert.Contains(dummyAddress, emailPayload, StringComparison.InvariantCultureIgnoreCase); + Assert.Contains("Everest", emailPayload, StringComparison.InvariantCultureIgnoreCase); + } + + private IKernelBuilder InitializeKernelBuilder() + { + AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + return Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey); + } + + private readonly ILoggerFactory _logger; + private readonly RedirectOutput _testOutputHelper; + private readonly IConfigurationRoot _configuration; + + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + ~FlowOrchestratorTests() + { + this.Dispose(false); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + if (this._logger is IDisposable ld) + { + ld.Dispose(); + } + + this._testOutputHelper.Dispose(); + } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/README.md b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/README.md new file mode 100644 index 000000000000..fec79f00d9bc --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/README.md @@ -0,0 +1,115 @@ +# Experimental Flow Orchestrator Integration Tests + +## Requirements + +1. **Azure OpenAI**: go to the [Azure OpenAI Quickstart](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart) + and deploy an instance of Azure OpenAI, deploy a model like "text-davinci-003" find your Endpoint and API key. +2. **OpenAI**: go to [OpenAI](https://openai.com/product/) to register and procure your API key. +3. **Azure Bing Web Search API**: go to [Bing Web Search API](https://www.microsoft.com/en-us/bing/apis/bing-web-search-api) + and select `Try Now` to get started. + +## Setup + +### Option 1: Use Secret Manager + +Integration tests will require secrets and credentials, to access OpenAI, Azure OpenAI, +Bing and other resources. + +We suggest using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) +to avoid the risk of leaking secrets into the repository, branches and pull requests. +You can also use environment variables if you prefer. + +To set your secrets with Secret Manager: + +``` +cd dotnet/src/IntegrationTests + +dotnet user-secrets init +dotnet user-secrets set "OpenAI:ServiceId" "text-davinci-003" +dotnet user-secrets set "OpenAI:ModelId" "text-davinci-003" +dotnet user-secrets set "OpenAI:ChatModelId" "gpt-4" +dotnet user-secrets set "OpenAI:ApiKey" "..." + +dotnet user-secrets set "AzureOpenAI:ServiceId" "azure-text-davinci-003" +dotnet user-secrets set "AzureOpenAI:DeploymentName" "text-davinci-003" +dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "gpt-4" +dotnet user-secrets set "AzureOpenAI:Endpoint" "https://contoso.openai.azure.com/" +dotnet user-secrets set "AzureOpenAI:ApiKey" "..." + +dotnet user-secrets set "AzureOpenAIEmbeddings:ServiceId" "azure-text-embedding-ada-002" +dotnet user-secrets set "AzureOpenAIEmbeddings:DeploymentName" "text-embedding-ada-002" +dotnet user-secrets set "AzureOpenAIEmbeddings:Endpoint" "https://contoso.openai.azure.com/" +dotnet user-secrets set "AzureOpenAIEmbeddings:ApiKey" "..." + +dotnet user-secrets set "Bing:ApiKey" "..." +``` + +### Option 2: Use Configuration File +1. Create a `testsettings.development.json` file next to `testsettings.json`. This file will be ignored by git, + the content will not end up in pull requests, so it's safe for personal settings. Keep the file safe. +2. Edit `testsettings.development.json` and + 1. 
set you Azure OpenAI and OpenAI keys and settings found in Azure portal and OpenAI website. + 2. set the `Bing:ApiKey` using the API key you can find in the Azure portal. + +For example: + +```json +{ + "OpenAI": { + "ServiceId": "text-davinci-003", + "ModelId": "text-davinci-003", + "ChatModelId": "gpt-4", + "ApiKey": "sk-...." + }, + "AzureOpenAI": { + "ServiceId": "azure-text-davinci-003", + "DeploymentName": "text-davinci-003", + "ChatDeploymentName": "gpt-4", + "Endpoint": "https://contoso.openai.azure.com/", + "ApiKey": "...." + }, + "OpenAIEmbeddings": { + "ServiceId": "text-embedding-ada-002", + "ModelId": "text-embedding-ada-002", + "ApiKey": "sk-...." + }, + "AzureOpenAIEmbeddings": { + "ServiceId": "azure-text-embedding-ada-002", + "DeploymentName": "text-embedding-ada-002", + "Endpoint": "https://contoso.openai.azure.com/", + "ApiKey": "...." + }, + "Bing": { + "ApiKey": "...." + } +} +``` + +### Option 3: Use Environment Variables +You may also set the test settings in your environment variables. The environment variables will override the settings in the `testsettings.development.json` file. + +When setting environment variables, use a double underscore (i.e. "\_\_") to delineate between parent and child properties. For example: + +- bash: + + ```bash + export OpenAI__ApiKey="sk-...." + export AzureOpenAI__ApiKey="...." + export AzureOpenAI__DeploymentName="azure-text-davinci-003" + export AzureOpenAI__ChatDeploymentName="gpt-4" + export AzureOpenAIEmbeddings__DeploymentName="azure-text-embedding-ada-002" + export AzureOpenAI__Endpoint="https://contoso.openai.azure.com/" + export Bing__ApiKey="...." + ``` + +- PowerShell: + + ```ps + $env:OpenAI__ApiKey = "sk-...." + $env:AzureOpenAI__ApiKey = "...." + $env:AzureOpenAI__DeploymentName = "azure-text-davinci-003" + $env:AzureOpenAI__ChatDeploymentName = "gpt-4" + $env:AzureOpenAIEmbeddings__DeploymentName = "azure-text-embedding-ada-002" + $env:AzureOpenAI__Endpoint = "https://contoso.openai.azure.com/" + $env:Bing__ApiKey = "...." + ``` diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/RedirectOutput.cs b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/RedirectOutput.cs new file mode 100644 index 000000000000..9f56e701bd7e --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/RedirectOutput.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Text; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; + +namespace SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests; + +public sealed class RedirectOutput : TextWriter, ILogger, ILoggerFactory +{ + private readonly ITestOutputHelper _output; + private readonly StringBuilder _logs; + + public RedirectOutput(ITestOutputHelper output) + { + this._output = output; + this._logs = new StringBuilder(); + } + + public override Encoding Encoding { get; } = Encoding.UTF8; + + public override void WriteLine(string? value) + { + this._output.WriteLine(value); + this._logs.AppendLine(value); + } + + IDisposable ILogger.BeginScope(TState state) + { + return null!; + } + + bool ILogger.IsEnabled(LogLevel logLevel) + { + return true; + } + + public string GetLogs() + { + return this._logs.ToString(); + } + + void ILogger.Log(LogLevel logLevel, EventId eventId, TState state, Exception? 
exception, Func formatter) + { + var message = formatter(state, exception); + this._output?.WriteLine(message); + this._logs.AppendLine(message); + } + + ILogger ILoggerFactory.CreateLogger(string categoryName) => this; + + void ILoggerFactory.AddProvider(ILoggerProvider provider) => throw new NotSupportedException(); +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/SendEmailPlugin.cs b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/SendEmailPlugin.cs new file mode 100644 index 000000000000..a1ecd274ff9b --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/SendEmailPlugin.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ComponentModel; +using System.Text.Json; +using Microsoft.SemanticKernel; + +namespace SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests; + +public sealed class SendEmailPlugin +{ + private static readonly JsonSerializerOptions s_writeIndented = new() { WriteIndented = true }; + + [KernelFunction] + [Description("Send email")] + public string SendEmail( + // ReSharper disable once InconsistentNaming +#pragma warning disable CA1707 // Identifiers should not contain underscores + string email_address, +#pragma warning restore CA1707 // Identifiers should not contain underscores + string answer, + KernelArguments variables) + { + var contract = new Email() + { + Address = email_address, + Content = answer, + }; + + // for demo purpose only + string emailPayload = JsonSerializer.Serialize(contract, s_writeIndented); + variables["email"] = emailPayload; + + return "Here's the API contract I will post to mail server: " + emailPayload; + } + + private sealed class Email + { + public string? Address { get; set; } + + public string? Content { get; set; } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs new file mode 100644 index 000000000000..f67d8bd814a9 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests.TestSettings; + +[SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated", + Justification = "Configuration classes are instantiated through IConfiguration.")] +internal sealed class AzureOpenAIConfiguration +{ + public string ServiceId { get; set; } + + public string DeploymentName { get; set; } + + public string? ChatDeploymentName { get; set; } + + public string Endpoint { get; set; } + + public string ApiKey { get; set; } + + public AzureOpenAIConfiguration(string serviceId, string deploymentName, string endpoint, string apiKey, string? 
chatDeploymentName = null) + { + this.ServiceId = serviceId; + this.DeploymentName = deploymentName; + this.ChatDeploymentName = chatDeploymentName; + this.Endpoint = endpoint; + this.ApiKey = apiKey; + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/TestSettings/OpenAIConfiguration.cs b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/TestSettings/OpenAIConfiguration.cs new file mode 100644 index 000000000000..a861d1a4cebe --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/TestSettings/OpenAIConfiguration.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests.TestSettings; + +[SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated", + Justification = "Configuration classes are instantiated through IConfiguration.")] +internal sealed class OpenAIConfiguration +{ + public string ServiceId { get; set; } + public string ModelId { get; set; } + public string? ChatModelId { get; set; } + public string ApiKey { get; set; } + + public OpenAIConfiguration(string serviceId, string modelId, string apiKey, string? chatModelId = null) + { + this.ServiceId = serviceId; + this.ModelId = modelId; + this.ChatModelId = chatModelId; + this.ApiKey = apiKey; + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/XunitLogger.cs b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/XunitLogger.cs new file mode 100644 index 000000000000..279ed17a7322 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/XunitLogger.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; + +namespace SemanticKernel.Experimental.Orchestration.Flow.IntegrationTests; + +/// +/// A logger that writes to the Xunit test output +/// +internal sealed class XunitLogger : ILoggerFactory, ILogger, IDisposable +{ + private readonly ITestOutputHelper _output; + + public XunitLogger(ITestOutputHelper output) + { + this._output = output; + } + + /// + public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func formatter) + { + this._output.WriteLine(state?.ToString()); + } + + /// + public bool IsEnabled(LogLevel logLevel) => true; + + /// + IDisposable ILogger.BeginScope(TState state) => this; + + /// + public void Dispose() + { + // This class is marked as disposable to support the BeginScope method. + // However, there is no need to dispose anything. 
+ } + + public ILogger CreateLogger(string categoryName) => this; + + public void AddProvider(ILoggerProvider provider) => throw new NotSupportedException(); +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/testsettings.json b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/testsettings.json new file mode 100644 index 000000000000..2b5e41c5cbd7 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/testsettings.json @@ -0,0 +1,34 @@ +{ + "OpenAI": { + "ServiceId": "text-davinci-003", + "ModelId": "text-davinci-003", + "ApiKey": "" + }, + "AzureOpenAI": { + "ServiceId": "azure-text-davinci-003", + "DeploymentName": "text-davinci-003", + "ChatDeploymentName": "gpt-4", + "Endpoint": "", + "ApiKey": "" + }, + "OpenAIEmbeddings": { + "ServiceId": "text-embedding-ada-002", + "ModelId": "text-embedding-ada-002", + "ApiKey": "" + }, + "AzureOpenAIEmbeddings": { + "ServiceId": "azure-text-embedding-ada-002", + "DeploymentName": "text-embedding-ada-002", + "Endpoint": "", + "ApiKey": "" + }, + "HuggingFace": { + "ApiKey": "" + }, + "Bing": { + "ApiKey": "" + }, + "Postgres": { + "ConnectionString": "" + } +} \ No newline at end of file diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/.editorconfig b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/ChatHistorySerializerTest.cs b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/ChatHistorySerializerTest.cs new file mode 100644 index 000000000000..308824090bfc --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/ChatHistorySerializerTest.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Orchestration.Execution; +using Xunit; + +namespace SemanticKernel.Experimental.Orchestration.Flow.UnitTests; + +public class ChatHistorySerializerTest +{ + [Fact] + public void CanDeserializeChatHistory() + { + string input = "[{\"Role\":\"assistant\",\"Content\":\"To configure the email notification, please provide the following information:\\n\\n1. Email address: (Enter the valid email address)\\n2. Notification time: (Enter the schedule of notification)\\n3. 
Email Content: (Enter the content expected from email notification)\\n\\nOnce you have provided this information, please type \\u0022confirmed\\u0022 to confirm the details.\"}]\r\n"; + var history = ChatHistorySerializer.Deserialize(input); + + Assert.NotNull(history); + Assert.Single(history); + Assert.Equal(AuthorRole.Assistant.Label, history[0].Role.Label); + } + + [Fact] + public void CanSerializeChatHistory() + { + var history = new ChatHistory(); + var systemMessage = "system"; + var userMessage = "user"; + var assistantMessage = "assistant"; + + history.AddSystemMessage(systemMessage); + history.AddUserMessage(userMessage); + history.AddAssistantMessage(assistantMessage); + + var serialized = ChatHistorySerializer.Serialize(history); + var deserialized = ChatHistorySerializer.Deserialize(serialized); + + Assert.NotNull(deserialized); + + Assert.Equal(deserialized[0].Role, AuthorRole.System); + Assert.Equal(deserialized[0].Content, systemMessage); + + Assert.Equal(deserialized[1].Role, AuthorRole.User); + Assert.Equal(deserialized[1].Content, userMessage); + + Assert.Equal(deserialized[2].Role, AuthorRole.Assistant); + Assert.Equal(deserialized[2].Content, assistantMessage); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/Experimental.Orchestration.Flow.UnitTests.csproj b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/Experimental.Orchestration.Flow.UnitTests.csproj new file mode 100644 index 000000000000..770a1a3a7d1b --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/Experimental.Orchestration.Flow.UnitTests.csproj @@ -0,0 +1,38 @@ + + + SemanticKernel.Experimental.Orchestration.Flow.UnitTests + SemanticKernel.Experimental.Orchestration.Flow.UnitTests + net6.0 + LatestMajor + true + enable + disable + false + CA2007,VSTHRD111,SKEXP0102 + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + Always + + + Always + + + \ No newline at end of file diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowExtensionsTests.cs b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowExtensionsTests.cs new file mode 100644 index 000000000000..85f4bd62ac15 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowExtensionsTests.cs @@ -0,0 +1,125 @@ +// Copyright (c) Microsoft. All rights reserved. 
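+// As exercised by ChatHistorySerializerTest above, ChatHistorySerializer.Deserialize accepts a JSON
+// array of role/content records, and Serialize round-trips a history through the same format.
+// An illustrative payload:
+//
+//   [{"Role":"user","Content":"hi"},{"Role":"assistant","Content":"hello"}]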
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Orchestration; +using Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; +using Xunit; + +namespace SemanticKernel.Experimental.Orchestration.Flow.UnitTests; + +public class FlowExtensionsTests +{ + [Fact] + public async Task TestBuildReferenceStepAsync() + { + // Arrange + var flow1 = CreateFlowWithReferenceStep("flow2"); + + var flow2 = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("flow2", "test flow goal 2"); + flow2.CompletionType = CompletionType.Optional; + var step5 = new FlowStep("step1"); + step5.AddRequires("a"); + step5.AddProvides("b"); + flow2.AddProvides("b"); + flow2.AddStep(step5); + + // Act + var catalog = new InMemoryFlowCatalog(new List { flow1, flow2 }); + var flow1InCatalog = await catalog.GetFlowAsync("flow1"); + Assert.NotNull(flow1InCatalog); + + // Assert + Assert.DoesNotContain(flow1InCatalog.Steps, step => step is ReferenceFlowStep); + var flow2Step = flow1InCatalog.Steps.OfType().SingleOrDefault(); + Assert.NotNull(flow2Step); + Assert.Equal("flow2", flow2Step.Name); + Assert.Equal(CompletionType.Optional, flow2Step.CompletionType); + Assert.Equal("a", flow2Step.Requires.SingleOrDefault()); + Assert.Equal("b", flow2Step.Provides.SingleOrDefault()); + } + + [Fact] + public void TestBuildNonExistReferenceStep() + { + // Arrange + var flow1 = CreateFlowWithReferenceStep("flow2"); + + var flow2 = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("flow3", "test flow goal 2"); + var step5 = new FlowStep("step1"); + step5.AddProvides("a"); + flow2.AddProvides("a"); + flow2.AddStep(step5); + + // Act and assert + Assert.Throws(() => new InMemoryFlowCatalog(new List { flow1, flow2 })); + } + + private static Microsoft.SemanticKernel.Experimental.Orchestration.Flow CreateFlowWithReferenceStep(string referenceFlowName) + { + var flow = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("flow1", "test flow goal"); + var step1 = new FlowStep("step1"); + step1.AddProvides("a"); + var step2 = new FlowStep("step2"); + step2.AddRequires("a"); + step2.AddProvides("b"); + var step3 = new FlowStep("step3"); + step3.AddRequires("a", "b"); + step3.AddProvides("c"); + var step4 = new ReferenceFlowStep(referenceFlowName) + { + CompletionType = CompletionType.Optional + }; + flow.AddStep(step1); + flow.AddStep(step2); + flow.AddStep(step3); + flow.AddStep(step4); + + return flow; + } + + private sealed class InMemoryFlowCatalog : IFlowCatalog + { + private readonly Dictionary _flows = new(); + + internal InMemoryFlowCatalog() + { + } + + internal InMemoryFlowCatalog(IReadOnlyList flows) + { + // phase 1: register original flows + foreach (var flow in flows) + { + this._flows.Add(flow.Name, flow); + } + + // phase 2: build references + foreach (var flow in flows) + { +#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits + flow.BuildReferenceAsync(this).Wait(); +#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits + } + } + + public Task> GetFlowsAsync() + { + return Task.FromResult(this._flows.Select(_ => _.Value)); + } + + public Task GetFlowAsync(string flowName) + { + return Task.FromResult(this._flows.TryGetValue(flowName, out var flow) ? 
flow : null); + } + + public Task RegisterFlowAsync(Microsoft.SemanticKernel.Experimental.Orchestration.Flow flow) + { + this._flows.Add(flow.Name, flow); + + return Task.FromResult(true); + } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowSerializerTests.cs b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowSerializerTests.cs new file mode 100644 index 000000000000..40960f48dde2 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowSerializerTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Linq; +using Microsoft.SemanticKernel.Experimental.Orchestration; +using Xunit; + +namespace SemanticKernel.Experimental.Orchestration.Flow.UnitTests; + +public class FlowSerializerTests +{ + [Fact] + public void CanDeserializeFromYaml() + { + // Arrange + var yamlFile = "./TestData/Flow/flow.yml"; + var content = File.ReadAllText(yamlFile); + + // Act + var flow = FlowSerializer.DeserializeFromYaml(content); + + // Assert + this.ValidateFlow(flow); + } + + [Fact] + public void CanDeserializeFromJson() + { + // Arrange + var jsonFile = "./TestData/Flow/flow.json"; + var content = File.ReadAllText(jsonFile); + + // Act + var flow = FlowSerializer.DeserializeFromJson(content); + + // Assert + this.ValidateFlow(flow); + } + + private void ValidateFlow(Microsoft.SemanticKernel.Experimental.Orchestration.Flow? flow) + { + Assert.NotNull(flow); + Assert.NotEmpty(flow.Steps); + Assert.False(string.IsNullOrEmpty(flow.Goal)); + Assert.Contains("breakfast", flow.Provides); + Assert.Equal(5, flow.Steps.Count); + + var makeCoffeeStep = flow.Steps.First(step => step.Goal == "Make coffee"); + Assert.Equal("coffee_bean", makeCoffeeStep.Requires.Single()); + Assert.Equal("coffee", makeCoffeeStep.Provides.Single()); + Assert.NotNull(makeCoffeeStep.Plugins); + Assert.Single(makeCoffeeStep.Plugins); + Assert.Equal(CompletionType.Once, makeCoffeeStep.CompletionType); + + var recipeStep = flow.Steps.First(step => step.Goal == "Recipe"); + Assert.Equal("ingredients", recipeStep.Provides.Single()); + Assert.Equal(CompletionType.AtLeastOnce, recipeStep.CompletionType); + + var lunchStep = flow.Steps.First(step => step is ReferenceFlowStep) as ReferenceFlowStep; + Assert.NotNull(lunchStep); + Assert.Equal(CompletionType.Optional, lunchStep.CompletionType); + Assert.Equal("lunch_flow", lunchStep.FlowName); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowValidatorTests.cs b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowValidatorTests.cs new file mode 100644 index 000000000000..86acb272be58 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/FlowValidatorTests.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.SemanticKernel.Experimental.Orchestration; +using Xunit; + +namespace SemanticKernel.Experimental.Orchestration.Flow.UnitTests; + +public class FlowValidatorTests +{ + [Fact] + public void TestValidateFlowReturnsTrueForValidFlow() + { + // Arrange + var validator = new FlowValidator(); + var flow = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("test_flow", "test flow goal"); + var step1 = new FlowStep("step1"); + step1.AddProvides("a"); + var step2 = new FlowStep("step2"); + step2.AddRequires("a"); + step2.AddProvides("b"); + var step3 = new FlowStep("step3"); + step3.AddRequires("a", "b"); + step3.AddProvides("c"); + var step4 = new ReferenceFlowStep("another flow") + { + CompletionType = CompletionType.Optional, + StartingMessage = "Would you like to start another flow?" + }; + flow.AddStep(step1); + flow.AddStep(step2); + flow.AddStep(step3); + flow.AddStep(step4); + + // Act and assert + validator.Validate(flow); + } + + [Fact] + public void TestValidateFlowThrowForEmptyFlow() + { + // Arrange + var validator = new FlowValidator(); + var flow = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("empty flow", "empty flow"); + + // Act and assert + Assert.Throws(() => validator.Validate(flow)); + } + + [Fact] + public void TestValidateFlowThrowForFlowWithDependencyLoops() + { + // Arrange + var validator = new FlowValidator(); + var flow = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("test_flow", "test flow goal"); + var step1 = new FlowStep("step1"); + step1.AddRequires("a"); + step1.AddProvides("b"); + var step2 = new FlowStep("step2"); + step2.AddRequires("b"); + step2.AddProvides("a"); + flow.AddStep(step1); + flow.AddStep(step2); + + // Act and assert + Assert.Throws(() => validator.Validate(flow)); + } + + [Fact] + public void TestValidateFlowThrowForReferenceStepWithRequires() + { + // Arrange + var validator = new FlowValidator(); + var flow = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("test_flow", "test flow goal"); + var step1 = new ReferenceFlowStep("another flow"); + step1.AddRequires("a"); + + // Act and assert + Assert.Throws(() => validator.Validate(flow)); + } + + [Fact] + public void TestValidateFlowThrowForReferenceStepWithProvides() + { + // Arrange + var validator = new FlowValidator(); + var flow = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("test_flow", "test flow goal"); + var step1 = new ReferenceFlowStep("another flow"); + step1.AddProvides("a"); + + // Act and assert + Assert.Throws(() => validator.Validate(flow)); + } + + [Fact] + public void TestValidateFlowThrowForOptionalStepWithoutStartingMessage() + { + // Arrange + var validator = new FlowValidator(); + var flow = new Microsoft.SemanticKernel.Experimental.Orchestration.Flow("test_flow", "test flow goal"); + var step1 = new FlowStep("step1"); + step1.AddProvides("a"); + var step2 = new ReferenceFlowStep("another flow") + { + CompletionType = CompletionType.Optional + }; + flow.AddStep(step1); + flow.AddStep(step2); + + // Act and assert + Assert.Throws(() => validator.Validate(flow)); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/TestData/Flow/flow.json b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/TestData/Flow/flow.json new file mode 100644 index 000000000000..2dc16bb8e47e --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/TestData/Flow/flow.json @@ -0,0 +1,39 @@ +{ + "name": "breakfast_flow", + "goal": "Make breakfast", + "steps": [ + { + 
"goal": "Make coffee", + "plugins": ["MakeCoffeePlugin"], + "requires": ["coffee_bean"], + "provides": ["coffee"] + }, + { + "goal": "Select coffee been", + "plugins": ["CoffeeRecommendationPlugin"], + "provides": ["coffee_bean"] + }, + { + "goal": "Recipe", + "plugins": [ + "WebSearchPlugin", + "CalorieCalculatorPlugin", + "HealthCheckPlugin" + ], + "provides": ["ingredients"], + "completionType": "AtLeastOnce" + }, + { + "goal": "Cook", + "plugins": ["CookPlugin", "WebSearchPlugin"], + "requires": ["coffee", "ingredients"], + "provides": ["breakfast"] + }, + { + "flowName": "lunch_flow", + "completionType": "Optional", + "startingMessage": "Would you like to prepare the lunch as well?" + } + ], + "provides": ["breakfast"] +} diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/TestData/Flow/flow.yml b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/TestData/Flow/flow.yml new file mode 100644 index 000000000000..f5f2ad539bce --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/TestData/Flow/flow.yml @@ -0,0 +1,44 @@ +name: breakfast_flow +goal: Make breakfast +steps: + - goal: Make coffee + plugins: + - MakeCoffeePlugin + requires: + - coffee_bean + provides: + - coffee + + - goal: Select coffee been + plugins: + - CoffeeRecommendationPlugin + provides: + - coffee_bean + completionType: AtLeastOnce + + - goal: Recipe + plugins: + - WebSearchPlugin + - CalorieCalculatorPlugin + - HealthCheckPlugin + provides: + - ingredients + completionType: AtLeastOnce + transitionMessage: Do you want to add one more recipe? + + - goal: Cook + plugins: + - CookPlugin + - WebSearchPlugin + requires: + - coffee + - ingredients + provides: + - breakfast + + - flowName: lunch_flow + completionType: Optional + startingMessage: Would you like to prepare the lunch as well? + +provides: + - breakfast diff --git a/dotnet/src/Experimental/Orchestration.Flow.UnitTests/XunitHelpers/TestConsoleLogger.cs b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/XunitHelpers/TestConsoleLogger.cs new file mode 100644 index 000000000000..476f5b921b99 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow.UnitTests/XunitHelpers/TestConsoleLogger.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.Logging; + +namespace SemanticKernel.Experimental.Orchestration.Flow.UnitTests.XunitHelpers; + +/// +/// Basic logger printing to console +/// +internal static class TestConsoleLogger +{ + internal static ILogger Log => LoggerFactory.CreateLogger(); + + internal static ILoggerFactory LoggerFactory => s_loggerFactory.Value; + private static readonly Lazy s_loggerFactory = new(LogBuilder); + + private static ILoggerFactory LogBuilder() + { + return Microsoft.Extensions.Logging.LoggerFactory.Create(builder => + { + builder.SetMinimumLevel(LogLevel.Trace); + // builder.AddFilter("Microsoft", LogLevel.Trace); + // builder.AddFilter("Microsoft", LogLevel.Debug); + // builder.AddFilter("Microsoft", LogLevel.Information); + // builder.AddFilter("Microsoft", LogLevel.Warning); + // builder.AddFilter("Microsoft", LogLevel.Error); + builder.AddConsole(); + }); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowCatalog.cs b/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowCatalog.cs new file mode 100644 index 000000000000..4765c1dbabde --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowCatalog.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; + +/// +/// Interface for flow catalog, which provides functionality of flow registration, enumeration and search. +/// +public interface IFlowCatalog +{ + /// + /// Get all instances from the repository + /// + /// flows + Task> GetFlowsAsync(); + + /// + /// Get by name + /// + /// the flow name + /// flow given the name + Task GetFlowAsync(string flowName); + + /// + /// Register flow in the catalog + /// + /// flow + /// + Task RegisterFlowAsync(Flow flow); +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowExecutor.cs b/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowExecutor.cs new file mode 100644 index 000000000000..48f6f8eabdc2 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowExecutor.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; + +/// +/// Flow executor interface +/// +public interface IFlowExecutor +{ + /// + /// Execute the + /// + /// Flow + /// Session id, which is used to track the execution status. + /// The input from client to continue the execution. + /// The request kernel arguments + /// The execution context + Task ExecuteFlowAsync(Flow flow, string sessionId, string input, KernelArguments kernelArguments); +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowStatusProvider.cs b/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowStatusProvider.cs new file mode 100644 index 000000000000..8491a406ab57 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowStatusProvider.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; + +/// +/// The flow status provider interface. +/// +public interface IFlowStatusProvider +{ + /// + /// Get the state of current execution session. + /// + /// The session id + /// The variables + Task GetExecutionStateAsync(string sessionId); + + /// + /// Save the state for current execution session. + /// + /// The session id + /// The execution state + /// Task + Task SaveExecutionStateAsync(string sessionId, ExecutionState state); + + /// + /// Get the chat history for current execution session. + /// + /// The session id + /// The step id + /// + Task GetChatHistoryAsync(string sessionId, string stepId); + + /// + /// Save the chat history for current execution session. + /// + /// The session id + /// The step id + /// The chat history + /// + Task SaveChatHistoryAsync(string sessionId, string stepId, ChatHistory history); + + /// + /// Get the ReAct history for current execution . + /// + /// The session id + /// The step id + /// The list of ReAct steps for current flow step. + Task> GetReActStepsAsync(string sessionId, string stepId); + + /// + /// Save the ReAct history for current execution step to . 
+ /// + /// The session id + /// The step id + /// The executed steps + /// Task + Task SaveReActStepsAsync(string sessionId, string stepId, List steps); +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowValidator.cs b/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowValidator.cs new file mode 100644 index 000000000000..120ccd314a8c --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Abstractions/IFlowValidator.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; + +/// +/// Flow validator interface +/// +public interface IFlowValidator +{ + /// + /// Validate if the is valid. + /// + /// + void Validate(Flow flow); +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/AssemblyInfo.cs b/dotnet/src/Experimental/Orchestration.Flow/AssemblyInfo.cs new file mode 100644 index 000000000000..88690e89ce4d --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0102")] diff --git a/dotnet/src/Experimental/Orchestration.Flow/EmbeddedResource.cs b/dotnet/src/Experimental/Orchestration.Flow/EmbeddedResource.cs new file mode 100644 index 000000000000..9ca4e4c5d14e --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/EmbeddedResource.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Reflection; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +internal static class EmbeddedResource +{ + private static readonly string? s_namespace = typeof(EmbeddedResource).Namespace; + + internal static string? Read(string name, bool throwIfNotFound = true) + { + var assembly = typeof(EmbeddedResource).GetTypeInfo().Assembly; + if (assembly is null) { throw new KernelException($"[{s_namespace}] {name} assembly not found"); } + + using Stream? resource = assembly.GetManifestResourceStream($"{s_namespace}." + name); + if (resource is null) + { + if (!throwIfNotFound) + { + return null; + } + + throw new KernelException($"[{s_namespace}] {name} resource not found"); + } + + using var reader = new StreamReader(resource); + return reader.ReadToEnd(); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Execution/ChatHistorySerializer.cs b/dotnet/src/Experimental/Orchestration.Flow/Execution/ChatHistorySerializer.cs new file mode 100644 index 000000000000..c22eae855e2b --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Execution/ChatHistorySerializer.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using System.Text.Json; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +internal static class ChatHistorySerializer +{ + internal static ChatHistory? Deserialize(string input) + { + if (string.IsNullOrEmpty(input)) + { + return null; + } + + var messages = JsonSerializer.Deserialize(input) ?? Array.Empty(); + ChatHistory history = new(); + foreach (var message in messages) + { + history.AddMessage(new AuthorRole(message.Role!), message.Content!); + } + + return history; + } + + internal static string Serialize(ChatHistory? 
history) + { + if (history is null) + { + return string.Empty; + } + + var messages = history.Select(m => new SerializableChatMessage() + { + Role = m.Role.Label, + Content = m.Content, + }); + + return JsonSerializer.Serialize(messages); + } + + private class SerializableChatMessage + { + public string? Role { get; set; } + + public string? Content { get; set; } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Execution/Constants.cs b/dotnet/src/Experimental/Orchestration.Flow/Execution/Constants.cs new file mode 100644 index 000000000000..c2c987de315c --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Execution/Constants.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +internal static class Constants +{ + /// + /// The function name to indicate stop execution and prompt user + /// + public const string StopAndPromptFunctionName = "StopAndPrompt"; + + /// + /// The parameter name of StopAndPrompt function + /// + public const string StopAndPromptParameterName = "prompt"; + + internal static class ActionVariableNames + { + /// + /// Variable name for the chat history + /// + public const string ChatHistory = "_chatHistory"; + + /// + /// Variable name for the chat input + /// + public const string ChatInput = "_chatInput"; + + /// + /// All reserved variable names + /// + public static readonly string[] All = new[] { ChatHistory, ChatInput }; + } + + internal static class ChatPluginVariables + { + /// + /// Variable name to prompt input + /// + public const string PromptInputName = "PromptInput"; + + /// + /// Variable name to exit out the of AtLeastOnce or ZeroOrMore loop + /// + public const string ExitLoopName = "ExitLoop"; + + /// + /// Variable name to force the next iteration of the of AtLeastOnce or ZeroOrMore loop + /// + public const string ContinueLoopName = "ContinueLoop"; + + /// + /// Variable name to terminate the flow + /// + public const string StopFlowName = "StopFlow"; + + /// + /// Default variable value + /// + public const string DefaultValue = "True"; + + /// + /// The variables that change the default flow + /// + public static readonly string[] ControlVariables = new[] { PromptInputName, ExitLoopName, ContinueLoopName, StopFlowName }; + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Execution/ExecutionState.cs b/dotnet/src/Experimental/Orchestration.Flow/Execution/ExecutionState.cs new file mode 100644 index 000000000000..4632d7b6fe1a --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Execution/ExecutionState.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +/// +/// Execution state +/// +public sealed class ExecutionState +{ + /// + /// Index of current step + /// + public int CurrentStepIndex { get; set; } = 0; + + /// + /// Execution state described by variables. + /// + public Dictionary Variables { get; set; } = new Dictionary(); + + /// + /// Execution state of each step + /// + public Dictionary StepStates { get; set; } = new Dictionary(); + + /// + /// Step execution state + /// + public class StepExecutionState + { + /// + /// The status of step execution + /// + public Status Status { get; set; } = Status.NotStarted; + + /// + /// The execution count of step. The value could be larger than one if the step allows repeatable execution. 
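+ /// As a rough illustration (hypothetical values, not part of this change): when such a step provides "coffee" and runs twice,
+ /// Output["coffee"] ends up with one entry per execution; AddOrUpdateVariable(executionIndex, key, value) below appends when
+ /// that execution has no value for the key yet and overwrites the existing entry otherwise.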
+ /// + public int ExecutionCount { get; set; } + + /// + /// The output variables provided by the step + /// + public Dictionary> Output { get; set; } = new Dictionary>(); + + /// + /// Add or update variable for the step + /// + /// The execution index + /// The key of variable. + /// The value of variable. + public void AddOrUpdateVariable(int executionIndex, string key, string value) + { + if (!this.Output.TryGetValue(key, out List? output)) + { + this.Output[key] = output = new(); + } + + if (output!.Count <= executionIndex) + { + output.Add(value); + } + else + { + output[executionIndex] = value; + } + } + } + + /// + /// The execution status enum + /// + public enum Status + { + /// + /// Not started + /// + NotStarted, + + /// + /// In progress + /// + InProgress, + + /// + /// Completed + /// + Completed + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Execution/FlowExecutor.cs b/dotnet/src/Experimental/Orchestration.Flow/Execution/FlowExecutor.cs new file mode 100644 index 000000000000..a53fac6c5d97 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Execution/FlowExecutor.cs @@ -0,0 +1,783 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; +using Microsoft.SemanticKernel.Experimental.Orchestration.Extensions; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +/// +/// This is a flow executor which iterates over the flow steps and executes them one by one. +/// +/// +/// For each step, it is executed in the ReAct (Reasoning-Act-Observe) style, which is similar as StepwisePlanner, with the following differences: +/// 1. It is implemented in a way so that the chat could be streamed for more effective reasoning, action and feedback loop. +/// 2. The user input would be part of observation for the engine to reason and determine next action. +/// 3. For each step, it is considered as complete by verifying all the outputs are provided in programmatic way, instead of LLM evaluation. +/// +/// Further consolidation can happen in the future so that flow executor becomes a generalization of StepwisePlanner. +/// And both chatMode and completionMode could be supported. 
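+/// A minimal usage sketch (kernelBuilder, statusProvider and globalPlugins are assumed caller-supplied instances, not defined here):
+///     var executor = new FlowExecutor(kernelBuilder, statusProvider, globalPlugins);
+///     FunctionResult result = await executor.ExecuteFlowAsync(flow, "session-1", "user input", new KernelArguments());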
+/// +internal class FlowExecutor : IFlowExecutor +{ + /// + /// The kernel builder + /// + private readonly IKernelBuilder _kernelBuilder; + + /// + /// The logger + /// + private readonly ILogger _logger; + + /// + /// The global plugin collection + /// + private readonly Dictionary _globalPluginCollection; + + /// + /// The flow planner config + /// + private readonly FlowOrchestratorConfig _config; + + /// + /// The flow status provider + /// + private readonly IFlowStatusProvider _flowStatusProvider; + + /// + /// System kernel for flow execution + /// + private readonly Kernel _systemKernel; + + /// + /// Re-Act engine for flow execution + /// + private readonly ReActEngine _reActEngine; + + /// + /// Restricted plugin name + /// + private const string RestrictedPluginName = "FlowExecutor_Excluded"; + + /// + /// The regex for parsing the final answer response + /// + private static readonly Regex s_finalAnswerRegex = + new(@"\[FINAL.+\](?.+)", RegexOptions.Singleline); + + /// + /// The regex for parsing the question + /// + private static readonly Regex s_questionRegex = + new(@"\[QUESTION\](?.+)", RegexOptions.Singleline); + + /// + /// The regex for parsing the thought response + /// + private static readonly Regex s_thoughtRegex = + new(@"\[THOUGHT\](?.+)", RegexOptions.Singleline); + + /// + /// Check repeat step function + /// + private readonly KernelFunction _checkRepeatStepFunction; + + /// + /// Check start step function + /// + private readonly KernelFunction _checkStartStepFunction; + + /// + /// ExecuteFlow function + /// + private readonly KernelFunction _executeFlowFunction; + + /// + /// ExecuteStep function + /// + private readonly KernelFunction _executeStepFunction; + + internal FlowExecutor(IKernelBuilder kernelBuilder, IFlowStatusProvider statusProvider, Dictionary globalPluginCollection, FlowOrchestratorConfig? config = null) + { + this._kernelBuilder = kernelBuilder; + this._systemKernel = kernelBuilder.Build(); + + this._logger = this._systemKernel.LoggerFactory.CreateLogger(typeof(FlowExecutor)) ?? NullLogger.Instance; + this._config = config ?? 
new FlowOrchestratorConfig(); + + this._flowStatusProvider = statusProvider; + this._globalPluginCollection = globalPluginCollection; + + var checkRepeatStepConfig = this.ImportPromptTemplateConfig("CheckRepeatStep"); + this._checkRepeatStepFunction = KernelFunctionFactory.CreateFromPrompt(checkRepeatStepConfig); + + var checkStartStepConfig = this.ImportPromptTemplateConfig("CheckStartStep"); + this._checkStartStepFunction = KernelFunctionFactory.CreateFromPrompt(checkStartStepConfig); + + this._config.ExcludedPlugins.Add(RestrictedPluginName); + this._reActEngine = new ReActEngine(this._systemKernel, this._logger, this._config); + + this._executeFlowFunction = KernelFunctionFactory.CreateFromMethod(this.ExecuteFlowAsync, "ExecuteFlow", "Execute a flow"); + this._executeStepFunction = KernelFunctionFactory.CreateFromMethod(this.ExecuteStepAsync, "ExecuteStep", "Execute a flow step"); + } + + private PromptTemplateConfig ImportPromptTemplateConfig(string functionName) + { + var config = KernelFunctionYaml.ToPromptTemplateConfig(EmbeddedResource.Read($"Plugins.{functionName}.yaml")!); + + // if AIServiceIds is specified, only include the relevant execution settings + if (this._config.AIServiceIds.Count > 0) + { + var serviceIdsToRemove = config.ExecutionSettings.Keys.Except(this._config.AIServiceIds); + foreach (var serviceId in serviceIdsToRemove) + { + config.ExecutionSettings.Remove(serviceId); + } + } + + return config; + } + + public async Task ExecuteFlowAsync(Flow flow, string sessionId, string input, KernelArguments kernelArguments) + { + Verify.NotNull(flow, nameof(flow)); + + if (this._logger.IsEnabled(LogLevel.Information)) + { + this._logger.LogInformation("Executing flow {FlowName} with sessionId={SessionId}.", flow.Name, sessionId); + } + + var sortedSteps = flow.SortSteps(); + + var rootContext = new KernelArguments(kernelArguments); + + // populate persisted state arguments + ExecutionState executionState = await this._flowStatusProvider.GetExecutionStateAsync(sessionId).ConfigureAwait(false); + List outputs = new(); + + while (executionState.CurrentStepIndex < sortedSteps.Count) + { + int stepIndex = executionState.CurrentStepIndex; + FlowStep step = sortedSteps[stepIndex]; + + foreach (var kv in executionState.Variables) + { + rootContext[kv.Key] = kv.Value; + } + + this.ValidateStep(step, rootContext); + + // init step execution state + string stepKey = $"{stepIndex}_{step.Goal}"; + if (!executionState.StepStates.TryGetValue(stepKey, out ExecutionState.StepExecutionState? stepState)) + { + stepState = new ExecutionState.StepExecutionState(); + executionState.StepStates.Add(stepKey, stepState); + } + + var stepId = $"{stepKey}_{stepState.ExecutionCount}"; + + var continueLoop = false; + var completed = step.Provides.All(_ => executionState.Variables.ContainsKey(_)); + if (!completed) + { + // On the first iteration of an Optional or ZeroOrMore step, we need to check whether the user wants to start the step + if (step.CompletionType is CompletionType.Optional or CompletionType.ZeroOrMore && stepState.Status == ExecutionState.Status.NotStarted) + { + RepeatOrStartStepResult? 
startStep = await this.CheckStartStepAsync(rootContext, step, sessionId, stepId, input).ConfigureAwait(false); + if (startStep is null) + { + // Unknown error, try again + this._logger?.LogWarning("Unexpected error when checking whether to start the step, try again"); + continue; + } + else if (startStep.Execute is null) + { + // Unconfirmed, prompt user + outputs.Add(startStep.Prompt!); + await this._flowStatusProvider.SaveExecutionStateAsync(sessionId, executionState).ConfigureAwait(false); + break; + } + else if (startStep.Execute.Value) + { + stepState.Status = ExecutionState.Status.InProgress; + await this._flowStatusProvider.SaveExecutionStateAsync(sessionId, executionState).ConfigureAwait(false); + + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation("Need to start step {StepIndex} for iteration={Iteration}, goal={StepGoal}.", stepIndex, stepState.ExecutionCount, step.Goal); + } + } + else + { + // User doesn't want to run the step + foreach (var variable in step.Provides) + { + executionState.Variables[variable] = "[]"; + } + + await this.CompleteStepAsync(rootContext, sessionId, executionState, step, stepState).ConfigureAwait(false); + + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation("Completed step {StepIndex} with iteration={Iteration}, goal={StepGoal}.", stepIndex, stepState.ExecutionCount, step.Goal); + } + + continue; + } + } + + // execute step + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation( + "Executing step {StepIndex} for iteration={Iteration}, goal={StepGoal}, input={Input}.", stepIndex, + stepState.ExecutionCount, step.Goal, input); + } + + Kernel stepKernel = this._kernelBuilder.Build(); + var stepArguments = new KernelArguments(); + foreach (var key in step.Requires) + { + stepArguments[key] = rootContext[key]; + } + + foreach (var key in step.Passthrough) + { + if (rootContext.TryGetValue(key, out var val)) + { + stepArguments[key] = val; + } + } + + FunctionResult? stepResult; + if (step is Flow flowStep) + { + stepResult = await this.ExecuteFlowAsync(flowStep, $"{sessionId}_{stepId}", input, stepArguments).ConfigureAwait(false); + } + else + { + var stepPlugins = step.LoadPlugins(stepKernel, this._globalPluginCollection); + foreach (var plugin in stepPlugins) + { + stepKernel.ImportPluginFromObject(plugin, plugin.GetType().Name); + } + + stepResult = await this.ExecuteStepAsync(step, sessionId, stepId, input, stepKernel, stepArguments).ConfigureAwait(false); + } + + if (!string.IsNullOrEmpty(stepResult.ToString()) && (stepResult.IsPromptInput() || stepResult.IsTerminateFlow())) + { + if (stepResult.ValueType == typeof(List)) + { + outputs.AddRange(stepResult.GetValue>()!); + } + else + { + outputs.Add(stepResult.ToString()); + } + } + else if (stepResult.TryGetExitLoopResponse(out string? exitResponse)) + { + stepState.Status = ExecutionState.Status.Completed; + + var metadata = stepResult.Metadata!.ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + foreach (var variable in step.Provides) + { + if (!metadata.ContainsKey(variable)) + { + metadata[variable] = string.Empty; + } + } + + stepResult = new FunctionResult(stepResult.Function, stepResult.GetValue(), metadata: metadata); + + if (!string.IsNullOrWhiteSpace(exitResponse)) + { + outputs.Add(exitResponse!); + } + + if (this._logger?.IsEnabled(LogLevel.Information) ?? 
false) + { + this._logger.LogInformation("Exiting loop for step {StepIndex} with iteration={Iteration}, goal={StepGoal}.", stepIndex, stepState.ExecutionCount, step.Goal); + } + } + else if (stepResult.IsContinueLoop()) + { + continueLoop = true; + + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation("Continuing to the next loop iteration for step {StepIndex} with iteration={Iteration}, goal={StepGoal}.", stepIndex, stepState.ExecutionCount, step.Goal); + } + } + + // check if current execution is complete by checking whether all arguments are already provided + completed = true; + foreach (var variable in step.Provides) + { + if (!stepResult.Metadata!.ContainsKey(variable)) + { + completed = false; + } + else + { + executionState.Variables[variable] = (string)stepResult.Metadata[variable]!; + stepState.AddOrUpdateVariable(stepState.ExecutionCount, variable, (string)stepResult.Metadata[variable]!); + } + } + + foreach (var variable in step.Passthrough) + { + if (stepResult.Metadata!.TryGetValue(variable, out object? variableValue)) + { + executionState.Variables[variable] = (string)variableValue!; + stepState.AddOrUpdateVariable(stepState.ExecutionCount, variable, (string)variableValue!); + + // propagate arguments to root context, needed if Flow itself is a step + this.PropagateVariable(rootContext, stepResult, variable); + } + } + + // propagate arguments to root context, needed if Flow itself is a step + foreach (var variable in Constants.ChatPluginVariables.ControlVariables) + { + this.PropagateVariable(rootContext, stepResult, variable); + } + } + + if (completed) + { + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation("Completed step {StepIndex} for iteration={Iteration}, goal={StepGoal}.", stepIndex, stepState.ExecutionCount, step.Goal); + } + + if (step.CompletionType is CompletionType.AtLeastOnce or CompletionType.ZeroOrMore && stepState.Status != ExecutionState.Status.Completed) + { + var nextStepId = $"{stepKey}_{stepState.ExecutionCount + 1}"; + var repeatStep = continueLoop + ? new RepeatOrStartStepResult(true, null) + : await this.CheckRepeatStepAsync(rootContext, step, sessionId, nextStepId, input).ConfigureAwait(false); + + if (repeatStep is null) + { + // unknown error, try again + this._logger?.LogWarning("Unexpected error when checking whether to repeat the step, try again"); + } + else if (repeatStep.Execute is null) + { + // unconfirmed, prompt user + outputs.Add(repeatStep.Prompt!); + + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation("Unclear intention, need follow up to check whether to repeat the step"); + } + + await this._flowStatusProvider.SaveExecutionStateAsync(sessionId, executionState).ConfigureAwait(false); + break; + } + else if (repeatStep.Execute.Value) + { + // need repeat the step again + foreach (var variable in step.Provides) + { + executionState.Variables.Remove(variable); + } + + stepState.ExecutionCount++; + await this._flowStatusProvider.SaveExecutionStateAsync(sessionId, executionState).ConfigureAwait(false); + + if (this._logger?.IsEnabled(LogLevel.Information) ?? 
false) + { + this._logger.LogInformation("Need repeat step {StepIndex} for iteration={Iteration}, goal={StepGoal}.", stepIndex, stepState.ExecutionCount, step.Goal); + } + } + else + { + // completed + await this.CompleteStepAsync(rootContext, sessionId, executionState, step, stepState).ConfigureAwait(false); + + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation("Completed step {StepIndex} with iteration={Iteration}, goal={StepGoal}.", stepIndex, stepState.ExecutionCount, step.Goal); + } + } + } + else + { + await this.CompleteStepAsync(rootContext, sessionId, executionState, step, stepState).ConfigureAwait(false); + } + } + else + { + await this._flowStatusProvider.SaveExecutionStateAsync(sessionId, executionState).ConfigureAwait(false); + break; + } + } + + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + foreach (var output in outputs) + { + this._logger?.LogInformation("[Output] {Output}", output); + } + } + + return new FunctionResult(this._executeFlowFunction, outputs, metadata: rootContext); + } + + private void PropagateVariable(KernelArguments rootContext, FunctionResult stepResult, string variableName) + { + if (stepResult.Metadata!.ContainsKey(variableName)) + { + rootContext[variableName] = stepResult.Metadata[variableName]; + } + } + + private async Task CompleteStepAsync(KernelArguments context, string sessionId, ExecutionState state, FlowStep step, ExecutionState.StepExecutionState stepState) + { + stepState.Status = ExecutionState.Status.Completed; + state.CurrentStepIndex++; + + foreach (var kvp in stepState.Output) + { + if (step.CompletionType == CompletionType.Once) + { + state.Variables[kvp.Key] = kvp.Value.Single(); + } + else + { + // kvp.Value may contain empty strings when the loop was exited and the arguments the step provides weren't set + state.Variables[kvp.Key] = JsonSerializer.Serialize(kvp.Value.Where(x => !string.IsNullOrWhiteSpace(x)).ToList()); + } + } + + foreach (var variable in step.Provides) + { + context[variable] = state.Variables[variable]; + } + + await this._flowStatusProvider.SaveExecutionStateAsync(sessionId, state).ConfigureAwait(false); + } + + private void ValidateStep(FlowStep step, KernelArguments context) + { + if (step.Requires.Any(p => !context.ContainsName(p))) + { + throw new KernelException($"Step {step.Goal} requires arguments {string.Join(",", step.Requires.Where(p => !context.ContainsName(p)))} that are not provided. 
"); + } + } + + private async Task CheckStartStepAsync(KernelArguments context, FlowStep step, string sessionId, string stepId, string input) + { + context = new KernelArguments(context) + { + ["goal"] = step.Goal, + ["message"] = step.StartingMessage + }; + return await this.CheckRepeatOrStartStepAsync(context, this._checkStartStepFunction, sessionId, $"{stepId}_CheckStartStep", input).ConfigureAwait(false); + } + + private async Task CheckRepeatStepAsync(KernelArguments context, FlowStep step, string sessionId, string nextStepId, string input) + { + context = new KernelArguments(context) + { + ["goal"] = step.Goal, + ["transitionMessage"] = step.TransitionMessage + }; + return await this.CheckRepeatOrStartStepAsync(context, this._checkRepeatStepFunction, sessionId, $"{nextStepId}_CheckRepeatStep", input).ConfigureAwait(false); + } + + private async Task CheckRepeatOrStartStepAsync(KernelArguments context, KernelFunction function, string sessionId, string checkRepeatOrStartStepId, string input) + { + var chatHistory = await this._flowStatusProvider.GetChatHistoryAsync(sessionId, checkRepeatOrStartStepId).ConfigureAwait(false); + if (chatHistory != null) + { + chatHistory.AddUserMessage(input); + } + else + { + chatHistory = new ChatHistory(); + } + + var scratchPad = this.CreateRepeatOrStartStepScratchPad(chatHistory); + context["agentScratchPad"] = scratchPad; + + if (this._logger.IsEnabled(LogLevel.Information)) + { + this._logger.LogInformation("Scratchpad: {ScratchPad}", scratchPad); + } + + var llmResponse = await this._systemKernel.InvokeAsync(function, context).ConfigureAwait(false); + + string llmResponseText = llmResponse.GetValue()?.Trim() ?? string.Empty; + + if (this._logger.IsEnabled(LogLevel.Information)) + { + this._logger.LogInformation("Response from {Function} : {ActionText}", "CheckRepeatOrStartStep", llmResponseText); + } + + Match finalAnswerMatch = s_finalAnswerRegex.Match(llmResponseText); + if (finalAnswerMatch.Success) + { + string resultString = finalAnswerMatch.Groups[1].Value.Trim(); + if (bool.TryParse(resultString, out bool result)) + { + await this._flowStatusProvider.SaveChatHistoryAsync(sessionId, checkRepeatOrStartStepId, chatHistory).ConfigureAwait(false); + return new RepeatOrStartStepResult(result); + } + } + + // Extract thought + Match thoughtMatch = s_thoughtRegex.Match(llmResponseText); + if (thoughtMatch.Success) + { + string thoughtString = thoughtMatch.Groups[1].Value.Trim(); + chatHistory.AddSystemMessage(thoughtString); + } + + Match questionMatch = s_questionRegex.Match(llmResponseText); + if (questionMatch.Success) + { + string prompt = questionMatch.Groups[1].Value.Trim(); + chatHistory.AddAssistantMessage(prompt); + await this._flowStatusProvider.SaveChatHistoryAsync(sessionId, checkRepeatOrStartStepId, chatHistory).ConfigureAwait(false); + + return new RepeatOrStartStepResult(null, prompt); + } + + this._logger.LogWarning("Missing result tag from {Function} : {ActionText}", "CheckRepeatOrStartStep", llmResponseText); + chatHistory.AddSystemMessage(llmResponseText + "\nI should provide either [QUESTION] or [FINAL_ANSWER]."); + await this._flowStatusProvider.SaveChatHistoryAsync(sessionId, checkRepeatOrStartStepId, chatHistory).ConfigureAwait(false); + return null; + } + + private string CreateRepeatOrStartStepScratchPad(ChatHistory chatHistory) + { + var scratchPadLines = new List(); + foreach (var message in chatHistory) + { + if (message.Role == AuthorRole.Assistant) + { + scratchPadLines.Add("[QUESTION]"); + } + else if (message.Role == 
AuthorRole.User) + { + scratchPadLines.Add("[RESPONSE]"); + } + else if (message.Role == AuthorRole.System) + { + scratchPadLines.Add("[THOUGHT]"); + } + + scratchPadLines.Add(message.Content!); + } + + return string.Join("\n", scratchPadLines).Trim(); + } + + private async Task ExecuteStepAsync(FlowStep step, string sessionId, string stepId, string input, Kernel kernel, KernelArguments arguments) + { + var stepsTaken = await this._flowStatusProvider.GetReActStepsAsync(sessionId, stepId).ConfigureAwait(false); + var lastStep = stepsTaken.LastOrDefault(); + if (lastStep != null) + { + lastStep.Observation += $"{AuthorRole.User.Label}: {input}\n"; + await this._flowStatusProvider.SaveReActStepsAsync(sessionId, stepId, stepsTaken).ConfigureAwait(false); + } + + var question = step.Goal; + foreach (var variable in step.Requires) + { + if (!variable.StartsWith("_", StringComparison.InvariantCulture) && ((string)arguments[variable]!).Length <= this._config.MaxVariableLength) + { + question += $"\n - {variable}: {JsonSerializer.Serialize(arguments[variable])}"; + } + } + + for (int i = stepsTaken.Count; i < this._config.MaxStepIterations; i++) + { + var actionStep = await this._reActEngine.GetNextStepAsync(kernel, arguments, question, stepsTaken).ConfigureAwait(false); + + if (actionStep is null) + { + this._logger?.LogWarning("Failed to get action step given input=\"{Input}\"", input); + continue; + } + + stepsTaken.Add(actionStep); + + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation("Thought: {Thought}", actionStep.Thought); + } + + if (!string.IsNullOrEmpty(actionStep.FinalAnswer)) + { + if (step.Provides.Count() == 1) + { + arguments[step.Provides.Single()] = actionStep.FinalAnswer; + return new FunctionResult(this._executeStepFunction, actionStep.FinalAnswer, metadata: arguments); + } + } + else if (!string.IsNullOrEmpty(actionStep.Action!)) + { + if (actionStep.Action!.Contains(Constants.StopAndPromptFunctionName)) + { + string prompt = actionStep.ActionVariables![Constants.StopAndPromptParameterName]; + arguments.TerminateFlow(); + + return new FunctionResult(this._executeStepFunction, prompt, metadata: arguments); + } + + var actionContextVariables = new KernelArguments(); + foreach (var kvp in arguments) + { + if (step.Requires.Contains(kvp.Key) || step.Passthrough.Contains(kvp.Key)) + { + actionContextVariables[kvp.Key] = kvp.Value; + } + } + + // get chat history + var chatHistory = await this._flowStatusProvider.GetChatHistoryAsync(sessionId, stepId).ConfigureAwait(false); + if (chatHistory is null) + { + chatHistory = new ChatHistory(); + } + else + { + chatHistory.AddUserMessage(input); + } + + string? actionResult; + try + { + await Task.Delay(this._config.MinIterationTimeMs).ConfigureAwait(false); + actionResult = await this._reActEngine.InvokeActionAsync(actionStep, input, chatHistory, kernel, actionContextVariables).ConfigureAwait(false); + + if (string.IsNullOrEmpty(actionResult)) + { + actionStep.Observation = "Got no result from action"; + } + else + { + actionStep.Observation = $"{AuthorRole.Assistant.Label}: {actionResult}\n"; + chatHistory.AddAssistantMessage(actionResult); + await this._flowStatusProvider.SaveChatHistoryAsync(sessionId, stepId, chatHistory).ConfigureAwait(false); + + foreach (var passthroughParam in step.Passthrough) + { + if (actionContextVariables.TryGetValue(passthroughParam, out object? 
paramValue) && paramValue is string paramStringValue && !string.IsNullOrEmpty(paramStringValue)) + { + arguments[passthroughParam] = actionContextVariables[passthroughParam]; + } + } + + foreach (var providedParam in step.Provides) + { + if (actionContextVariables.TryGetValue(providedParam, out object? paramValue) && paramValue is string paramStringValue && !string.IsNullOrEmpty(paramStringValue)) + { + arguments[providedParam] = actionContextVariables[providedParam]; + } + } + + foreach (var variable in Constants.ChatPluginVariables.ControlVariables) + { + if (actionContextVariables.TryGetValue(variable, out object? variableValue)) + { + arguments[variable] = variableValue; + } + } + } + } + catch (MissingMethodException ex) + { + actionStep.Observation = $"Error invoking action {actionStep.Action} : {ex.Message}. " + + "Use only the available functions listed in the [AVAILABLE FUNCTIONS] section. " + + "Do not attempt to use any other functions that are not specified.\n"; + + continue; + } + catch (Exception ex) when (!ex.IsNonRetryable()) + { + actionStep.Observation = $"Error invoking action {actionStep.Action} : {ex.Message}"; + this._logger?.LogWarning(ex, "Error invoking action {Action}", actionStep.Action); + + continue; + } + + if (this._logger?.IsEnabled(LogLevel.Information) ?? false) + { + this._logger.LogInformation("Observation: {Observation}", actionStep.Observation); + } + + await this._flowStatusProvider.SaveReActStepsAsync(sessionId, stepId, stepsTaken).ConfigureAwait(false); + + if (!string.IsNullOrEmpty(actionResult)) + { + if (arguments.IsTerminateFlow()) + { + // Terminate the flow without another round of reasoning, to save the LLM reasoning calls. + // This is not suggested unless plugin has performance requirement and has explicitly set the control variable. + return new FunctionResult(this._executeStepFunction, actionResult, metadata: arguments); + } + + foreach (var variable in Constants.ChatPluginVariables.ControlVariables) + { + if (arguments.ContainsName(variable)) + { + // redirect control to client + return new FunctionResult(this._executeStepFunction, actionResult, metadata: arguments); + } + } + + if (!step.Provides.Except(arguments.Where(v => !string.IsNullOrEmpty((string)v.Value!)).Select(_ => _.Key)).Any()) + { + // step is complete + return new FunctionResult(this._executeStepFunction, actionResult, metadata: arguments); + } + + // continue to next iteration + continue; + } + + this._logger?.LogWarning("Action: No result from action"); + } + else + { + actionStep.Observation = "ACTION $JSON_BLOB must be provided as part of thought process."; + this._logger?.LogWarning("Action: No action to take"); + } + + // continue to next iteration + await Task.Delay(this._config.MinIterationTimeMs).ConfigureAwait(false); + } + + throw new KernelException($"Failed to complete step {stepId} for session {sessionId}."); + } + + private class RepeatOrStartStepResult + { + public RepeatOrStartStepResult(bool? execute, string? prompt = null) + { + this.Prompt = prompt; + this.Execute = execute; + } + + public bool? Execute { get; } + + public string? Prompt { get; } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Execution/FlowStatusProvider.cs b/dotnet/src/Experimental/Orchestration.Flow/Execution/FlowStatusProvider.cs new file mode 100644 index 000000000000..74e0b2527ced --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Execution/FlowStatusProvider.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft. All rights reserved. 
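+// Usage sketch (memoryStore is an assumption here; any IMemoryStore implementation would do, e.g. an in-memory store for tests):
+//     IFlowStatusProvider statusProvider = await FlowStatusProvider.ConnectAsync(memoryStore);
+//     ExecutionState state = await statusProvider.GetExecutionStateAsync("session-1");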
+ +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; +using Microsoft.SemanticKernel.Experimental.Orchestration.Execution; +using Microsoft.SemanticKernel.Memory; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Default flow status provider implemented on top of +/// +public sealed class FlowStatusProvider : IFlowStatusProvider +{ + private readonly IMemoryStore _memoryStore; + + private readonly string _collectionName; + + /// + /// Initializes a new instance of the class. + /// + public static async Task ConnectAsync(IMemoryStore memoryStore, string? collectionName = null) + { + var provider = new FlowStatusProvider(memoryStore, collectionName); + return await InitializeProviderStoreAsync(provider).ConfigureAwait(false); + } + + /// + /// Initializes a new instance of the class. + /// + /// instance + /// Collection name in instance + private FlowStatusProvider(IMemoryStore memoryStore, string? collectionName = null) + { + this._memoryStore = memoryStore; + this._collectionName = collectionName ?? nameof(FlowStatusProvider); + } + + /// + public async Task GetExecutionStateAsync(string sessionId) + { + var result = await (this._memoryStore.GetAsync(this._collectionName, this.GetExecutionStateStorageKey(sessionId))).ConfigureAwait(false); + var text = result?.Metadata.Text ?? string.Empty; + + if (!string.IsNullOrEmpty(text)) + { + try + { + return JsonSerializer.Deserialize(text) ?? new ExecutionState(); + } + catch + { + throw new InvalidOperationException( + $"Failed to deserialize execution state for sessionId={sessionId}, data={text}"); + } + } + else + { + return new ExecutionState(); + } + } + + /// + public async Task SaveExecutionStateAsync(string sessionId, ExecutionState state) + { + var json = JsonSerializer.Serialize(state); + await this._memoryStore.UpsertAsync(this._collectionName, this.CreateMemoryRecord(this.GetExecutionStateStorageKey(sessionId), json)) + .ConfigureAwait(false); + } + + private string GetExecutionStateStorageKey(string sessionId) + { + return $"FlowStatus_{sessionId}"; + } + + /// + public async Task GetChatHistoryAsync(string sessionId, string stepId) + { + var result = await this._memoryStore.GetAsync(this._collectionName, this.GetChatHistoryStorageKey(sessionId, stepId)).ConfigureAwait(false); + var text = result?.Metadata.Text ?? string.Empty; + + if (!string.IsNullOrEmpty(text)) + { + try + { + return ChatHistorySerializer.Deserialize(text); + } + catch + { + throw new InvalidOperationException( + $"Failed to deserialize chat history for session {sessionId}, data={text}"); + } + } + else + { + return null; + } + } + + /// + public async Task SaveChatHistoryAsync(string sessionId, string stepId, ChatHistory history) + { + var json = ChatHistorySerializer.Serialize(history); + await this._memoryStore.UpsertAsync(this._collectionName, this.CreateMemoryRecord(this.GetChatHistoryStorageKey(sessionId, stepId), json)) + .ConfigureAwait(false); + } + + private string GetChatHistoryStorageKey(string sessionId, string stepId) + { + return $"ChatHistory_{sessionId}_{stepId}"; + } + + /// + public async Task> GetReActStepsAsync(string sessionId, string stepId) + { + var result = await this._memoryStore.GetAsync(this._collectionName, this.GetStepsStorageKey(sessionId, stepId)).ConfigureAwait(false); + var text = result?.Metadata.Text ?? 
string.Empty; + + if (!string.IsNullOrEmpty(text)) + { + try + { + return JsonSerializer.Deserialize>(text) ?? new List(); + } + catch + { + throw new InvalidOperationException( + $"Failed to deserialize steps for session {sessionId}, data={text}"); + } + } + + return new List(); + } + + /// + public async Task SaveReActStepsAsync(string sessionId, string stepId, List steps) + { + var json = JsonSerializer.Serialize(steps); + await this._memoryStore.UpsertAsync(this._collectionName, this.CreateMemoryRecord(this.GetStepsStorageKey(sessionId, stepId), json)) + .ConfigureAwait(false); + } + + private static async Task InitializeProviderStoreAsync(FlowStatusProvider flowProvider, CancellationToken cancellationToken = default) + { + if (!await flowProvider._memoryStore.DoesCollectionExistAsync(flowProvider._collectionName, cancellationToken).ConfigureAwait(false)) + { + await flowProvider._memoryStore.CreateCollectionAsync(flowProvider._collectionName, cancellationToken).ConfigureAwait(false); + } + + return flowProvider; + } + + private string GetStepsStorageKey(string sessionId, string stepId) + { + return $"Steps_{sessionId}_{stepId}"; + } + + private MemoryRecord CreateMemoryRecord(string key, string text) + { + return MemoryRecord.LocalRecord(key, text, null, ReadOnlyMemory.Empty); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Execution/ReActEngine.cs b/dotnet/src/Experimental/Orchestration.Flow/Execution/ReActEngine.cs new file mode 100644 index 000000000000..6409ab0144d1 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Execution/ReActEngine.cs @@ -0,0 +1,426 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Orchestration.Extensions; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +/// +/// Chat ReAct Engine +/// +internal sealed class ReActEngine +{ + /// + /// The logger + /// + private readonly ILogger _logger; + + /// + /// Re-Act function for flow execution + /// + private readonly KernelFunction _reActFunction; + + /// + /// The flow planner config + /// + private readonly FlowOrchestratorConfig _config; + + /// + /// The goal to use when creating semantic functions that are restricted from flow creation + /// + private const string RestrictedPluginName = "ReActEngine_Excluded"; + + /// + /// The Action tag + /// + private const string Action = "[ACTION]"; + + /// + /// The Thought tag + /// + private const string Thought = "[THOUGHT]"; + + /// + /// The Observation tag + /// + private const string Observation = "[OBSERVATION]"; + + /// + /// The prefix used for the scratch pad + /// + private const string ScratchPadPrefix = + "This was my previous work (but they haven't seen any of it! 
They only see what I return as final answer):"; + + /// + /// The regex for parsing the action response + /// + private static readonly Regex s_actionRegex = + new(@"(?<=\[ACTION\])[^{}]*(\{.*?\})(?=\n\[)", RegexOptions.Singleline); + + /// + /// The regex for parsing the final action response + /// + private static readonly Regex s_finalActionRegex = + new(@"\[FINAL.+\][^{}]*({(?:[^{}]*{[^{}]*})*[^{}]*})", RegexOptions.Singleline); + + /// + /// The regex for parsing the thought response + /// + private static readonly Regex s_thoughtRegex = + new(@"(\[THOUGHT\])?(?.+?)(?=\[ACTION\]|$)", RegexOptions.Singleline); + + /// + /// The regex for parsing the final answer response + /// + private static readonly Regex s_finalAnswerRegex = + new(@"\[FINAL.+\](?.+)", RegexOptions.Singleline); + + internal ReActEngine(Kernel systemKernel, ILogger logger, FlowOrchestratorConfig config) + { + this._logger = logger; + + this._config = config; + this._config.ExcludedPlugins.Add(RestrictedPluginName); + + var modelId = config.AIRequestSettings?.ModelId; + var promptConfig = config.ReActPromptTemplateConfig; + if (promptConfig is null) + { + string promptConfigString = EmbeddedResource.Read("Plugins.ReActEngine.yaml")!; + if (!string.IsNullOrEmpty(modelId)) + { + var modelConfigString = EmbeddedResource.Read($"Plugins.ReActEngine.{modelId}.yaml", false); + promptConfigString = string.IsNullOrEmpty(modelConfigString) ? promptConfigString : modelConfigString!; + } + + promptConfig = KernelFunctionYaml.ToPromptTemplateConfig(promptConfigString); + + if (!string.IsNullOrEmpty(modelId)) + { + var modelConfigString = EmbeddedResource.Read($"Plugins.ReActEngine.{modelId}.yaml", false); + promptConfigString = string.IsNullOrEmpty(modelConfigString) ? promptConfigString : modelConfigString!; + } + } + + this._reActFunction = systemKernel.CreateFunctionFromPrompt(promptConfig); + } + + internal async Task GetNextStepAsync(Kernel kernel, KernelArguments arguments, string question, List previousSteps) + { + arguments["question"] = question; + var scratchPad = this.CreateScratchPad(previousSteps); + arguments["agentScratchPad"] = scratchPad; + + var availableFunctions = this.GetAvailableFunctions(kernel).ToArray(); + if (availableFunctions.Length == 1) + { + var firstActionFunction = availableFunctions.First(); + if (firstActionFunction.Parameters.Count == 0) + { + var action = $"{firstActionFunction.PluginName}.{firstActionFunction.Name}"; + + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger?.LogDebug("Auto selecting {Action} as it is the only function available and it has no parameters.", action); + } + + return new ReActStep + { + Action = action + }; + } + } + + var functionDesc = this.GetFunctionDescriptions(availableFunctions); + arguments["functionDescriptions"] = functionDesc; + + if (this._logger.IsEnabled(LogLevel.Information)) + { + this._logger?.LogInformation("question: {Question}", question); + this._logger?.LogInformation("functionDescriptions: {FunctionDescriptions}", functionDesc); + this._logger?.LogInformation("Scratchpad: {ScratchPad}", scratchPad); + } + + var llmResponse = await this._reActFunction.InvokeAsync(kernel, arguments).ConfigureAwait(false); + + string llmResponseText = llmResponse.GetValue()!.Trim(); + + if (this._logger?.IsEnabled(LogLevel.Debug) ?? 
false) + { + this._logger?.LogDebug("Response : {ActionText}", llmResponseText); + } + + var actionStep = this.ParseResult(llmResponseText); + + if (!string.IsNullOrEmpty(actionStep.Action) || previousSteps.Count == 0 || !string.IsNullOrEmpty(actionStep.FinalAnswer)) + { + return actionStep; + } + + actionStep.Thought = llmResponseText; + actionStep.Observation = "Failed to parse valid action step, missing action or final answer."; + this._logger?.LogWarning("Failed to parse valid action step from llm response={LLMResponseText}", llmResponseText); + this._logger?.LogWarning("Scratchpad={ScratchPad}", scratchPad); + return actionStep; + } + + internal async Task InvokeActionAsync(ReActStep actionStep, string chatInput, ChatHistory chatHistory, Kernel kernel, KernelArguments contextVariables) + { + var variables = actionStep.ActionVariables ?? new Dictionary(); + + variables[Constants.ActionVariableNames.ChatInput] = chatInput; + variables[Constants.ActionVariableNames.ChatHistory] = ChatHistorySerializer.Serialize(chatHistory); + + if (this._logger.IsEnabled(LogLevel.Information)) + { + this._logger?.LogInformation("Action: {Action}({ActionVariables})", actionStep.Action, JsonSerializer.Serialize(variables)); + } + + var availableFunctions = this.GetAvailableFunctions(kernel); + var targetFunction = availableFunctions.FirstOrDefault(f => ToFullyQualifiedName(f) == actionStep.Action) ?? throw new MissingMethodException($"The function '{actionStep.Action}' was not found."); + var function = kernel.Plugins.GetFunction(targetFunction.PluginName, targetFunction.Name); + var functionView = function.Metadata; + + var actionContextVariables = this.CreateActionKernelArguments(variables, contextVariables); + + foreach (var parameter in functionView.Parameters) + { + if (!actionContextVariables.ContainsName(parameter.Name)) + { + actionContextVariables[parameter.Name] = parameter.DefaultValue ?? string.Empty; + } + } + + try + { + var result = await function.InvokeAsync(kernel, actionContextVariables).ConfigureAwait(false); + + foreach (var variable in actionContextVariables) + { + contextVariables[variable.Key] = variable.Value; + } + + if (this._logger?.IsEnabled(LogLevel.Debug) ?? false) + { + this._logger?.LogDebug("Invoked {FunctionName}. Result: {Result}", targetFunction.Name, result.GetValue()); + } + + return result.GetValue() ?? string.Empty; + } + catch (Exception e) when (!e.IsNonRetryable()) + { + this._logger?.LogError(e, "Something went wrong in action step: {0}.{1}. Error: {2}", targetFunction.PluginName, targetFunction.Name, e.Message); + return $"Something went wrong in action step: {targetFunction.PluginName}.{targetFunction.Name}. Error: {e.Message} {e.InnerException?.Message}"; + } + } + + private KernelArguments CreateActionKernelArguments(Dictionary actionVariables, KernelArguments context) + { + var actionContext = new KernelArguments(context); + + foreach (var kvp in actionVariables) + { + actionContext[kvp.Key] = kvp.Value; + } + + return actionContext; + } + + private string CreateScratchPad(List stepsTaken) + { + if (stepsTaken.Count == 0) + { + return string.Empty; + } + + var scratchPadLines = new List + { + // Add the original first thought + ScratchPadPrefix, + $"{Thought} {stepsTaken[0].Thought}" + }; + + // Keep track of where to insert the next step + var insertPoint = scratchPadLines.Count; + + // Keep the most recent steps in the scratch pad. 
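+        // Walk the steps from newest to oldest, inserting each entry at the fixed insert point so the
+        // rendered scratch pad stays in chronological order, and stop early once the size heuristic
+        // (scratchPadLines.Count / 4 compared against 75% of MaxTokens) indicates the budget is spent.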
+ for (var i = stepsTaken.Count - 1; i >= 0; i--) + { + if (scratchPadLines.Count / 4.0 > (this._config.MaxTokens * 0.75)) + { + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Scratchpad is too long, truncating. Skipping {CountSkipped} steps.", i + 1); + } + + break; + } + + var s = stepsTaken[i]; + + if (!string.IsNullOrEmpty(s.Observation)) + { + scratchPadLines.Insert(insertPoint, $"{Observation} \n{s.Observation}"); + } + + if (!string.IsNullOrEmpty(s.Action)) + { + // ignore the built-in context variables + var variablesToPrint = s.ActionVariables?.Where(v => !Constants.ActionVariableNames.All.Contains(v.Key)).ToDictionary(_ => _.Key, _ => _.Value); + scratchPadLines.Insert(insertPoint, $"{Action} {{\"action\": \"{s.Action}\",\"action_variables\": {JsonSerializer.Serialize(variablesToPrint)}}}"); + } + + if (i != 0) + { + scratchPadLines.Insert(insertPoint, $"{Thought} {s.Thought}"); + } + } + + return string.Join("\n", scratchPadLines).Trim(); + } + + private ReActStep ParseResult(string input) + { + var result = new ReActStep + { + OriginalResponse = input + }; + + // Extract final answer + Match finalAnswerMatch = s_finalAnswerRegex.Match(input); + + if (finalAnswerMatch.Success) + { + result.FinalAnswer = finalAnswerMatch.Groups[1].Value.Trim(); + } + + // Extract thought + Match thoughtMatch = s_thoughtRegex.Match(input); + + if (thoughtMatch.Success) + { + result.Thought = thoughtMatch.Value.Trim(); + } + else if (!input.Contains(Action)) + { + result.Thought = input; + } + else + { + throw new InvalidOperationException("Unexpected input format"); + } + + result.Thought = result.Thought.Replace(Thought, string.Empty).Trim(); + + // Extract action + string actionStepJson = input; + Match actionMatch = s_actionRegex.Match(input + "\n["); + if (actionMatch.Success) + { + actionStepJson = actionMatch.Groups[1].Value.Trim(); + } + else + { + Match finalActionMatch = s_finalActionRegex.Match(input); + if (finalActionMatch.Success) + { + actionStepJson = finalActionMatch.Groups[1].Value.Trim(); + } + } + + try + { + var reActStep = JsonSerializer.Deserialize(actionStepJson); + if (reActStep is null) + { + result.Observation = $"Action step parsing error, empty JSON: {actionStepJson}"; + } + else + { + result.Action = reActStep.Action; + result.ActionVariables = reActStep.ActionVariables; + } + } + catch (JsonException) + { + result.Observation = $"Action step parsing error, invalid JSON: {actionStepJson}"; + } + + if (string.IsNullOrEmpty(result.Thought) && string.IsNullOrEmpty(result.Action)) + { + result.Observation = "Action step error, no thought or action found. Please give a valid thought and/or action."; + } + + return result; + } + + private string GetFunctionDescriptions(KernelFunctionMetadata[] functions) + { + return string.Join("\n", functions.Select(ToManualString)); + } + + private IEnumerable GetAvailableFunctions(Kernel kernel) + { + var functionViews = kernel.Plugins.GetFunctionsMetadata(); + + var excludedPlugins = this._config.ExcludedPlugins ?? new HashSet(); + var excludedFunctions = this._config.ExcludedFunctions ?? new HashSet(); + + var availableFunctions = + functionViews + .Where(s => !excludedPlugins.Contains(s.PluginName!) && !excludedFunctions.Contains(s.Name)) + .OrderBy(x => x.PluginName) + .ThenBy(x => x.Name); + + return this._config.EnableAutoTermination + ? 
availableFunctions.Append(GetStopAndPromptUserFunction()) + : availableFunctions; + } + + private static KernelFunctionMetadata GetStopAndPromptUserFunction() + { + KernelParameterMetadata promptParameter = new(Constants.StopAndPromptParameterName) + { + Description = "The message to be shown to the user.", + ParameterType = typeof(string), + Schema = KernelJsonSchema.Parse("{\"type\":\"string\"}"), + }; + + return new KernelFunctionMetadata(Constants.StopAndPromptFunctionName) + { + PluginName = "_REACT_ENGINE_", + Description = "Terminate the session, only used when previous attempts failed with FATAL error and need notify user", + Parameters = new[] { promptParameter } + }; + } + + private static string ToManualString(KernelFunctionMetadata function) + { + var inputs = string.Join("\n", function.Parameters.Select(parameter => + { + var defaultValueString = parameter.DefaultValue is not string value || string.IsNullOrEmpty(value) ? string.Empty : $"(default='{parameter.DefaultValue}')"; + return $" - {parameter.Name}: {parameter.Description} {defaultValueString}"; + })); + + var functionDescription = function.Description.Trim(); + + if (string.IsNullOrEmpty(inputs)) + { + return $"{ToFullyQualifiedName(function)}: {functionDescription}\n"; + } + + return $"{ToFullyQualifiedName(function)}: {functionDescription}\n{inputs}\n"; + } + + private static string ToFullyQualifiedName(KernelFunctionMetadata function) + { + return $"{function.PluginName}.{function.Name}"; + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Execution/ReActStep.cs b/dotnet/src/Experimental/Orchestration.Flow/Execution/ReActStep.cs new file mode 100644 index 000000000000..0e39452c7e77 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Execution/ReActStep.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +/// +/// An ReAct (Reasoning-Action-Observation) step in flow execution. +/// +/// +/// https://arxiv.org/pdf/2210.03629.pdf +/// +public class ReActStep +{ + /// + /// Gets or sets the step number. + /// + [JsonPropertyName("thought")] + public string? Thought { get; set; } + + /// + /// Gets or sets the action of the step + /// + [JsonPropertyName("action")] + public string? Action { get; set; } + + /// + /// Gets or sets the variables for the action + /// + [JsonPropertyName("action_variables")] + public Dictionary? ActionVariables { get; set; } + + /// + /// Gets or sets the output of the action + /// + [JsonPropertyName("observation")] + public string? Observation { get; set; } + + /// + /// Gets or sets the output of the system + /// + [JsonPropertyName("final_answer")] + public string? FinalAnswer { get; set; } + + /// + /// The raw response from the action + /// + [JsonPropertyName("original_response")] + public string? 
OriginalResponse { get; set; } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Experimental.Orchestration.Flow.csproj b/dotnet/src/Experimental/Orchestration.Flow/Experimental.Orchestration.Flow.csproj new file mode 100644 index 000000000000..69d38b2ec362 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Experimental.Orchestration.Flow.csproj @@ -0,0 +1,30 @@ + + + + Microsoft.SemanticKernel.Experimental.Orchestration.Flow + Microsoft.SemanticKernel.Experimental.Orchestration + netstandard2.0 + Latest + + + + + + Semantic Kernel - Flow Orchestrator + Semantic Kernel Flow Orchestrator + + + + + Always + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Experimental/Orchestration.Flow/Extensions/ExceptionExtensions.cs b/dotnet/src/Experimental/Orchestration.Flow/Extensions/ExceptionExtensions.cs new file mode 100644 index 000000000000..b15e77591299 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Extensions/ExceptionExtensions.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration.Extensions; + +internal static class ExceptionExtensions +{ + internal static bool IsNonRetryable(this Exception ex) + { + bool isContentFilterException = ex is HttpOperationException + { + StatusCode: HttpStatusCode.BadRequest, InnerException: { } + } hoe && hoe.InnerException.Message.Contains("content_filter"); + + return isContentFilterException || ex.IsCriticalException(); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Extensions/FlowExtensions.cs b/dotnet/src/Experimental/Orchestration.Flow/Extensions/FlowExtensions.cs new file mode 100644 index 000000000000..411a61cd57f2 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Extensions/FlowExtensions.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Extension methods for . +/// +public static class FlowExtensions +{ + internal static List SortSteps(this Flow flow) + { + var sortedSteps = new List(); + var remainingSteps = new List(flow.Steps); + + while (remainingSteps.Count > 0) + { + var independentStep = remainingSteps.FirstOrDefault(step => !remainingSteps.Any(step.DependsOn)); + + if (independentStep is null) + { + throw new KernelException("The plan contains circular dependencies."); + } + + sortedSteps.Add(independentStep); + remainingSteps.Remove(independentStep); + } + + return sortedSteps; + } + + /// + /// Hydrate the reference steps in the flow. 
+ /// + /// the flow + /// the flow repository + /// The flow with hydrated steps + /// if referenced flow cannot be found in the repository + public static async Task BuildReferenceAsync(this Flow flow, IFlowCatalog flowRepository) + { + var referenceSteps = flow.Steps.OfType().ToList(); + + foreach (var step in referenceSteps) + { + flow.Steps.Remove(step); + var referencedFlow = await flowRepository.GetFlowAsync(step.FlowName).ConfigureAwait(false); + if (referencedFlow is null) + { + throw new ArgumentException($"Referenced flow {step.FlowName} is not found"); + } + + referencedFlow.CompletionType = step.CompletionType; + referencedFlow.AddPassthrough(step.Passthrough.ToArray()); + referencedFlow.StartingMessage = step.StartingMessage; + referencedFlow.TransitionMessage = step.TransitionMessage; + + foreach (var referencedFlowStep in referencedFlow.Steps) + { + referencedFlowStep.AddPassthrough(step.Passthrough.ToArray(), isReferencedFlow: true); + } + + flow.Steps.Add(referencedFlow); + } + + return flow; + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Extensions/FunctionResultExtensions.cs b/dotnet/src/Experimental/Orchestration.Flow/Extensions/FunctionResultExtensions.cs new file mode 100644 index 000000000000..0fff5b1fabd1 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Extensions/FunctionResultExtensions.cs @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Extension methods for +/// +// ReSharper disable once InconsistentNaming +public static class FunctionResultExtensions +{ + /// + /// Check if we should prompt user for input based on function result. + /// + /// Function result. + internal static bool IsPromptInput(this FunctionResult result) + { + return result.Metadata!.TryGetValue(Constants.ChatPluginVariables.PromptInputName, out object? promptInput) + && promptInput is Constants.ChatPluginVariables.DefaultValue; + } + + /// + /// Check if we should force the next iteration loop based on function result. + /// + /// Function result. + internal static bool IsContinueLoop(this FunctionResult result) + { + return result.Metadata!.TryGetValue(Constants.ChatPluginVariables.ContinueLoopName, out object? continueLoop) + && continueLoop is Constants.ChatPluginVariables.DefaultValue; + } + + /// + /// Check if we should exit the loop based on function result. + /// + /// Function result. + /// The response to exit loop + internal static bool TryGetExitLoopResponse(this FunctionResult result, out string? response) + { + if (result.Metadata!.TryGetValue(Constants.ChatPluginVariables.ExitLoopName, out object? exitLoop) + && exitLoop is string exitLoopResponse) + { + response = exitLoopResponse; + return true; + } + + response = null; + return false; + } + + /// + /// Check if we should terminate flow based on function result. + /// + /// Function result. + public static bool IsTerminateFlow(this FunctionResult result) + { + return result.Metadata!.TryGetValue(Constants.ChatPluginVariables.StopFlowName, out object? stopFlow) + && stopFlow is Constants.ChatPluginVariables.DefaultValue; + } + + /// + /// Check if all arguments to be provided with the flow is available in the context + /// + /// Function result. 
+ /// flow + /// + public static bool IsComplete(this FunctionResult result, Flow flow) + { + return flow.Provides.All(result.Metadata!.ContainsKey); + } + + /// + /// Get from context. + /// + /// Function result. + /// The chat history + public static ChatHistory? GetChatHistory(this FunctionResult result) + { + if (result.Metadata!.TryGetValue(Constants.ActionVariableNames.ChatHistory, out object? chatHistory) + && chatHistory is string chatHistoryText + && !string.IsNullOrEmpty(chatHistoryText)) + { + return ChatHistorySerializer.Deserialize(chatHistoryText!); + } + + return null; + } + + /// + /// Get latest chat input from context. + /// + /// Function result. + /// The latest chat input. + public static string GetChatInput(this FunctionResult result) + { + if (result.Metadata!.TryGetValue(Constants.ActionVariableNames.ChatInput, out object? chatInput) + && chatInput is string chatInputString) + { + return chatInputString; + } + + return string.Empty; + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Extensions/KernelArgumentsExtensions.cs b/dotnet/src/Experimental/Orchestration.Flow/Extensions/KernelArgumentsExtensions.cs new file mode 100644 index 000000000000..da4108424134 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Extensions/KernelArgumentsExtensions.cs @@ -0,0 +1,133 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Extension methods for +/// +// ReSharper disable once InconsistentNaming +public static class KernelArgumentsExtensions +{ + /// + /// Check if we should prompt user for input based on current context. + /// + /// Context arguments. + internal static bool IsPromptInput(this KernelArguments variables) + { + return variables.TryGetValue(Constants.ChatPluginVariables.PromptInputName, out object? promptInput) + && promptInput is Constants.ChatPluginVariables.DefaultValue; + } + + /// + /// Check if we should force the next iteration loop based on current context. + /// + /// Context arguments. + internal static bool IsContinueLoop(this KernelArguments arguments) + { + return arguments.TryGetValue(Constants.ChatPluginVariables.ContinueLoopName, out object? continueLoop) + && continueLoop is Constants.ChatPluginVariables.DefaultValue; + } + + /// + /// Check if we should terminate flow based on current context. + /// + /// Context arguments. + public static bool IsTerminateFlow(this KernelArguments arguments) + { + return arguments.TryGetValue(Constants.ChatPluginVariables.StopFlowName, out object? stopFlow) + && stopFlow is Constants.ChatPluginVariables.DefaultValue; + } + + /// + /// Check if all arguments to be provided with the flow is available in the context + /// + /// Context arguments. + /// flow + /// + public static bool IsComplete(this KernelArguments arguments, Flow flow) + { + return flow.Provides.All(arguments.ContainsName); + } + + /// + /// Get from context. + /// + /// Context arguments. + /// The chat history + public static ChatHistory? GetChatHistory(this KernelArguments arguments) + { + if (arguments.TryGetValue(Constants.ActionVariableNames.ChatHistory, out object? 
chatHistory) + && chatHistory is string chatHistoryText + && !string.IsNullOrEmpty(chatHistoryText)) + { + return ChatHistorySerializer.Deserialize(chatHistoryText!); + } + + return null; + } + + /// + /// Get latest chat input from context. + /// + /// Context arguments. + /// The latest chat input. + public static string GetChatInput(this KernelArguments arguments) + { + if (arguments.TryGetValue(Constants.ActionVariableNames.ChatInput, out object? chatInput) + && chatInput is string chatInputString) + { + return chatInputString; + } + + return string.Empty; + } + + /// + /// Signal the orchestrator to prompt user for input with current function response. + /// + /// Context arguments. + public static void PromptInput(this KernelArguments arguments) + { + // Cant prompt the user for input and exit the execution at the same time + if (!arguments.ContainsName(Constants.ChatPluginVariables.ExitLoopName)) + { + arguments[Constants.ChatPluginVariables.PromptInputName] = Constants.ChatPluginVariables.DefaultValue; + } + } + + /// + /// Signal the orchestrator to exit out of the AtLeastOnce or ZeroOrMore loop. If response is non-null, that value will be outputted to the user. + /// + /// Context arguments. + /// context + public static void ExitLoop(this KernelArguments arguments, string? response = null) + { + // Cant prompt the user for input and exit the execution at the same time + if (!arguments.ContainsName(Constants.ChatPluginVariables.PromptInputName)) + { + arguments[Constants.ChatPluginVariables.ExitLoopName] = response ?? string.Empty; + } + } + + /// + /// Signal the orchestrator to go to the next iteration of the loop in the AtLeastOnce or ZeroOrMore step. + /// + /// Context arguments. + public static void ContinueLoop(this KernelArguments arguments) + { + arguments[Constants.ChatPluginVariables.ContinueLoopName] = Constants.ChatPluginVariables.DefaultValue; + } + + /// + /// Signal the orchestrator to terminate the flow. + /// + /// context + public static void TerminateFlow(this KernelArguments arguments) + { + arguments[Constants.ChatPluginVariables.StopFlowName] = Constants.ChatPluginVariables.DefaultValue; + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Extensions/PromptTemplateConfigExtensions.cs b/dotnet/src/Experimental/Orchestration.Flow/Extensions/PromptTemplateConfigExtensions.cs new file mode 100644 index 000000000000..f9c63846d63e --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Extensions/PromptTemplateConfigExtensions.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Extension methods for PromptTemplateConfig +/// +internal static class PromptTemplateConfigExtensions +{ + /// + /// Set the max_tokens request setting to be used by OpenAI models + /// + /// PromptTemplateConfig instance + /// Value of max tokens to set + internal static void SetMaxTokens(this PromptTemplateConfig config, int maxTokens) + { + var executionSettings = config.ExecutionSettings; + foreach (var setting in executionSettings) + { + if (setting.Value.ExtensionData != null) + { + setting.Value.ExtensionData["max_tokens"] = maxTokens; + } + } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs b/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs new file mode 100644 index 000000000000..32cbaa7c0c72 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft. 
All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; +using Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// A flow orchestrator that using semantic kernel for execution. +/// +public class FlowOrchestrator +{ + private readonly IKernelBuilder _kernelBuilder; + + private readonly IFlowStatusProvider _flowStatusProvider; + + private readonly Dictionary _globalPluginCollection; + + private readonly IFlowValidator _flowValidator; + + private readonly FlowOrchestratorConfig? _config; + + /// + /// Initialize a new instance of the class. + /// + /// The semantic kernel builder. + /// The flow status provider. + /// The global plugin collection + /// The flow validator. + /// Optional configuration object + public FlowOrchestrator( + IKernelBuilder kernelBuilder, + IFlowStatusProvider flowStatusProvider, + Dictionary? globalPluginCollection = null, + IFlowValidator? validator = null, + FlowOrchestratorConfig? config = null) + { + Verify.NotNull(kernelBuilder); + + this._kernelBuilder = kernelBuilder; + this._flowStatusProvider = flowStatusProvider; + this._globalPluginCollection = globalPluginCollection ?? new Dictionary(); + this._flowValidator = validator ?? new FlowValidator(); + this._config = config; + } + + /// + /// Execute a given flow. + /// + /// goal to achieve + /// execution session id + /// current input + /// execution kernel arguments + /// KernelArguments, which includes a json array of strings as output. The flow result is also exposed through the context when completes. + public async Task ExecuteFlowAsync( + [Description("The flow to execute")] Flow flow, + [Description("Execution session id")] string sessionId, + [Description("Current input")] string input, + [Description("Execution arguments")] + KernelArguments? kernelArguments = null) + { + try + { + this._flowValidator.Validate(flow); + } + catch (Exception ex) + { + throw new KernelException("Invalid flow", ex); + } + + var executor = new FlowExecutor(this._kernelBuilder, this._flowStatusProvider, this._globalPluginCollection, this._config); + return await executor.ExecuteFlowAsync(flow, sessionId, input, kernelArguments ?? new KernelArguments(null)).ConfigureAwait(false); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestratorConfig.cs b/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestratorConfig.cs new file mode 100644 index 000000000000..171756034cce --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestratorConfig.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Configuration for flow planner instances. +/// +public sealed class FlowOrchestratorConfig +{ + /// + /// A list of plugins to exclude from the plan creation request. + /// + public HashSet ExcludedPlugins { get; } = new(); + + /// + /// A list of functions to exclude from the plan creation request. + /// + public HashSet ExcludedFunctions { get; } = new(); + + /// + /// The maximum number of tokens to allow in a plan. + /// + public int MaxTokens { get; set; } = 1024; + + /// + /// The maximum length of a string variable. 
+ /// + /// + /// In most cases, the required variables are passed to ReAct engine to infer the next plugin and parameters to execute. + /// However when the variable is too long, it will either be truncated or decrease the robustness of value passing. + /// To mitigate that, the will avoid rendering the variables exceeding MaxVariableLength in the prompt. + /// And the variables should be accessed implicitly from ContextVariables instead of function parameters by the plugins. + /// + public int MaxVariableLength { get; set; } = 400; + + /// + /// The maximum number of iterations to allow for a step. + /// + public int MaxStepIterations { get; set; } = 10; + + /// + /// The minimum time to wait between iterations in milliseconds. + /// + public int MinIterationTimeMs { get; set; } = 0; + + /// + /// Optional. The prompt template configuration override for the ReAct engine. + /// + public PromptTemplateConfig? ReActPromptTemplateConfig { get; set; } = null; + + /// + /// When this is enabled, the flow will be terminated automatically if ReAct engine has exhausted available plugins. + /// + public bool EnableAutoTermination { get; set; } = false; + + /// + /// Optional. The allowed AI service id for the React engine. + /// + public HashSet AIServiceIds { get; set; } = new(); + + /// + /// Optional. The AI request settings for the ReAct engine. + /// + /// + /// Prompt used for reasoning may be different for different models, the prompt selection would be based on the PromptExecutionSettings. + /// if the built in prompt template does not work for your model, suggest to override it with . + /// + public PromptExecutionSettings? AIRequestSettings { get; set; } = null; +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/FlowSerializer.cs b/dotnet/src/Experimental/Orchestration.Flow/FlowSerializer.cs new file mode 100644 index 000000000000..d36a725034a6 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/FlowSerializer.cs @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.IO; +using System.Text.Json; +using System.Text.Json.Serialization; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Serializer for +/// +public static class FlowSerializer +{ + /// Options for . + private static readonly JsonSerializerOptions s_deserializeOptions = new() + { + PropertyNameCaseInsensitive = true, + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) } + }; + + /// + /// Deserialize flow from yaml + /// + /// the yaml string + /// the instance + public static Flow DeserializeFromYaml(string yaml) + { + var deserializer = new DeserializerBuilder() + .WithNamingConvention(CamelCaseNamingConvention.Instance) + .Build(); + + var flow = deserializer.Deserialize(new StringReader(yaml)); + + return UpCast(flow); + } + + /// + /// Deserialize flow from json + /// + /// the json string + /// the instance + public static Flow? DeserializeFromJson(string json) + { + var flow = JsonSerializer.Deserialize(json, s_deserializeOptions) ?? 
+ throw new JsonException("Failed to deserialize flow"); + + return UpCast(flow); + } + + private static Flow UpCast(FlowModel flow) + { + Flow result = new(flow.Name, flow.Goal); + + foreach (var step in flow.Steps) + { + result.AddStep(UpCast(step)); + } + + PopulateVariables(result, flow); + + return result; + } + + private static FlowStep UpCast(FlowStepModel step) + { + FlowStep result = string.IsNullOrEmpty(step.FlowName) ? new FlowStep(step.Goal) : new ReferenceFlowStep(step.FlowName!); + + result.CompletionType = step.CompletionType; + result.StartingMessage = step.StartingMessage; + result.TransitionMessage = step.TransitionMessage; + result.Plugins = step.Plugins; + + PopulateVariables(result, step); + + return result; + } + + private static void PopulateVariables(FlowStep step, FlowStepModel model) + { + step.AddProvides(model.Provides.ToArray()); + step.AddRequires(model.Requires.ToArray()); + step.AddPassthrough(model.Passthrough.ToArray()); + } + + private class FlowStepModel + { + public string Goal { get; set; } = string.Empty; + + public List Requires { get; set; } = new(); + + public List Provides { get; set; } = new(); + + public List Passthrough { get; set; } = new(); + + public CompletionType CompletionType { get; set; } = CompletionType.Once; + + public string? StartingMessage { get; set; } + + public string? TransitionMessage { get; set; } + + public List Plugins { get; set; } = new(); + + public string? FlowName { get; set; } + } + + private class FlowModel : FlowStepModel + { + public string Name { get; set; } = string.Empty; + + public List Steps { get; set; } = new(); + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/FlowValidator.cs b/dotnet/src/Experimental/Orchestration.Flow/FlowValidator.cs new file mode 100644 index 000000000000..098883e444a9 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/FlowValidator.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft. All rights reserved. 
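A minimal usage sketch (editor's illustration, not part of this diff; the flow name, goal, and variable below are hypothetical): the YAML accepted by FlowSerializer.DeserializeFromYaml mirrors the FlowModel/FlowStepModel shapes above, with camel-cased field names, so a single-step flow can be loaded like this:

    var yaml = string.Join("\n",
        "name: SampleFlow",
        "goal: Answer a question about a city",
        "steps:",
        "  - goal: Collect the city name",
        "    provides:",
        "      - city");
    Flow flow = FlowSerializer.DeserializeFromYaml(yaml);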
+ +using System; +using System.Linq; +using Microsoft.SemanticKernel.Experimental.Orchestration.Abstractions; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// The flow validator +/// +public class FlowValidator : IFlowValidator +{ + /// + public void Validate(Flow flow) + { + Verify.NotNullOrWhiteSpace(flow.Goal, nameof(flow.Goal)); + + this.ValidateNonEmpty(flow); + this.ValidatePartialOrder(flow); + this.ValidateReferenceStep(flow); + this.ValidateStartingMessage(flow); + this.ValidatePassthroughVariables(flow); + } + + private void ValidateStartingMessage(Flow flow) + { + foreach (var step in flow.Steps) + { + if (step.CompletionType is CompletionType.Optional or CompletionType.ZeroOrMore + && string.IsNullOrEmpty(step.StartingMessage)) + { + throw new ArgumentException( + $"Missing starting message for step={step.Goal} with completion type={step.CompletionType}"); + } + } + } + + private void ValidateNonEmpty(Flow flow) + { + if (flow.Steps.Count == 0) + { + throw new ArgumentException("Flow must contain at least one flow step."); + } + } + + private void ValidatePartialOrder(Flow flow) + { + try + { + var sorted = flow.SortSteps(); + } + catch (Exception ex) + { + throw new ArgumentException("Flow steps must be a partial order set.", ex); + } + } + + private void ValidateReferenceStep(Flow flow) + { + var steps = flow.Steps + .Select(step => step as ReferenceFlowStep) + .Where(step => step != null); + + foreach (var step in steps) + { + Verify.NotNullOrWhiteSpace(step!.FlowName); + + if (step.Requires.Any()) + { + throw new ArgumentException("Reference flow step cannot have any direct requirements."); + } + + if (step.Provides.Any()) + { + throw new ArgumentException("Reference flow step cannot have any direct provides."); + } + + if (step.Plugins?.Count != 0) + { + throw new ArgumentException("Reference flow step cannot have any direct plugins."); + } + } + } + + private void ValidatePassthroughVariables(Flow flow) + { + foreach (var step in flow.Steps) + { + if (step.CompletionType != CompletionType.AtLeastOnce + && step.CompletionType != CompletionType.ZeroOrMore + && step.Passthrough.Any()) + { + throw new ArgumentException( + $"step={step.Goal} with completion type={step.CompletionType} cannot have passthrough variables as that is only applicable for the AtLeastOnce or ZeroOrMore completion types"); + } + } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Model/CompletionType.cs b/dotnet/src/Experimental/Orchestration.Flow/Model/CompletionType.cs new file mode 100644 index 000000000000..fda7de9f8d03 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Model/CompletionType.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// The completion type of step +/// +public enum CompletionType +{ + /// + /// Once + /// + Once, + + /// + /// Optional + /// + Optional, + + /// + /// At least once + /// + AtLeastOnce, + + /// + /// Optional or multiple times + /// + ZeroOrMore, +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Model/Flow.cs b/dotnet/src/Experimental/Orchestration.Flow/Model/Flow.cs new file mode 100644 index 000000000000..da78aba9cf28 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Model/Flow.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft. All rights reserved. 
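A brief sketch of the validation behavior (editor's illustration, not part of this diff; the goal text is hypothetical): a ZeroOrMore step with no StartingMessage fails validation, which is one of the checks FlowOrchestrator relies on before executing a flow.

    var flow = new Flow("SampleFlow", "Collect a city name from the user");
    var step = new FlowStep("Ask the user which city they are interested in")
    {
        CompletionType = CompletionType.ZeroOrMore // StartingMessage intentionally left unset
    };
    flow.AddStep(step);

    new FlowValidator().Validate(flow); // throws ArgumentException: missing starting message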
+ +using System.Collections.Generic; +using System.Linq; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Flow data model +/// +/// +/// Principles: +/// 1. The model should be decoupled from execution status +/// 2. The model is mutable to allow dynamic changes +/// 3. The model doesn't enforce any execution order as long as the dependencies are satisfied +/// +public sealed class Flow : FlowStep +{ + private List _steps; + + /// + /// Initializes a new instance of the class. + /// + /// The name of flow + /// The goal of flow + public Flow(string name, string goal) : base(goal, null) + { + this.Name = name; + this._steps = new List(); + } + + /// + /// Steps of the flow + /// + public List Steps + { + get => this._steps; + set => this._steps = value; + } + + /// + /// Friendly name and identifier of the flow + /// + public string Name { get; set; } + + /// + /// Adds a step to the flow + /// + /// the instance + public void AddStep(FlowStep step) + { + this._steps.Add(step); + } + + /// + /// Adds steps to the flow + /// + /// the array of instance to be add + public void AddSteps(params FlowStep[] steps) + { + this._steps.AddRange(steps); + } + + /// + public override IEnumerable Requires + { + get + { + var requires = new List(); + foreach (var step in this._steps) + { + requires.AddRange(step.Requires); + } + + foreach (var step in this._steps) + { + requires.RemoveAll(r => step.Provides.Contains(r)); + } + + return requires; + } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Model/FlowStep.cs b/dotnet/src/Experimental/Orchestration.Flow/Model/FlowStep.cs new file mode 100644 index 000000000000..c659ed4a9617 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Model/FlowStep.cs @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using Microsoft.SemanticKernel.Experimental.Orchestration.Execution; + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// Step within a which defines the step goal, available plugins, required and provided variables. +/// +public class FlowStep +{ + private readonly List _requires = new(); + + private readonly List _provides = new(); + + private readonly List _passthrough = new(); + + private Dictionary _pluginTypes = new(); + + private Func, IEnumerable>? _pluginsFactory; + + /// + /// Initializes a new instance of the class. + /// + /// The goal of step + /// The factory to get plugins + public FlowStep(string goal, Func, IEnumerable>? pluginsFactory = null) + { + this.Goal = goal; + this._pluginsFactory = pluginsFactory; + } + + /// + /// Goal of the step + /// + public string Goal { get; set; } + + /// + /// of the step + /// + public CompletionType CompletionType { get; set; } = CompletionType.Once; + + /// + /// If the CompletionType is CompletionType.ZeroOrMore, this message will be used to ask the user if they want to execute the current step or skip it. + /// + public string? StartingMessage { get; set; } + + /// + /// If the CompletionType is CompletionType.AtLeastOnce or CompletionType.ZeroOrMore, this message will be used to ask the user if they want to try the step again. + /// + public string? 
TransitionMessage { get; set; } = "Did you want to try the previous step again?"; + + /// + /// Parameters required for executing the step + /// + public virtual IEnumerable Requires => this._requires; + + /// + /// Variables to be provided by the step + /// + public IEnumerable Provides => this._provides; + + /// + /// Variables to be passed through on iterations of the step + /// + public IEnumerable Passthrough => this._passthrough; + + /// + /// Gets or sets the plugin available for the current step + /// + public List? Plugins + { + get => this._pluginTypes.Keys.ToList(); + set + { + Dictionary plugins = GetPluginTypes(value); + + this._pluginTypes = plugins; + this._pluginsFactory = (kernel, globalPlugins) => this.GetPlugins(globalPlugins, kernel); + } + } + + private List GetPlugins(Dictionary globalPlugins, Kernel kernel) + { + return this._pluginTypes.Select(kvp => + { + var pluginName = kvp.Key; + var globalPlugin = globalPlugins.FirstOrDefault(_ => _.Key.GetType().Name.Contains(pluginName)).Key; + if (globalPlugin != null) + { + return globalPlugin; + } + + var type = kvp.Value; + if (type != null) + { + try + { + return Activator.CreateInstance(type, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance, null, new object[] { kernel }, null); + } + catch (MissingMethodException) + { + try + { + return Activator.CreateInstance(type, true); + } + catch (MissingMethodException) + { + } + } + } + + return null; + }).Where(plugin => plugin != null).ToList()!; + } + + private static Dictionary GetPluginTypes(List? value) + { + Dictionary plugins = new(); + + if (value is not null) + { + var types = AppDomain.CurrentDomain.GetAssemblies() + .Where(a => !a.IsDynamic) + .SelectMany(a => a.GetTypes()) + .ToList(); + + foreach (var pluginName in value) + { + if (pluginName is null) + { + continue; + } + + var type = types.FirstOrDefault(predicate: t => t.FullName?.Equals(pluginName, StringComparison.OrdinalIgnoreCase) ?? false); + if (type is null) + { + type = types.FirstOrDefault(t => t.FullName?.Contains(pluginName) ?? false); + + if (type is null) + { + // If not found, assume the plugin would be loaded separately. + plugins.Add(pluginName, null); + continue; + } + } + + plugins.Add(pluginName, type); + } + } + + return plugins; + } + + /// + /// Register the required arguments for the step + /// + /// Array of required arguments + public void AddRequires(params string[] requiredArguments) + { + this.ValidateArguments(requiredArguments); + this._requires.AddRange(requiredArguments); + } + + /// + /// Register the arguments provided by the step + /// + /// Array of provided arguments + public void AddProvides(params string[] providedArguments) + { + this.ValidateArguments(providedArguments); + this._provides.AddRange(providedArguments); + } + + /// + /// Register the arguments passed through by the step + /// + /// Array of passthrough arguments + /// Is referenced flow + public void AddPassthrough(string[] passthroughArguments, bool isReferencedFlow = false) + { + // A referenced flow is allowed to have steps that have passthrough arguments even if the completion type is not AtLeastOnce or ZeroOrMore. This is so the step can pass arguments to the outer flow. 
+ if (!isReferencedFlow + && passthroughArguments.Length != 0 + && this.CompletionType != CompletionType.AtLeastOnce + && this.CompletionType != CompletionType.ZeroOrMore) + { + throw new ArgumentException("Passthrough arguments can only be set for the AtLeastOnce or ZeroOrMore completion type"); + } + + this.ValidateArguments(passthroughArguments); + this._passthrough.AddRange(passthroughArguments); + } + + /// + /// Get the plugin instances registered with the step + /// + /// The semantic kernel + /// The global plugins available + /// + public IEnumerable LoadPlugins(Kernel kernel, Dictionary globalPlugins) + { + if (this._pluginsFactory != null) + { + return this._pluginsFactory(kernel, globalPlugins); + } + + return Enumerable.Empty(); + } + + /// + /// Check if the step depends on another step + /// + /// The other step + /// true if the step depends on the other step, false otherwise + public bool DependsOn(FlowStep otherStep) + { + return this.Requires.Intersect(otherStep.Provides).Any(); + } + + private void ValidateArguments(string[] arguments) + { + var invalidArguments = arguments.Intersect(Constants.ActionVariableNames.All).ToArray(); + + if (invalidArguments.Length != 0) + { + throw new ArgumentException($"Invalid arguments: {string.Join(",", invalidArguments)}"); + } + } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Model/ReferenceFlowStep.cs b/dotnet/src/Experimental/Orchestration.Flow/Model/ReferenceFlowStep.cs new file mode 100644 index 000000000000..4dbea359f1cb --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Model/ReferenceFlowStep.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Experimental.Orchestration; + +/// +/// The flow step which references another flow. +/// +public sealed class ReferenceFlowStep : FlowStep +{ + /// + /// Initializes a new instance of the class. + /// + /// The name of referenced flow + public ReferenceFlowStep(string flowName) : base(string.Empty) + { + this.FlowName = flowName; + } + + /// + /// Only for deserialization. + /// + public ReferenceFlowStep() : this(string.Empty) + { + } + + /// + /// Name of reference . + /// + public string FlowName { get; set; } +} diff --git a/dotnet/src/Experimental/Orchestration.Flow/Plugins/CheckRepeatStep.yaml b/dotnet/src/Experimental/Orchestration.Flow/Plugins/CheckRepeatStep.yaml new file mode 100644 index 000000000000..0ef31a45f860 --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Plugins/CheckRepeatStep.yaml @@ -0,0 +1,69 @@ +template_format: semantic-kernel +template: | + [INSTRUCTION] + Work with user to determine if he or she would like to work on the previous step for one more time. + + [THOUGHT PROCESS] + [Goal] + The goal of proposed step. + [THOUGHT] + To solve this problem, I should carefully analyze the previous question and response to identify if user is willing to repeat the action again. + Any facts I discover earlier in my thought process should be repeated here to keep them readily available. + [QUESTION] + If there is any ambiguity in the chat history, ask the follow up question to the user to get clarification on whether the user wants to repeat the previous step. + The way you will check if the user wants to repeat the step is by asking the question "{{$transitionMessage}}". + IMPORTANT: Do NOT update the wording in the question stated above. If you need clarification, you will ask that question word for word. 
+ If the user says something along the lines of "yes", "sure", "fine", "ok", then they are asking to repeat the step. + [RESPONSE] + The result of the action will be provided here. It could be result or error message of the action, or chat history between assistant and user to tackle the problem. + ... (These THOUGHT/QUESTION/RESPONSE can repeat until the final answer is reached.) + [FINAL ANSWER] + Once I have gathered all the necessary observations and can reliably tell if user would like to repeat the step for one more time, output TRUE or FALSE. + [END THOUGHT PROCESS] + + Example: + [Goal] + {{$goal}} + [QUESTION] + {{$transitionMessage}} + [RESPONSE] + yes + [THOUGHT] + Based on the response, the user wants to try the previous step again. + [FINAL ANSWER] + TRUE + + IMPORTANT REMINDER: Your each response MUST contain one of [QUESTION] and [FINAL ANSWER]! + Let's break down the problem step by step and think about the best approach. + + Begin! + + [Goal] + {{$goal}} + {{$agentScratchPad}} + [THOUGHT] +description: Given the chat history, determine if user would like to execute the previous task for one more time. If not concluded, generate the next message for follow up. +name: CheckRepeatStep +input_variables: + - name: goal + description: The goal of proposed step + - name: transitionMessage + description: The transition message + default: Do you want to try the previous step again? + - name: agentScratchPad + description: The agent's scratch pad +execution_settings: + text-davinci-003: + temperature: 0.0 + top_p: 1.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 400 + stop_sequences: ["[RESPONSE]"] + default: + temperature: 0.0 + top_p: 1.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 400 + stop_sequences: ["[RESPONSE]"] diff --git a/dotnet/src/Experimental/Orchestration.Flow/Plugins/CheckStartStep.yaml b/dotnet/src/Experimental/Orchestration.Flow/Plugins/CheckStartStep.yaml new file mode 100644 index 000000000000..960cbfd898ea --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Plugins/CheckStartStep.yaml @@ -0,0 +1,68 @@ +template_format: semantic-kernel +template: | + [INSTRUCTION] + Work with user to determine if he or she would like to execute the current step for the first time. + + [THOUGHT PROCESS] + [Goal] + The goal of proposed step. + [THOUGHT] + To solve this problem, I should carefully analyze the question and response to identify if user is willing to begin the current action. + Any facts I discover earlier in my thought process should be repeated here to keep them readily available. + [QUESTION] + If there is any ambiguity in the chat history, ask the follow up question to the user to get clarification on whether the user wants to repeat the previous step. + The way you will check if the user wants to execute the step is by asking the question "{{$message}}". + IMPORTANT: Do NOT update the wording in the question stated above. If you need clarification, you will ask that question word for word. + If the user says something along the lines of "yes", "sure", "fine", "ok", then they are asking to start the step. + [RESPONSE] + The result of the action will be provided here. It could be result or error message of the action, or chat history between assistant and user to tackle the problem. + ... (These THOUGHT/QUESTION/RESPONSE can repeat until the final answer is reached.) 
+ [FINAL ANSWER] + Once I have gathered all the necessary observations and can reliably tell if user would like to repeat the step for one more time, output TRUE or FALSE. + [END THOUGHT PROCESS] + + Example: + [Goal] + {{$goal}} + [QUESTION] + {{$message}} + [RESPONSE] + yes + [THOUGHT] + Based on the response, the user wants to execute the current step. + [FINAL ANSWER] + TRUE + + IMPORTANT REMINDER: your each response should contain at most one question. Do not provide more than one step. + Let's break down the problem step by step and think about the best approach. + + Begin! + + [Goal] + {{$goal}} + {{$agentScratchPad}} + [THOUGHT] +description: Given the chat history, determine if user would like to execute the previous task for one more time. If not concluded, generate the next message for follow up. +name: CheckRepeatStep +input_variables: + - name: goal + description: The goal of proposed step + - name: message + description: he message to display to the user + - name: agentScratchPad + description: The agent's scratch pad +execution_settings: + text-davinci-003: + temperature: 0.0 + top_p: 1.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 400 + stop_sequences: ["[RESPONSE]"] + default: + temperature: 0.0 + top_p: 1.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 400 + stop_sequences: ["[RESPONSE]"] diff --git a/dotnet/src/Experimental/Orchestration.Flow/Plugins/ReActEngine.gpt4.yaml b/dotnet/src/Experimental/Orchestration.Flow/Plugins/ReActEngine.gpt4.yaml new file mode 100644 index 000000000000..26e3b1d777cf --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Plugins/ReActEngine.gpt4.yaml @@ -0,0 +1,81 @@ +template_format: semantic-kernel +template: | + [INSTRUCTION] + Answer the following questions as accurately as possible using the provided functions. + + [AVAILABLE FUNCTIONS] + The function definitions below are in the following format: + : + - : + - ... + + {{$functionDescriptions}} + [END AVAILABLE FUNCTIONS] + + [USAGE INSTRUCTIONS] + To use the functions, specify a JSON blob representing an action. The JSON blob should contain fully qualified name of the function to use, and an "action_variables" key with a JSON object of string values to use when calling the function. + Do not call functions directly; they must be invoked through an action. + The "action_variables" values should match the defined [PARAMETERS] of the named "action" in [AVAILABLE FUNCTIONS]. + Dictionary values in "action_variables" must be strings and represent the actual values to be passed to the function. + Ensure that the $JSON_BLOB contains only a SINGLE action; do NOT return multiple actions. + IMPORTANT: Use only the available functions listed in the [AVAILABLE FUNCTIONS] section. Do not attempt to use any other functions that are not specified. + The value of parameters should either by empty if the expectation is for the user to provide them and have not been provided yet, or derived from the agent scratchpad. + You are not allowed to ask user directly for more information. + + Here is an example of a valid $JSON_BLOB: + { + "action": "FUNCTION.NAME", + "action_variables": {"INPUT": "some input", "PARAMETER_NAME": "some value", "PARAMETER_NAME_2": "42"} + } + [END USAGE INSTRUCTIONS] + [END INSTRUCTION] + + [THOUGHT PROCESS] + [QUESTION] + The input question I must answer + [THOUGHT] + To solve this problem, I should carefully analyze the given question and identify the necessary steps. 
Any facts I discover earlier in my thought process should be repeated here to keep them readily available. + If there is function which can be leveraged for validation, use it in ACTION before jumping into FINAL ANSWER. + [ACTION] + $JSON_BLOB + [OBSERVATION] + The result of the action will be provided here. It could be result or error message of the action, or chat history between assistant and user to tackle the problem. + ... (These THOUGHT/ACTION/OBSERVATION can repeat until the final answer is reached.) + [FINAL ANSWER] + Once I have gathered all the necessary observations and performed any required actions, if there is a suitable function for validation, provide the final answer in JSON in the following format: + { "action": "$(last_action_name)", "action_variables": {"INPUT": "some input", "PARAMETER_NAME": "some value", "PARAMETER_NAME_2": "42"}} + If there is not a fitting function available to validate the result, I can provide the final answer in a clear and human-readable format. + [END THOUGHT PROCESS] + + IMOPRTANT REMINDER: your each response should contain only one next step and only single one $JSON_BLOB. Do not provide more than one step. + Let's break down the problem step by step and think about the best approach. + + Begin! + + [QUESTION] + {{$question}} + {{$agentScratchPad}} +description: Given a request or command or goal generate multi-step plan to reach the goal. After each step LLM is called to perform the reasoning for the next step. +name: ReActEngine +input_variables: + - name: question + description: The question to answer + - name: agentScratchPad + description: The agent's scratch pad + - name: functionDescriptions + description: The manual of the agent's functions +execution_settings: + text-davinci-003: + temperature: 0.0 + top_p: 1.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 400 + stop_sequences: ["[OBSERVATION]", "[Observation]", "[QUESTION]"] + default: + temperature: 0.1 + top_p: 1.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 400 + stop_sequences: ["[OBSERVATION]", "[Observation]", "[QUESTION]"] diff --git a/dotnet/src/Experimental/Orchestration.Flow/Plugins/ReActEngine.yaml b/dotnet/src/Experimental/Orchestration.Flow/Plugins/ReActEngine.yaml new file mode 100644 index 000000000000..6679bf411eda --- /dev/null +++ b/dotnet/src/Experimental/Orchestration.Flow/Plugins/ReActEngine.yaml @@ -0,0 +1,104 @@ +template_format: semantic-kernel +template: | + [INSTRUCTION] + Answer the following questions as accurately as possible using the provided functions. + + [AVAILABLE FUNCTIONS] + The function definitions below are in the following format: + : + - : + - ... + [END AVAILABLE FUNCTIONS] + + [USAGE INSTRUCTIONS] + To use the functions, specify a JSON blob representing an action. The JSON blob should contain fully qualified name of the function to use, and an "action_variables" key with a JSON object of string values to use when calling the function. + Do not call functions directly; they must be invoked through an action. + + Here is an example of a valid $JSON_BLOB: + ``` + { + "action": "_Namespace_.FUNCTION.NAME", + "action_variables": {"PARAMETER_NAME_1": "some value", "PARAMETER_NAME_2": "42"} + } + ``` + The keys of "action_variables" should match the defined [PARAMETERS] of the named "action" in [AVAILABLE FUNCTIONS]. + Dictionary values in "action_variables" must be strings and represent the actual values to be passed to the function. 
+ Ensure that the $JSON_BLOB contains only a SINGLE action; do NOT return multiple actions. + IMPORTANT: + * Use only the available functions listed in the [AVAILABLE FUNCTIONS] section. + * Do not attempt to use any other functions that are not specified. + * The value of parameters should either be empty if the expectation is for the user to provide them and they have not been provided yet, or derived from the agent scratchpad. + * You are not allowed to ask the user directly for more information. + [END USAGE INSTRUCTIONS] + [END INSTRUCTION] + + [THOUGHT PROCESS] + [QUESTION] + The input question I must answer + [THOUGHT] + To solve this problem, I should carefully analyze the given question and identify the necessary steps. Any facts I discover earlier in my thought process should be repeated here to keep them readily available. + If there is a function which can be leveraged for validation, use it in ACTION before jumping into FINAL ANSWER. + [ACTION] + $JSON_BLOB + [OBSERVATION] + The result of the action will be provided here. It could be the result or error message of the action, or chat history between assistant and user to tackle the problem. + ... (These THOUGHT/ACTION/OBSERVATION can repeat until the final answer is reached.) + [FINAL ANSWER] + Once I have gathered all the necessary observations and performed any required actions, provide the final answer in a clear and human-readable format. + [END THOUGHT PROCESS] + + Example: + [AVAILABLE FUNCTIONS] + AuthorPlugin.WritePoem: useful to write a poem given a style and input + - input: input for the poem + - style: style of the poem, leave empty if not specified + [END AVAILABLE FUNCTIONS] + [QUESTION] + Write a poem about the sun in Whitman's style. + [THOUGHT] + I should use the WritePoem function for it. + [ACTION] + { + "action": "AuthorPlugin.WritePoem", + "action_variables": { + "input": "sun", + "style": "Whitman" + } + } + + IMPORTANT REMINDER: each of your responses should contain only one next step and only a single [ACTION] part. Do not provide more than one step. + Let's break down the problem step by step and think about the best approach. + + Begin! + + [AVAILABLE FUNCTIONS] + {{$functionDescriptions}} + [END AVAILABLE FUNCTIONS] + [QUESTION] + {{$question}} + {{$agentScratchPad}} + [THOUGHT] +description: Given a request, command, or goal, generate a multi-step plan to reach the goal. After each step, the LLM is called to perform the reasoning for the next step.
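The usage instructions in both ReActEngine templates above define the $JSON_BLOB contract the model is expected to emit: a single "action" name plus an "action_variables" dictionary of string values. As a rough illustration of that contract only (not the Flow orchestrator's actual parsing code), a consumer could deserialize such a blob with System.Text.Json; the `ReActAction` record and sample blob below are hypothetical.

```csharp
using System;
using System.Collections.Generic;
using System.Text.Json;
using System.Text.Json.Serialization;

// Hypothetical shape mirroring the $JSON_BLOB format described in the templates above.
internal sealed record ReActAction(
    [property: JsonPropertyName("action")] string Action,
    [property: JsonPropertyName("action_variables")] Dictionary<string, string> ActionVariables);

internal static class ReActActionParser
{
    // Deserialize a single action blob emitted by the model.
    public static ReActAction? Parse(string jsonBlob) =>
        JsonSerializer.Deserialize<ReActAction>(jsonBlob);
}

// Example usage with a blob shaped like the one in the prompt example:
// var action = ReActActionParser.Parse(
//     "{\"action\": \"AuthorPlugin.WritePoem\", \"action_variables\": {\"input\": \"sun\", \"style\": \"Whitman\"}}");
// Console.WriteLine(action?.Action); // AuthorPlugin.WritePoem
```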
+name: ReActEngine +input_variables: + - name: question + description: The question to answer + - name: agentScratchPad + description: The agent's scratch pad + - name: functionDescriptions + description: The manual of the agent's functions +execution_settings: + text-davinci-003: + temperature: 0.0 + top_p: 1.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 400 + stop_sequences: ["[OBSERVATION]", "[Observation]", "[QUESTION]", "[AVAILABLE FUNCTIONS]"] + default: + temperature: 0.1 + top_p: 1.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 400 + stop_sequences: ["[OBSERVATION]", "[Observation]", "[QUESTION]", "[AVAILABLE FUNCTIONS]"] diff --git a/dotnet/src/Extensions/Extensions.UnitTests/Extensions.UnitTests.csproj b/dotnet/src/Extensions/Extensions.UnitTests/Extensions.UnitTests.csproj index 5724cc90ffc4..648f459ff587 100644 --- a/dotnet/src/Extensions/Extensions.UnitTests/Extensions.UnitTests.csproj +++ b/dotnet/src/Extensions/Extensions.UnitTests/Extensions.UnitTests.csproj @@ -1,5 +1,4 @@  - SemanticKernel.Extensions.UnitTests SemanticKernel.Extensions.UnitTests @@ -11,7 +10,6 @@ false CA2007,VSTHRD111 - @@ -26,11 +24,7 @@ - - - - + - - + \ No newline at end of file diff --git a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateFactoryTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateFactoryTests.cs new file mode 100644 index 000000000000..18cc2d343e40 --- /dev/null +++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateFactoryTests.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Xunit; + +using static Extensions.UnitTests.PromptTemplates.Handlebars.TestUtilities; + +namespace SemanticKernel.Extensions.UnitTests.PromptTemplates.Handlebars; + +public sealed class HandlebarsPromptTemplateFactoryTests +{ + [Fact] + public void ItCreatesHandlebarsPromptTemplate() + { + // Arrange + var templateString = "{{input}}"; + var promptConfig = InitializeHbPromptConfig(templateString); + var target = new HandlebarsPromptTemplateFactory(); + + // Act + var result = target.Create(promptConfig); + + // Assert + Assert.NotNull(result); + Assert.True(result is HandlebarsPromptTemplate); + } + + [Fact] + public void ItThrowsExceptionForUnknowPromptTemplateFormat() + { + // Arrange + var templateString = "{{input}}"; + var promptConfig = new PromptTemplateConfig() { TemplateFormat = "unknown-format", Template = templateString }; + var target = new HandlebarsPromptTemplateFactory(); + + // Act + // Assert + Assert.Throws(() => target.Create(promptConfig)); + } +} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTestUtils.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTestUtils.cs new file mode 100644 index 000000000000..6463116a44a5 --- /dev/null +++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTestUtils.cs @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +namespace Extensions.UnitTests.PromptTemplates.Handlebars; + +internal static class TestUtilities +{ + public static PromptTemplateConfig InitializeHbPromptConfig(string template) + { + return new PromptTemplateConfig() + { + TemplateFormat = HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, + Template = template + }; + } +} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs new file mode 100644 index 000000000000..80538e9aff3e --- /dev/null +++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/HandlebarsPromptTemplateTests.cs @@ -0,0 +1,185 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Threading.Tasks; +using HandlebarsDotNet; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Xunit; +using static Extensions.UnitTests.PromptTemplates.Handlebars.TestUtilities; + +namespace SemanticKernel.Extensions.UnitTests.PromptTemplates.Handlebars; + +public sealed class HandlebarsPromptTemplateTests +{ + public HandlebarsPromptTemplateTests() + { + this._factory = new(); + this._kernel = new(); + this._arguments = new() { ["input"] = Guid.NewGuid().ToString("X") }; + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ItInitializesHandlebarsPromptTemplateInstanceCorrectly(bool includeOptions) + { + // Arrange & Act + var template = includeOptions ? + new HandlebarsPromptTemplate(new()) : + new HandlebarsPromptTemplate(new(), new()); + + // Assert + Assert.NotNull(template); + } + + [Fact] + public async Task ItRendersVariablesAsync() + { + // Arrange + var template = "Foo {{bar}}"; + var promptConfig = InitializeHbPromptConfig(template); + var target = (HandlebarsPromptTemplate)this._factory.Create(promptConfig); + this._arguments["bar"] = "Bar"; + + // Act + var prompt = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("Foo Bar", prompt); + } + + [Fact] + public async Task ItUsesDefaultValuesAsync() + { + // Arrange + var template = "Foo {{bar}} {{baz}}{{null}}{{empty}}"; + var promptConfig = InitializeHbPromptConfig(template); + + promptConfig.InputVariables.Add(new() { Name = "bar", Description = "Bar", Default = "Bar" }); + promptConfig.InputVariables.Add(new() { Name = "baz", Description = "Baz", Default = "Baz" }); + promptConfig.InputVariables.Add(new() { Name = "null", Description = "Null", Default = null }); + promptConfig.InputVariables.Add(new() { Name = "empty", Description = "empty", Default = string.Empty }); + + var target = (HandlebarsPromptTemplate)this._factory.Create(promptConfig); + + // Act + var prompt = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("Foo Bar Baz", prompt); + } + + [Fact] + public async Task ItRendersNestedFunctionsAsync() + { + // Arrange + this._kernel.ImportPluginFromObject(new Foo()); + var template = "Foo {{Foo-Bar}} {{Foo-Baz}} {{Foo-Qux (Foo-Bar)}}"; + var promptConfig = InitializeHbPromptConfig(template); + var target = (HandlebarsPromptTemplate)this._factory.Create(promptConfig); + + // Act + var prompt = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("Foo Bar Baz QuxBar", 
prompt); + } + + [Fact] + public async Task ItRendersConditionalStatementsAsync() + { + // Arrange + var template = "Foo {{#if bar}}{{bar}}{{else}}No Bar{{/if}}"; + var promptConfig = InitializeHbPromptConfig(template); + var target = (HandlebarsPromptTemplate)this._factory.Create(promptConfig); + + // Act on positive case + this._arguments["bar"] = "Bar"; + var prompt = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("Foo Bar", prompt); + + // Act on negative case + this._arguments.Remove("bar"); + prompt = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("Foo No Bar", prompt); + } + + [Fact] + public async Task ItRendersLoopsAsync() + { + // Arrange + var template = "List: {{#each items}}{{this}}{{/each}}"; + var promptConfig = InitializeHbPromptConfig(template); + var target = (HandlebarsPromptTemplate)this._factory.Create(promptConfig); + this._arguments["items"] = new List { "item1", "item2", "item3" }; + + // Act + var prompt = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("List: item1item2item3", prompt); + } + + [Fact] + public async Task ItRegistersCustomHelpersAsync() + { + // Arrange + var template = "Custom: {{customHelper}}"; + var promptConfig = InitializeHbPromptConfig(template); + + var options = new HandlebarsPromptTemplateOptions + { + RegisterCustomHelpers = (registerHelper, options, variables) => + { + registerHelper("customHelper", (Context context, Arguments arguments) => + { + return "Custom Helper Output"; + }); + } + }; + + this._factory = new HandlebarsPromptTemplateFactory(options); + var target = (HandlebarsPromptTemplate)this._factory.Create(promptConfig); + + // Act + var prompt = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("Custom: Custom Helper Output", prompt); + } + + #region private + + private HandlebarsPromptTemplateFactory _factory; + private readonly Kernel _kernel; + private readonly KernelArguments _arguments; + + private sealed class Foo + { + [KernelFunction, Description("Return Bar")] + public string Bar() => "Bar"; + + [KernelFunction, Description("Return Baz")] + public async Task BazAsync() + { + await Task.Delay(1000); + return await Task.FromResult("Baz"); + } + + [KernelFunction, Description("Return Qux")] + public async Task QuxAsync(string input) + { + await Task.Delay(1000); + return await Task.FromResult($"Qux{input}"); + } + } + + #endregion +} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelFunctionHelpersTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelFunctionHelpersTests.cs new file mode 100644 index 000000000000..04e58b1d918e --- /dev/null +++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelFunctionHelpersTests.cs @@ -0,0 +1,231 @@ +// Copyright (c) Microsoft. All rights reserved. 
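The tests above exercise HandlebarsPromptTemplateFactory and HandlebarsPromptTemplate.RenderAsync end to end: variable substitution, defaults, nested kernel functions, conditionals, loops, and custom helpers. Outside the test fixtures, the same flow could look roughly like the sketch below; it relies only on the APIs used in these tests, and the template string and argument values are made up for illustration.

```csharp
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.PromptTemplates.Handlebars;

// Minimal sketch: build a Handlebars prompt template and render it with arguments.
var kernel = new Kernel();
var factory = new HandlebarsPromptTemplateFactory();

var config = new PromptTemplateConfig
{
    TemplateFormat = HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat,
    Template = "Hello {{name}}! Today is {{day}}." // hypothetical template
};

var template = factory.Create(config);
var arguments = new KernelArguments { ["name"] = "Alice", ["day"] = "Monday" };

string prompt = await template.RenderAsync(kernel, arguments);
// Per the variable-substitution behavior verified above, this renders: "Hello Alice! Today is Monday."
```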
+ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Threading.Tasks; +using HandlebarsDotNet; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Xunit; +using static Extensions.UnitTests.PromptTemplates.Handlebars.TestUtilities; + +namespace SemanticKernel.Extensions.UnitTests.PromptTemplates.Handlebars.Helpers; + +public sealed class KernelFunctionHelpersTests +{ + public KernelFunctionHelpersTests() + { + this._factory = new(); + this._kernel = new(); + this._arguments = new() { ["input"] = Guid.NewGuid().ToString("X") }; + } + + [Fact] + public async Task ItRendersFunctionsAsync() + { + // Arrange and Act + var template = "Foo {{Foo-Bar}}"; + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("Foo Bar", result); + } + + [Fact] + public async Task ItRendersAsyncFunctionsAsync() + { + // Arrange and Act + var template = "Foo {{Foo-Bar}} {{Foo-Baz}}"; + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("Foo Bar Baz", result); + } + + [Fact] + public async Task ItRendersFunctionHelpersWithPositionalArgumentsAsync() + { + // Arrange and Act + var template = "{{Foo-Combine \"Bar\" \"Baz\"}}"; // Use positional arguments instead of hashed arguments + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("BazBar", result); + } + + [Fact] + public async Task ItThrowsExceptionWhenPositionalArgumentHasInvalidTypeAsync() + { + // Arrange + var template = "{{Foo-StringifyInt \"twelve\"}}"; + + // Act and Assert + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template)); + + Assert.Contains("Invalid parameter type for function", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ItThrowsExceptionWhenPositionalArgumentNumberIsIncorrectAsync() + { + // Arrange + var template = "{{Foo-Combine \"Bar\"}}"; + + // Act and Assert + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template)); + + Assert.Contains("Invalid parameter count for function", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ItRendersFunctionHelpersWitHashArgumentsAsync() + { + // Arrange and Act + var template = "{{Foo-Combine x=\"Bar\" y=\"Baz\"}}"; // Use positional arguments instead of hashed arguments + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("BazBar", result); + } + + [Fact] + public async Task ShouldThrowExceptionWhenMissingRequiredParameterAsync() + { + // Arrange and Act + var template = "{{Foo-Combine x=\"Bar\"}}"; + + // Assert + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template)); + Assert.Matches("Parameter .* is required for function", exception.Message); + } + + [Fact] + public async Task ShouldThrowExceptionWhenArgumentsAreNotProvidedAsync() + { + // Arrange + var template = "{{Foo-Combine}}"; + + // Act and Assert + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template)); + Assert.Matches("No arguments are provided for .*", exception.Message); + } + + [Fact] + public async Task ShouldThrowExceptionWhenFunctionHelperHasInvalidParameterTypeAsync() + { + // Arrange and Act + var template = "{{Foo-StringifyInt x=\"twelve\"}}"; + + // Assert + var exception = await Assert.ThrowsAsync(() => 
this.RenderPromptTemplateAsync(template)); + Assert.Contains("Invalid argument type", exception.Message, StringComparison.CurrentCultureIgnoreCase); + } + + [Fact] + public async Task ShouldThrowExceptionWhenFunctionHelperIsNotDefinedAsync() + { + // Arrange and Act + var template = "{{Foo-Random x=\"random\"}}"; + + // Assert + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template)); + Assert.Contains("Template references a helper that cannot be resolved", exception.Message, StringComparison.CurrentCultureIgnoreCase); + } + + [Fact] + public async Task ItCanReturnChatMessageContentAsync() + { + // Arrange + var template = "{{Foo-ChatMessageContent \"user\" \"User content\"}}"; + + // Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("User content", result); + } + + [Theory] + [InlineData("{{Foo-RestApiOperationResponse \"text\" \"text/plain\"}}", "text")] + [InlineData("{{Foo-RestApiOperationResponse \'{\"key\":\"value\"}\' \'application/json\'}}", "[key, value]")] + public async Task ItCanReturnRestApiOperationResponseAsync(string template, string expectedResult) + { + // Arrange and Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal(expectedResult, result); + } + + [Fact] + public async Task ItCanReturnCustomReturnTypeAsync() + { + // Arrange + var template = "{{Foo-CustomReturnType \"text\"}}"; + + // Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("text", result); + } + + private readonly HandlebarsPromptTemplateFactory _factory; + private readonly Kernel _kernel; + private readonly KernelArguments _arguments; + + private async Task RenderPromptTemplateAsync(string template) + { + // Arrange + this._kernel.ImportPluginFromObject(new Foo()); + var resultConfig = InitializeHbPromptConfig(template); + var target = (HandlebarsPromptTemplate)this._factory.Create(resultConfig); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + return result; + } + + private sealed class Foo + { + [KernelFunction, Description("Return Bar")] + public string Bar() => "Bar"; + + [KernelFunction, Description("Return Baz")] + public async Task BazAsync() + { + await Task.Delay(1000); + return await Task.FromResult("Baz"); + } + + [KernelFunction, Description("Return words concatenated")] + public string Combine([Description("First word")] string x, [Description("Second word")] string y) => y + x; + + [KernelFunction, Description("Return number as string")] + public string StringifyInt([Description("Number to stringify")] int x) => x.ToString(CultureInfo.InvariantCulture); + + [KernelFunction, Description("Return ChatMessageContent")] + public ChatMessageContent ChatMessageContent(string role, string content) => new(new AuthorRole(role), content); + + [KernelFunction, Description("Return RestApiOperationResponse")] + public RestApiOperationResponse RestApiOperationResponse(string content, string contentType) => new(content, contentType); + + [KernelFunction, Description("Return CustomReturnType")] + public CustomReturnType CustomReturnType(string textProperty) => new(textProperty); + } + + private sealed class CustomReturnType + { + public CustomReturnType(string textProperty) + { + this.TextProperty = textProperty; + } + + public string TextProperty { get; set; } + + public override string ToString() => this.TextProperty; + } +} diff --git 
a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelHelperUtilsTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelHelperUtilsTests.cs new file mode 100644 index 000000000000..e009cd61d470 --- /dev/null +++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelHelperUtilsTests.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Globalization; +using HandlebarsDotNet; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars.Helpers; +using Xunit; + +namespace SemanticKernel.Extensions.UnitTests.PromptTemplates.Handlebars.Helpers; + +public class KernelHelperUtilsTests +{ + [Fact] + public void ItRegistersHelperWhenNameIsUnique() + { + // Arrange + var handlebarsInstance = HandlebarsDotNet.Handlebars.Create(); + string helperName = "uniqueHelper"; + static object helper(Context context, Arguments arguments) => "Unique Helper Output"; + + // Act + KernelHelpersUtils.RegisterHelperSafe(handlebarsInstance, helperName, (HandlebarsReturnHelper)helper); + + // Assert + Assert.True(handlebarsInstance.Configuration.Helpers.ContainsKey(helperName)); + } + + [Fact] + public void ItThrowsInvalidOperationExceptionWhenNameIsAlreadyRegistered() + { + // Arrange + var handlebarsInstance = HandlebarsDotNet.Handlebars.Create(); + string helperName = "alreadyRegisteredHelper"; + object helper1(Context context, Arguments arguments) => "Helper 1 Output"; + object helper2(Context context, Arguments arguments) => "Helper 2 Output"; + handlebarsInstance.RegisterHelper(helperName, (HandlebarsReturnHelper)helper1); + + // Act & Assert + Assert.Throws(() => KernelHelpersUtils.RegisterHelperSafe(handlebarsInstance, helperName, (HandlebarsReturnHelper)helper2)); + } + + [Theory] + [InlineData(null, false)] + [InlineData(typeof(string), false)] + [InlineData(typeof(nuint), true)] + [InlineData(typeof(nint), true)] + [InlineData(typeof(sbyte), true)] + [InlineData(typeof(short), true)] + [InlineData(typeof(int), true)] + [InlineData(typeof(long), true)] + [InlineData(typeof(byte), true)] + [InlineData(typeof(ushort), true)] + [InlineData(typeof(uint), true)] + [InlineData(typeof(ulong), true)] + [InlineData(typeof(double), true)] + [InlineData(typeof(float), true)] + [InlineData(typeof(decimal), true)] + public void IsNumericTypeWorksCorrectly(Type? 
type, bool expectedResult) + { + Assert.Equal(expectedResult, KernelHelpersUtils.IsNumericType(type)); + } + + [Theory] + [MemberData(nameof(NumberInputs))] + public void TryParseAnyNumberWorksCorrectly(string number, bool expectedResult) + { + Assert.Equal(expectedResult, KernelHelpersUtils.TryParseAnyNumber(number)); + } + + public static TheoryData NumberInputs => new() + { + { 1234567890123456789L.ToString(CultureInfo.InvariantCulture), true }, + { 9876543210987654321UL.ToString(CultureInfo.InvariantCulture), true }, + { 123.456.ToString(CultureInfo.InvariantCulture), true }, + { 123456789.0123456789m.ToString(CultureInfo.InvariantCulture), true }, + { "test", false }, + }; +} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelSystemHelpersTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelSystemHelpersTests.cs new file mode 100644 index 000000000000..c413e050cb5c --- /dev/null +++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelSystemHelpersTests.cs @@ -0,0 +1,297 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using HandlebarsDotNet; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Xunit; +using static Extensions.UnitTests.PromptTemplates.Handlebars.TestUtilities; + +namespace SemanticKernel.Extensions.UnitTests.PromptTemplates.Handlebars.Helpers; + +public sealed class KernelSystemHelpersTests +{ + public KernelSystemHelpersTests() + { + this._factory = new(); + this._kernel = new(); + this._arguments = new() { ["input"] = Guid.NewGuid().ToString("X") }; + } + + [Fact] + public async Task ItRendersTemplateWithMessageHelperAsync() + { + // Arrange + var template = "{{#message role=\"title\"}}Hello World!{{/message}}"; + + // Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("Hello World!", result); + } + + [Theory] + [InlineData("{{set name=\"x\" value=10}}{{json x}}")] + [InlineData("{{set \"x\" 10}}{{json x}}")] + public async Task ItRendersTemplateWithSetHelperAsync(string template) + { + // Arrange + var arguments = new KernelArguments(); + + // Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("10", result); + } + + [Theory] + [MemberData(nameof(JsonObjectsToParse))] + public async Task ItRendersTemplateWithJsonHelperAsync(object json) + { + // Arrange + var template = "{{json person}}"; + var arguments = new KernelArguments + { + { "person", json } + }; + + // Act + var result = await this.RenderPromptTemplateAsync(template, arguments); + + // Assert + Assert.Equal("{\"name\":\"Alice\",\"age\":25}", result); + } + + [Fact] + public async Task ItThrowsExceptionWithJsonHelperWithoutArgumentsAsync() + { + // Arrange + var template = "{{json}}"; + + // Act + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template)); + + // Assert + Assert.Equal("`json` helper requires a value to be passed in.", exception.Message); + } + + [Fact] + public async Task ComplexVariableTypeReturnsObjectAsync() + { + // Arrange + var template = "{{person}}"; + var arguments = new KernelArguments + { + { "person", new { name = "Alice", age = 25 } } + }; + + // Act + var result = await this.RenderPromptTemplateAsync(template, arguments); + + // Assert + Assert.Equal("{ name = Alice, age = 25 }", result); + } + + [Fact] + public 
async Task VariableWithPropertyReferenceReturnsPropertyValueAsync() + { + // Arrange + var template = "{{person.name}}"; + var arguments = new KernelArguments + { + { "person", new { name = "Alice", age = 25 } } + }; + + // Act + var result = await this.RenderPromptTemplateAsync(template, arguments); + + // Assert + Assert.Equal("Alice", result); + } + + [Fact] + public async Task VariableWithNestedObjectReturnsNestedObjectAsync() + { + // Arrange + var template = "{{person.Address}}"; + var arguments = new KernelArguments + { + { "person", new { Name = "Alice", Age = 25, Address = new { City = "New York", Country = "USA" } } } + }; + + // Act + var result = await this.RenderPromptTemplateAsync(template, arguments); + + // Assert + Assert.Equal("{ City = New York, Country = USA }", result); + } + + [Fact] + public async Task ItRendersTemplateWithArrayHelperAsync() + { + // Arrange + var template = "{{#each (array 1 2 3)}}{{this}}{{/each}}"; + + // Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("123", result); + } + + [Fact] + public async Task ItRendersTemplateWithArrayHelperAndVariableReferenceAsync() + { + // Arrange + var template = @"{{array ""hi"" "" "" name ""!"" ""Welcome to"" "" "" Address.City}}"; + var arguments = new KernelArguments + { + { "name", "Alice" }, + { "Address", new { City = "New York", Country = "USA" } } + }; + + // Act + var result = await this.RenderPromptTemplateAsync(template, arguments); + + // Assert + Assert.Equal("hi, ,Alice,!,Welcome to, ,New York", result); + } + + [Fact] + public async Task ItRendersTemplateWithRawHelperAsync() + { + // Arrange + var template = "{{{{raw}}}}{{x}}{{{{/raw}}}}"; + + // Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("{{x}}", result); + } + + [Fact] + public async Task ItRendersTemplateWithRangeHelperAsync() + { + // Arrange + var template = "{{#each (range 1 5)}}{{this}}{{/each}}"; + + // Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("12345", result); + } + + [Fact] + public async Task ItRendersTemplateWithConcatHelperAsync() + { + // Arrange + var template = "{{concat \"Hello\" \" \" name \"!\"}}"; + var arguments = new KernelArguments + { + { "name", "Alice" } + }; + + // Act + var result = await this.RenderPromptTemplateAsync(template, arguments); + + // Assert + Assert.Equal("Hello Alice!", result); + } + + [Fact] + public async Task ItRendersTemplateWithdSetAndConcatHelpersAsync() + { + // Arrange + var template = "{{set name=\"name\" value=\"Alice\"}}{{concat \"Hello\" \" \" name \"!\"}}"; + + // Act + var result = await this.RenderPromptTemplateAsync(template); + + // Assert + Assert.Equal("Hello Alice!", result); + } + + [Theory] + [InlineData("{{or true true}}", "True")] + [InlineData("{{or true false}}", "True")] + [InlineData("{{or false false}}", "False")] + [InlineData("{{or x x}}", "True")] + [InlineData("{{or x y}}", "True")] + [InlineData("{{or x z}}", "True")] + [InlineData("{{or y y}}", "False")] + [InlineData("{{or y z}}", "False")] + [InlineData("{{or z z}}", "False")] + public async Task ItRendersTemplateWithOrHelperAsync(string template, string expectedResult) + { + // Arrange + var arguments = new KernelArguments { { "x", true }, { "y", false }, { "z", null } }; + + // Act + var result = await this.RenderPromptTemplateAsync(template, arguments); + + // Assert + Assert.Equal(expectedResult, result); + } + + [Theory] + [InlineData("{{#if (equals x 
y)}}Equal{{else}}Not equal{{/if}}", "Equal")] + [InlineData("{{#if (equals x)}}Equal{{else}}Not equal{{/if}}", "Not equal")] + [InlineData("{{#if (equals a b)}}Equal{{else}}Not equal{{/if}}", "Not equal")] + [InlineData("{{#if (equals b z)}}Equal{{else}}Not equal{{/if}}", "Equal")] + public async Task ItRendersTemplateWithEqualHelperAsync(string template, string expectedResult) + { + // Arrange + var arguments = new KernelArguments { { "x", 10 }, { "y", 10 }, { "a", null }, { "b", "test" }, { "z", "test" } }; + + // Act + var result = await this.RenderPromptTemplateAsync(template, arguments); + + // Assert + Assert.Equal(expectedResult, result); + } + + [Fact] + public async Task ItThrowsExceptionIfMessageDoesNotContainRoleAsync() + { + // Arrange + var template = "{{#message attribute=\"value\"}}Hello World!{{/message}}"; + + // Act & Assert + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template)); + + // Assert + Assert.Equal("Message must have a role.", exception.Message); + } + + public static TheoryData JsonObjectsToParse => new() + { + new { name = "Alice", age = 25 }, + "{\"name\":\"Alice\",\"age\":25}", + JsonNode.Parse("{\"name\":\"Alice\",\"age\":25}")! + }; + + #region private + + private readonly HandlebarsPromptTemplateFactory _factory; + private readonly Kernel _kernel; + private readonly KernelArguments _arguments; + + private async Task RenderPromptTemplateAsync(string template, KernelArguments? args = null) + { + var resultConfig = InitializeHbPromptConfig(template); + var target = (HandlebarsPromptTemplate)this._factory.Create(resultConfig); + + // Act + var result = await target.RenderAsync(this._kernel, args); + + return result; + } + + #endregion +} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Basic/BasicHttpRetryHandlerTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Basic/BasicHttpRetryHandlerTests.cs deleted file mode 100644 index 94751de1b100..000000000000 --- a/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Basic/BasicHttpRetryHandlerTests.cs +++ /dev/null @@ -1,678 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Net; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Reliability.Basic; -using Moq; -using Moq.Protected; -using Xunit; - -namespace SemanticKernel.Extensions.UnitTests.Reliability.Basic; - -public class BasicHttpRetryHandlerTests -{ - [Theory] - [InlineData(HttpStatusCode.RequestTimeout)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - [InlineData(HttpStatusCode.TooManyRequests)] - public async Task NoMaxRetryCountCallsOnceForStatusAsync(HttpStatusCode statusCode) - { - // Arrange - using var retry = new BasicHttpRetryHandler(new BasicRetryConfig() { MaxRetryCount = 0 }, NullLoggerFactory.Instance); - using var mockResponse = new HttpResponseMessage(statusCode); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, response.StatusCode); - } - - [Theory] - [InlineData(HttpStatusCode.RequestTimeout)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - [InlineData(HttpStatusCode.TooManyRequests)] - public async Task ItRetriesOnceOnRetryableStatusAsync(HttpStatusCode statusCode) - { - // Arrange - using var retry = ConfigureRetryHandler(); - using var mockResponse = new HttpResponseMessage(statusCode); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, response.StatusCode); - } - - [Theory] - [InlineData(typeof(HttpRequestException))] - public async Task ItRetriesOnceOnRetryableExceptionAsync(Type exceptionType) - { - // Arrange - using var retry = ConfigureRetryHandler(); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(exceptionType); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await Assert.ThrowsAsync(exceptionType, - async () => await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - } - - [Theory] - [InlineData(typeof(HttpRequestException))] - public async Task NoMaxRetryCountCallsOnceForExceptionAsync(Type exceptionType) - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig() { MaxRetryCount = 0 }); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(exceptionType); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await 
Assert.ThrowsAsync(exceptionType, - async () => await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - } - - [Theory] - [InlineData(HttpStatusCode.RequestTimeout)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - [InlineData(HttpStatusCode.TooManyRequests)] - public async Task ItRetriesOnceOnTransientStatusWithExponentialBackoffAsync(HttpStatusCode statusCode) - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig() { UseExponentialBackoff = true }); - using var mockResponse = new HttpResponseMessage(statusCode); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, response.StatusCode); - } - - [Theory] - [InlineData(typeof(HttpRequestException))] - public async Task ItRetriesOnceOnRetryableExceptionWithExponentialBackoffAsync(Type exceptionType) - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig() { UseExponentialBackoff = true }); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(exceptionType); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await Assert.ThrowsAsync(exceptionType, - async () => await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - } - - [Theory] - [InlineData(HttpStatusCode.RequestTimeout)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - public async Task ItRetriesExponentiallyWithExponentialBackoffAsync(HttpStatusCode statusCode) - { - // Arrange - var currentTime = DateTimeOffset.UtcNow; - var mockTimeProvider = new Mock(); - var mockDelayProvider = new Mock(); - mockTimeProvider.SetupSequence(x => x.GetCurrentTime()) - .Returns(() => currentTime) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(5)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(510)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(1015)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(1520)); - using var retry = ConfigureRetryHandler(new BasicRetryConfig() - { - UseExponentialBackoff = true, MaxRetryCount = 3, - MinRetryDelay = TimeSpan.FromMilliseconds(500) - }, mockTimeProvider, mockDelayProvider); - using var mockResponse = new HttpResponseMessage(statusCode); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(4), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, 
response.StatusCode); - mockTimeProvider.Verify(x => x.GetCurrentTime(), Times.Exactly(4)); - mockDelayProvider.Verify(x => x.DelayAsync(TimeSpan.FromMilliseconds(500), It.IsAny()), Times.Once); - mockDelayProvider.Verify(x => x.DelayAsync(TimeSpan.FromMilliseconds(1000), It.IsAny()), Times.Once); - mockDelayProvider.Verify(x => x.DelayAsync(TimeSpan.FromMilliseconds(2000), It.IsAny()), Times.Once); - } - - [Theory] - [InlineData(HttpStatusCode.RequestTimeout)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - public async Task ItRetriesOnceOnTransientStatusCodeWithRetryValueAsync(HttpStatusCode statusCode) - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig(), null); - using var mockResponse = new HttpResponseMessage() - { - StatusCode = statusCode, - Headers = { RetryAfter = new RetryConditionHeaderValue(new TimeSpan(0, 0, 0, 1)) }, - }; - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - using var testContent = new StringContent("test"); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, response.StatusCode); - Assert.Equal(new TimeSpan(0, 0, 0, 1), response.Headers.RetryAfter?.Delta); - } - - [Theory] - [InlineData(HttpStatusCode.RequestTimeout)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - public async Task ItRetriesStatusCustomCountAsync(HttpStatusCode expectedStatus) - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig() { MaxRetryCount = 3 }, null); - using var mockResponse = new HttpResponseMessage(expectedStatus); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(4), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(expectedStatus, response.StatusCode); - } - - [Theory] - [InlineData(typeof(HttpRequestException))] - public async Task ItRetriesExceptionsCustomCountAsync(Type expectedException) - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig() { MaxRetryCount = 3 }, null); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(expectedException); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await Assert.ThrowsAsync(expectedException, - async () => await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(4), ItExpr.IsAny(), ItExpr.IsAny()); - } - - [Fact] - public async Task NoExceptionNoRetryAsync() - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig() { MaxRetryCount = 3 }, null); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.OK); - using var testContent = new StringContent("test"); - var mockHandler = 
GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(1), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - } - - [Fact] - public async Task ItDoesNotExecuteOnCancellationTokenAsync() - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig() { MaxRetryCount = 3 }, null); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.OK); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - var cancellationToken = new CancellationToken(true); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await Assert.ThrowsAsync(async () => - await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, cancellationToken)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Never(), ItExpr.IsAny(), ItExpr.IsAny()); - } - - [Fact] - public async Task ItDoestExecuteOnFalseCancellationTokenAsync() - { - // Arrange - using var retry = ConfigureRetryHandler(new BasicRetryConfig() { MaxRetryCount = 3 }, null); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.OK); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - var cancellationToken = new CancellationToken(false); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, cancellationToken); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(1), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - } - - [Fact] - public async Task ItRetriesWithMinRetryDelayAsync() - { - var BasicRetryConfig = new BasicRetryConfig - { - MinRetryDelay = TimeSpan.FromMilliseconds(500) - }; - - var mockDelayProvider = new Mock(); - var mockTimeProvider = new Mock(); - - var currentTime = DateTimeOffset.UtcNow; - - mockTimeProvider.SetupSequence(x => x.GetCurrentTime()) - .Returns(() => currentTime) - .Returns(() => currentTime.AddMilliseconds(5)) - .Returns(() => currentTime.AddMilliseconds(510)); - - mockDelayProvider.Setup(x => x.DelayAsync(It.IsAny(), It.IsAny())) - .Returns(() => Task.CompletedTask); - - using var retry = ConfigureRetryHandler(BasicRetryConfig, mockTimeProvider, mockDelayProvider); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.TooManyRequests); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockTimeProvider.Verify(x => x.GetCurrentTime(), Times.Exactly(2)); - mockDelayProvider.Verify(x => x.DelayAsync(TimeSpan.FromMilliseconds(500), It.IsAny()), Times.Once); - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.TooManyRequests, response.StatusCode); - } - - [Fact] 
- public async Task ItRetriesWithMaxRetryDelayAsync() - { - var BasicRetryConfig = new BasicRetryConfig - { - MinRetryDelay = TimeSpan.FromMilliseconds(1), - MaxRetryDelay = TimeSpan.FromMilliseconds(500) - }; - - var mockDelayProvider = new Mock(); - var mockTimeProvider = new Mock(); - - var currentTime = DateTimeOffset.UtcNow; - - mockTimeProvider.SetupSequence(x => x.GetCurrentTime()) - .Returns(() => currentTime) - .Returns(() => currentTime.AddMilliseconds(5)) - .Returns(() => currentTime.AddMilliseconds(505)); - - mockDelayProvider.Setup(x => x.DelayAsync(It.IsAny(), It.IsAny())) - .Returns(() => Task.CompletedTask); - - using var retry = ConfigureRetryHandler(BasicRetryConfig, mockTimeProvider, mockDelayProvider); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.TooManyRequests) - { - Headers = { RetryAfter = new RetryConditionHeaderValue(TimeSpan.FromMilliseconds(2000)) } - }; - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockTimeProvider.Verify(x => x.GetCurrentTime(), Times.Exactly(2)); - mockDelayProvider.Verify(x => x.DelayAsync(TimeSpan.FromMilliseconds(500), It.IsAny()), Times.Once); - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.TooManyRequests, response.StatusCode); - Assert.Equal(TimeSpan.FromMilliseconds(2000), response.Headers.RetryAfter?.Delta); - } - - [Theory] - [InlineData(HttpStatusCode.TooManyRequests)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - [InlineData(HttpStatusCode.RequestTimeout)] - public async Task ItRetriesWithMaxTotalDelayAsync(HttpStatusCode statusCode) - { - // Arrange - var BasicRetryConfig = new BasicRetryConfig - { - MaxRetryCount = 5, - MinRetryDelay = TimeSpan.FromMilliseconds(50), - MaxRetryDelay = TimeSpan.FromMilliseconds(50), - MaxTotalRetryTime = TimeSpan.FromMilliseconds(350) - }; - - var mockDelayProvider = new Mock(); - var mockTimeProvider = new Mock(); - - var currentTime = DateTimeOffset.UtcNow; - mockTimeProvider.SetupSequence(x => x.GetCurrentTime()) - .Returns(() => currentTime) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(5)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(55)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(110)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(165)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(220)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(275)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(330)); - - using var retry = ConfigureRetryHandler(BasicRetryConfig, mockTimeProvider, mockDelayProvider); - - using var mockResponse = new HttpResponseMessage(statusCode); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockTimeProvider.Verify(x => x.GetCurrentTime(), Times.Exactly(6)); - mockDelayProvider.Verify(x => x.DelayAsync(TimeSpan.FromMilliseconds(50), It.IsAny()), 
Times.Exactly(5)); - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(6), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, response.StatusCode); - } - - [Fact] - public async Task ItRetriesFewerWithMaxTotalDelayAsync() - { - // Arrange - var BasicRetryConfig = new BasicRetryConfig - { - MaxRetryCount = 5, - MinRetryDelay = TimeSpan.FromMilliseconds(50), - MaxRetryDelay = TimeSpan.FromMilliseconds(50), - MaxTotalRetryTime = TimeSpan.FromMilliseconds(100) - }; - - var mockDelayProvider = new Mock(); - var mockTimeProvider = new Mock(); - - var currentTime = DateTimeOffset.UtcNow; - mockTimeProvider.SetupSequence(x => x.GetCurrentTime()) - .Returns(() => currentTime) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(5)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(55)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(110)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(165)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(220)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(275)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(330)); - - using var retry = ConfigureRetryHandler(BasicRetryConfig, mockTimeProvider, mockDelayProvider); - - using var mockResponse = new HttpResponseMessage(HttpStatusCode.TooManyRequests); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockTimeProvider.Verify(x => x.GetCurrentTime(), Times.Exactly(4)); // 1 initial, 2 retries, 1 for logging time taken. 
- mockDelayProvider.Verify(x => x.DelayAsync(TimeSpan.FromMilliseconds(50), It.IsAny()), Times.Exactly(1)); - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.TooManyRequests, response.StatusCode); - } - - [Fact] - public async Task ItRetriesFewerWithMaxTotalDelayOnExceptionAsync() - { - // Arrange - var BasicRetryConfig = new BasicRetryConfig - { - MaxRetryCount = 5, - MinRetryDelay = TimeSpan.FromMilliseconds(50), - MaxRetryDelay = TimeSpan.FromMilliseconds(50), - MaxTotalRetryTime = TimeSpan.FromMilliseconds(100) - }; - - var mockDelayProvider = new Mock(); - var mockTimeProvider = new Mock(); - - var currentTime = DateTimeOffset.UtcNow; - mockTimeProvider.SetupSequence(x => x.GetCurrentTime()) - .Returns(() => currentTime) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(5)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(55)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(110)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(165)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(220)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(275)) - .Returns(() => currentTime + TimeSpan.FromMilliseconds(330)); - - using var retry = ConfigureRetryHandler(BasicRetryConfig, mockTimeProvider, mockDelayProvider); - var mockHandler = GetHttpMessageHandlerMock(typeof(HttpRequestException)); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - await Assert.ThrowsAsync(() => httpClient.GetAsync(new Uri("https://www.microsoft.com"), CancellationToken.None)); - - // Assert - mockTimeProvider.Verify(x => x.GetCurrentTime(), Times.Exactly(4)); // 1 initial, 2 retries, 1 for logging time taken. 
- mockDelayProvider.Verify(x => x.DelayAsync(TimeSpan.FromMilliseconds(50), It.IsAny()), Times.Exactly(1)); - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - } - - [Fact] - public async Task ItRetriesOnRetryableStatusCodesAsync() - { - // Arrange - var config = new BasicRetryConfig() { RetryableStatusCodes = new List { HttpStatusCode.Unauthorized } }; - using var retry = ConfigureRetryHandler(config); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.Unauthorized); - - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); - } - - [Fact] - public async Task ItDoesNotRetryOnNonRetryableStatusCodesAsync() - { - // Arrange - var config = new BasicRetryConfig() { RetryableStatusCodes = new List { HttpStatusCode.Unauthorized } }; - using var retry = ConfigureRetryHandler(config); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.TooManyRequests); - - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.TooManyRequests, response.StatusCode); - } - - [Fact] - public async Task ItRetriesOnRetryableExceptionsAsync() - { - // Arrange - var config = new BasicRetryConfig() { RetryableExceptionTypes = new List { typeof(InvalidOperationException) } }; - using var retry = ConfigureRetryHandler(config); - - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(typeof(InvalidOperationException)); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - await Assert.ThrowsAsync(async () => - await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(2), ItExpr.IsAny(), ItExpr.IsAny()); - } - - [Fact] - public async Task ItDoesNotRetryOnNonRetryableExceptionsAsync() - { - // Arrange - var config = new BasicRetryConfig() { RetryableExceptionTypes = new List { typeof(InvalidOperationException) } }; - using var retry = ConfigureRetryHandler(config); - - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(typeof(ArgumentException)); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - await Assert.ThrowsAsync(async () => - await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - } - - private static BasicHttpRetryHandler ConfigureRetryHandler(BasicRetryConfig? config = null, - Mock? 
timeProvider = null, Mock? delayProvider = null) - { - delayProvider ??= new Mock(); - timeProvider ??= new Mock(); - - var retry = new BasicHttpRetryHandler(config ?? new BasicRetryConfig(), null, delayProvider.Object, timeProvider.Object); - return retry; - } - - private static Mock GetHttpMessageHandlerMock(HttpResponseMessage mockResponse) - { - var mockHandler = new Mock(); - mockHandler.Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(mockResponse); - return mockHandler; - } - - private static Mock GetHttpMessageHandlerMock(Type exceptionType) - { - var mockHandler = new Mock(); - mockHandler.Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ThrowsAsync(Activator.CreateInstance(exceptionType) as Exception); - return mockHandler; - } -} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Basic/BasicRetryConfigTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Basic/BasicRetryConfigTests.cs deleted file mode 100644 index f210722cdf34..000000000000 --- a/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Basic/BasicRetryConfigTests.cs +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Reliability.Basic; -using Xunit; - -namespace SemanticKernel.Extensions.UnitTests.Reliability.Basic; - -/// -/// Unit tests of . -/// -public class BasicRetryConfigTests -{ - [Fact] - public async Task NegativeMaxRetryCountThrowsAsync() - { - // Act - await Assert.ThrowsAsync(() => - { - var BasicRetryConfig = new BasicRetryConfig() { MaxRetryCount = -1 }; - return Task.CompletedTask; - }); - } - - [Fact] - public void SetDefaultBasicRetryConfig() - { - // Arrange - var builder = new KernelBuilder(); - var basicRetryConfig = new BasicRetryConfig() { MaxRetryCount = 3 }; - builder.WithRetryBasic(basicRetryConfig); - - // Act - var kernel = builder.Build(); - - // Assert - Assert.IsType(kernel.HttpHandlerFactory); - var httpHandlerFactory = kernel.HttpHandlerFactory as BasicHttpRetryHandlerFactory; - Assert.NotNull(httpHandlerFactory); - Assert.Equal(basicRetryConfig, httpHandlerFactory.Config); - } - - [Fact] - public void SetDefaultBasicRetryConfigToDefaultIfNotSet() - { - // Arrange - var retryConfig = new BasicRetryConfig(); - var builder = new KernelBuilder(); - builder.WithRetryBasic(retryConfig); - - // Act - var kernel = builder.Build(); - - // Assert - Assert.IsType(kernel.HttpHandlerFactory); - var httpHandlerFactory = kernel.HttpHandlerFactory as BasicHttpRetryHandlerFactory; - Assert.NotNull(httpHandlerFactory); - Assert.Equal(retryConfig.MaxRetryCount, httpHandlerFactory.Config.MaxRetryCount); - Assert.Equal(retryConfig.MaxRetryDelay, httpHandlerFactory.Config.MaxRetryDelay); - Assert.Equal(retryConfig.MinRetryDelay, httpHandlerFactory.Config.MinRetryDelay); - Assert.Equal(retryConfig.MaxTotalRetryTime, httpHandlerFactory.Config.MaxTotalRetryTime); - Assert.Equal(retryConfig.UseExponentialBackoff, httpHandlerFactory.Config.UseExponentialBackoff); - Assert.Equal(retryConfig.RetryableStatusCodes, httpHandlerFactory.Config.RetryableStatusCodes); - Assert.Equal(retryConfig.RetryableExceptionTypes, httpHandlerFactory.Config.RetryableExceptionTypes); - } -} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Polly/PollyHttpRetryHandlerTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Polly/PollyHttpRetryHandlerTests.cs deleted file mode 100644 index 
a42f5f052959..000000000000 --- a/dotnet/src/Extensions/Extensions.UnitTests/Reliability/Polly/PollyHttpRetryHandlerTests.cs +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Reliability.Polly; -using Moq; -using Moq.Protected; -using Polly; -using Polly.Utilities; -using Xunit; - -namespace SemanticKernel.Extensions.UnitTests.Reliability.Polly; - -public sealed class PollyHttpRetryHandlerTests : IDisposable -{ - public PollyHttpRetryHandlerTests() - { - SystemClock.SleepAsync = (_, _) => Task.CompletedTask; - SystemClock.Sleep = (_, _) => { }; - } - - public void Dispose() - { - SystemClock.Reset(); - } - - [Theory] - [InlineData(HttpStatusCode.RequestTimeout)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - [InlineData(HttpStatusCode.TooManyRequests)] - public async Task CustomPolicyNoOpShouldNotAvoidSendRequestsAsync(HttpStatusCode statusCode) - { - // Arrange - var asyncPolicy = Policy.NoOpAsync(); - var (mockLoggerFactory, mockLogger) = GetLoggerMocks(); - using var retry = new PollyHttpRetryHandler(asyncPolicy); - using var mockResponse = new HttpResponseMessage(statusCode); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, response.StatusCode); - } - - [Theory] - [InlineData(HttpStatusCode.RequestTimeout)] - [InlineData(HttpStatusCode.ServiceUnavailable)] - [InlineData(HttpStatusCode.GatewayTimeout)] - [InlineData(HttpStatusCode.TooManyRequests)] - public async Task CustomPolicyStatusDontMatchNeverTriggersAsync(HttpStatusCode statusCode) - { - // Arrange - var asyncPolicy = Policy - .HandleResult(result => result.StatusCode != statusCode) - .WaitAndRetryAsync( - retryCount: 1, - sleepDurationProvider: (retryTimes) => TimeSpan.FromMilliseconds(10)); - - var (mockLoggerFactory, mockLogger) = GetLoggerMocks(); - using var retry = new PollyHttpRetryHandler(asyncPolicy); - using var mockResponse = new HttpResponseMessage(statusCode); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, response.StatusCode); - } - - [Theory] - [InlineData(HttpStatusCode.RequestTimeout, HttpStatusCode.TooManyRequests)] - [InlineData(HttpStatusCode.ServiceUnavailable, HttpStatusCode.TooManyRequests)] - [InlineData(HttpStatusCode.GatewayTimeout, HttpStatusCode.TooManyRequests)] - [InlineData(HttpStatusCode.TooManyRequests, HttpStatusCode.TooManyRequests)] - public async Task CustomPolicyRetryStatusShouldTriggerRetrialsAsync(HttpStatusCode statusCode, HttpStatusCode retryStatusCode) - { - // Arrange - var retryCount 
= 3; - var asyncPolicy = Policy - .HandleResult(result => result.StatusCode == retryStatusCode) - .WaitAndRetryAsync( - retryCount, - (retryNumber) => TimeSpan.FromMilliseconds(10)); - - var (mockLoggerFactory, mockLogger) = GetLoggerMocks(); - using var retry = new PollyHttpRetryHandler(asyncPolicy); - using var mockResponse = new HttpResponseMessage(statusCode); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - var expectedSendAsyncTimes = (statusCode == retryStatusCode) - ? retryCount + 1 - : 1; - - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(expectedSendAsyncTimes), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(statusCode, response.StatusCode); - } - - [Theory] - [InlineData(typeof(ApplicationException), typeof(HttpRequestException))] - [InlineData(typeof(HttpRequestException), typeof(HttpRequestException))] - public async Task CustomPolicyRetryExceptionsShouldTriggerRetrialsAsync(Type exceptionType, Type retryExceptionType) - { - // Arrange - var retryCount = 1; - var asyncPolicy = Policy.Handle(exception => exception.GetType() == retryExceptionType) - .WaitAndRetryAsync( - retryCount, - (retryNumber) => TimeSpan.FromMilliseconds(10)); - - var (mockLoggerFactory, mockLogger) = GetLoggerMocks(); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(exceptionType); - using var retry = new PollyHttpRetryHandler(asyncPolicy); - - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await Assert.ThrowsAsync(exceptionType, - async () => await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None)); - - // Assert - var expectedSendAsyncTimes = (exceptionType == retryExceptionType) - ? retryCount + 1 - : 1; - - mockHandler.Protected() - .Verify>("SendAsync", Times.Exactly(expectedSendAsyncTimes), ItExpr.IsAny(), ItExpr.IsAny()); - } - - private static (Mock, Mock) GetLoggerMocks() - { - var mockLoggerFactory = new Mock(); - var mockLogger = new Mock(); - mockLoggerFactory.Setup(x => x.CreateLogger(It.IsAny())).Returns(mockLogger.Object); - - return (mockLoggerFactory, mockLogger); - } - - private static Mock GetHttpMessageHandlerMock(HttpResponseMessage mockResponse) - { - var mockHandler = new Mock(); - mockHandler.Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(mockResponse); - return mockHandler; - } - - private static Mock GetHttpMessageHandlerMock(Type exceptionType) - { - var mockHandler = new Mock(); - mockHandler.Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ThrowsAsync(Activator.CreateInstance(exceptionType) as Exception); - return mockHandler; - } -} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/CodeBlockTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/CodeBlockTests.cs deleted file mode 100644 index 3a26b1bf94e7..000000000000 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/CodeBlockTests.cs +++ /dev/null @@ -1,372 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Collections.Generic; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; -using Moq; -using Xunit; - -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt.Blocks; - -public class CodeBlockTests -{ - private readonly Mock _functions; - private readonly ILoggerFactory _logger = NullLoggerFactory.Instance; - private readonly Mock _functionRunner = new(); - private readonly Mock _serviceProvider = new(); - private readonly Mock _serviceSelector = new(); - - public CodeBlockTests() - { - this._functions = new Mock(); - } - - [Fact] - public async Task ItThrowsIfAFunctionDoesntExistAsync() - { - // Arrange - var functionRunner = new Mock(); - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object); - var target = new CodeBlock("functionName", this._logger); - - this._functionRunner.Setup(r => r.RunAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((pluginName, functionName, variables, cancellationToken) => - { - throw new SKException("No function was found"); - }); - - // Act & Assert - await Assert.ThrowsAsync(() => target.RenderCodeAsync(context)); - } - - [Fact] - public async Task ItThrowsIfAFunctionCallThrowsAsync() - { - // Arrange - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, functions: this._functions.Object); - var function = new Mock(); - function - .Setup(x => x.InvokeAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Throws(new RuntimeWrappedException("error")); - - this.MockFunctionRunner(function.Object); - - var target = new CodeBlock("functionName", this._logger); - - // Act & Assert - await Assert.ThrowsAsync(() => target.RenderCodeAsync(context)); - } - - [Fact] - public void ItHasTheCorrectType() - { - // Act - var target = new CodeBlock("", NullLoggerFactory.Instance); - - // Assert - Assert.Equal(BlockTypes.Code, target.Type); - } - - [Fact] - public void ItTrimsSpaces() - { - // Act + Assert - Assert.Equal("aa", new CodeBlock(" aa ", NullLoggerFactory.Instance).Content); - } - - [Fact] - public void ItChecksValidityOfInternalBlocks() - { - // Arrange - var validBlock1 = new FunctionIdBlock("x"); - var validBlock2 = new ValBlock("''"); - var invalidBlock = new VarBlock(""); - - // Act - var codeBlock1 = new CodeBlock(new List { validBlock1, validBlock2 }, "", NullLoggerFactory.Instance); - var codeBlock2 = new CodeBlock(new List { validBlock1, invalidBlock }, "", NullLoggerFactory.Instance); - - // Assert - Assert.True(codeBlock1.IsValid(out _)); - Assert.False(codeBlock2.IsValid(out _)); - } - - [Fact] - public void ItRequiresAValidFunctionCall() - { - // Arrange - var funcId = new FunctionIdBlock("funcName"); - var valBlock = new ValBlock("'value'"); - var varBlock = new VarBlock("$var"); - var namedArgBlock = new NamedArgBlock("varName='foo'"); - - // Act - var codeBlock1 = new CodeBlock(new List { funcId, valBlock }, "", NullLoggerFactory.Instance); - var codeBlock2 = new CodeBlock(new List { funcId, varBlock }, "", NullLoggerFactory.Instance); - var codeBlock3 = new CodeBlock(new List { funcId, funcId }, "", 
NullLoggerFactory.Instance); - var codeBlock4 = new CodeBlock(new List { funcId, varBlock, varBlock }, "", NullLoggerFactory.Instance); - var codeBlock5 = new CodeBlock(new List { funcId, varBlock, namedArgBlock }, "", NullLoggerFactory.Instance); - var codeBlock6 = new CodeBlock(new List { varBlock, valBlock }, "", NullLoggerFactory.Instance); - var codeBlock7 = new CodeBlock(new List { namedArgBlock }, "", NullLoggerFactory.Instance); - - // Assert - Assert.True(codeBlock1.IsValid(out _)); - Assert.True(codeBlock2.IsValid(out _)); - - // Assert - Can't pass a function to a function - Assert.False(codeBlock3.IsValid(out var errorMessage3)); - Assert.Equal("The first arg of a function must be a quoted string, variable or named argument", errorMessage3); - - // Assert - Can't pass more than one unnamed param - Assert.False(codeBlock4.IsValid(out var errorMessage4)); - Assert.Equal("Functions only support named arguments after the first argument. Argument 2 is not named.", errorMessage4); - - // Assert - Can pass one unnamed param and named args - Assert.True(codeBlock5.IsValid(out var errorMessage5)); - Assert.Empty(errorMessage5); - - // Assert - Can't use > 1 block if not a function call - Assert.False(codeBlock6.IsValid(out var errorMessage6)); - Assert.Equal("Unexpected second token found: 'value'", errorMessage6); - - // Assert - Can't use a named argument without a function block - Assert.False(codeBlock7.IsValid(out var errorMessage7)); - Assert.Equal("Unexpected named argument found. Expected function name first.", errorMessage7); - } - - [Fact] - public async Task ItRendersCodeBlockConsistingOfJustAVarBlock1Async() - { - // Arrange - var variables = new ContextVariables { ["varName"] = "foo" }; - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables, functions: this._functions.Object); - - // Act - var codeBlock = new CodeBlock("$varName", NullLoggerFactory.Instance); - var result = await codeBlock.RenderCodeAsync(context); - - // Assert - Assert.Equal("foo", result); - } - - [Fact] - public async Task ItRendersCodeBlockConsistingOfJustAVarBlock2Async() - { - // Arrange - var variables = new ContextVariables { ["varName"] = "bar" }; - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables, functions: this._functions.Object); - var varBlock = new VarBlock("$varName"); - - // Act - var codeBlock = new CodeBlock(new List { varBlock }, "", NullLoggerFactory.Instance); - var result = await codeBlock.RenderCodeAsync(context); - - // Assert - Assert.Equal("bar", result); - } - - [Fact] - public async Task ItRendersCodeBlockConsistingOfJustAValBlock1Async() - { - // Arrange - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object); - - // Act - var codeBlock = new CodeBlock("'ciao'", NullLoggerFactory.Instance); - var result = await codeBlock.RenderCodeAsync(context); - - // Assert - Assert.Equal("ciao", result); - } - - [Fact] - public async Task ItRendersCodeBlockConsistingOfJustAValBlock2Async() - { - // Arrange - var kernel = new Mock(); - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object); - var valBlock = new ValBlock("'arrivederci'"); - - // Act - var codeBlock = new CodeBlock(new List { valBlock }, "", NullLoggerFactory.Instance); - var result = await codeBlock.RenderCodeAsync(context); - - // Assert - 
Assert.Equal("arrivederci", result); - } - - [Fact] - public async Task ItInvokesFunctionCloningAllVariablesAsync() - { - // Arrange - const string Func = "funcName"; - const string Plugin = "pluginName"; - - var variables = new ContextVariables { ["input"] = "zero", ["var1"] = "uno", ["var2"] = "due" }; - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables, functions: this._functions.Object); - var funcId = new FunctionIdBlock(Func); - - var canary0 = string.Empty; - var canary1 = string.Empty; - var canary2 = string.Empty; - var function = new Mock(); - function - .Setup(x => x.InvokeAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((context, _, _) => - { - canary0 = context!.Variables["input"]; - canary1 = context.Variables["var1"]; - canary2 = context.Variables["var2"]; - - context.Variables["input"] = "overridden"; - context.Variables["var1"] = "overridden"; - context.Variables["var2"] = "overridden"; - }) - .ReturnsAsync((SKContext inputcontext, object _, CancellationToken _) => new FunctionResult(Func, Plugin, inputcontext)); - - this.MockFunctionRunner(function.Object); - - // Act - var codeBlock = new CodeBlock(new List { funcId }, "", NullLoggerFactory.Instance); - string result = await codeBlock.RenderCodeAsync(context); - - // Assert - Values are received - Assert.Equal("zero", canary0); - Assert.Equal("uno", canary1); - Assert.Equal("due", canary2); - - // Assert - Original context is intact - Assert.Equal("zero", variables["input"]); - Assert.Equal("uno", variables["var1"]); - Assert.Equal("due", variables["var2"]); - } - - [Fact] - public async Task ItInvokesFunctionWithCustomVariableAsync() - { - // Arrange - const string Func = "funcName"; - const string Plugin = "pluginName"; - const string Var = "varName"; - const string VarValue = "varValue"; - - var variables = new ContextVariables { [Var] = VarValue }; - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables, functions: this._functions.Object); - var funcId = new FunctionIdBlock(Func); - var varBlock = new VarBlock($"${Var}"); - - var canary = string.Empty; - var function = new Mock(); - function - .Setup(x => x.InvokeAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((context, _, _) => - { - canary = context!.Variables["input"]; - }) - .ReturnsAsync((SKContext inputcontext, object _, CancellationToken _) => new FunctionResult(Func, Plugin, inputcontext)); - - this.MockFunctionRunner(function.Object); - - // Act - var codeBlock = new CodeBlock(new List { funcId, varBlock }, "", NullLoggerFactory.Instance); - string result = await codeBlock.RenderCodeAsync(context); - - // Assert - Assert.Equal(VarValue, result); - Assert.Equal(VarValue, canary); - } - - [Fact] - public async Task ItInvokesFunctionWithCustomValueAsync() - { - // Arrange - const string Func = "funcName"; - const string Plugin = "pluginName"; - const string Value = "value"; - - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables: null, functions: this._functions.Object); - var funcId = new FunctionIdBlock(Func); - var valBlock = new ValBlock($"'{Value}'"); - - var canary = string.Empty; - var function = new Mock(); - function - .Setup(x => x.InvokeAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((context, _, _) => - { - canary = context!.Variables["input"]; - }) - .ReturnsAsync((SKContext inputcontext, object _, 
CancellationToken _) => new FunctionResult(Func, Plugin, inputcontext)); - - this.MockFunctionRunner(function.Object); - - // Act - var codeBlock = new CodeBlock(new List { funcId, valBlock }, "", NullLoggerFactory.Instance); - string result = await codeBlock.RenderCodeAsync(context); - - // Assert - Assert.Equal(Value, result); - Assert.Equal(Value, canary); - } - - [Fact] - public async Task ItInvokesFunctionWithNamedArgsAsync() - { - // Arrange - const string Func = "funcName"; - const string Plugin = "pluginName"; - const string Value = "value"; - const string FooValue = "bar"; - const string BobValue = "bob's value"; - - var variables = new ContextVariables(); - variables.Set("bob", BobValue); - variables.Set("input", Value); - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables: variables, functions: this._functions.Object); - var funcId = new FunctionIdBlock(Func); - var namedArgBlock1 = new NamedArgBlock($"foo='{FooValue}'"); - var namedArgBlock2 = new NamedArgBlock("baz=$bob"); - - var foo = string.Empty; - var baz = string.Empty; - var function = new Mock(); - function - .Setup(x => x.InvokeAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((context, _, _) => - { - foo = context!.Variables["foo"]; - baz = context!.Variables["baz"]; - }) - .ReturnsAsync((SKContext inputcontext, object _, CancellationToken _) => new FunctionResult(Func, Plugin, inputcontext)); - - this.MockFunctionRunner(function.Object); - - // Act - var codeBlock = new CodeBlock(new List { funcId, namedArgBlock1, namedArgBlock2 }, "", NullLoggerFactory.Instance); - string result = await codeBlock.RenderCodeAsync(context); - - // Assert - Assert.Equal(FooValue, foo); - Assert.Equal(BobValue, baz); - Assert.Equal(Value, result); - } - - private void MockFunctionRunner(ISKFunction function) - { - this._functionRunner.Setup(r => r.RunAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((pluginName, functionName, variables, cancellationToken) => - { - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables); - return function.InvokeAsync(context, null, cancellationToken); - }); - } -} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/VarBlockTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/VarBlockTests.cs deleted file mode 100644 index 2e7e2d53eda9..000000000000 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/VarBlockTests.cs +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; -using Xunit; - -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt.Blocks; - -public class VarBlockTests -{ - [Fact] - public void ItHasTheCorrectType() - { - // Act - var target = new VarBlock(""); - - // Assert - Assert.Equal(BlockTypes.Variable, target.Type); - } - - [Fact] - public void ItTrimsSpaces() - { - // Act + Assert - Assert.Equal("$", new VarBlock(" $ ").Content); - } - - [Fact] - public void ItIgnoresSpacesAround() - { - // Act - var target = new VarBlock(" $var \n "); - - // Assert - Assert.Equal("$var", target.Content); - } - - [Fact] - public void ItRendersToEmptyStringWithoutVariables() - { - // Arrange - var target = new VarBlock(" $var \n "); - - // Act - var result = target.Render(null); - - // Assert - Assert.Equal(string.Empty, result); - } - - [Fact] - public void ItRendersToEmptyStringIfVariableIsMissing() - { - // Arrange - var target = new VarBlock(" $var \n "); - var variables = new ContextVariables - { - ["foo"] = "bar" - }; - - // Act - var result = target.Render(variables); - - // Assert - Assert.Equal(string.Empty, result); - } - - [Fact] - public void ItRendersToVariableValueWhenAvailable() - { - // Arrange - var target = new VarBlock(" $var \n "); - var variables = new ContextVariables - { - ["foo"] = "bar", - ["var"] = "able", - }; - - // Act - var result = target.Render(variables); - - // Assert - Assert.Equal("able", result); - } - - [Fact] - public void ItThrowsIfTheVarNameIsEmpty() - { - // Arrange - var variables = new ContextVariables - { - ["foo"] = "bar", - ["var"] = "able", - }; - var target = new VarBlock(" $ "); - - // Act + Assert - Assert.Throws(() => target.Render(variables)); - } - - [Theory] - [InlineData("0", true)] - [InlineData("1", true)] - [InlineData("a", true)] - [InlineData("_", true)] - [InlineData("01", true)] - [InlineData("01a", true)] - [InlineData("a01", true)] - [InlineData("_0", true)] - [InlineData("a01_", true)] - [InlineData("_a01", true)] - [InlineData(".", false)] - [InlineData("-", false)] - [InlineData("a b", false)] - [InlineData("a\nb", false)] - [InlineData("a\tb", false)] - [InlineData("a\rb", false)] - [InlineData("a.b", false)] - [InlineData("a,b", false)] - [InlineData("a-b", false)] - [InlineData("a+b", false)] - [InlineData("a~b", false)] - [InlineData("a`b", false)] - [InlineData("a!b", false)] - [InlineData("a@b", false)] - [InlineData("a#b", false)] - [InlineData("a$b", false)] - [InlineData("a%b", false)] - [InlineData("a^b", false)] - [InlineData("a*b", false)] - [InlineData("a(b", false)] - [InlineData("a)b", false)] - [InlineData("a|b", false)] - [InlineData("a{b", false)] - [InlineData("a}b", false)] - [InlineData("a[b", false)] - [InlineData("a]b", false)] - [InlineData("a:b", false)] - [InlineData("a;b", false)] - [InlineData("a'b", false)] - [InlineData("a\"b", false)] - [InlineData("ab", false)] - [InlineData("a/b", false)] - [InlineData("a\\b", false)] - public void ItAllowsUnderscoreLettersAndDigits(string name, bool isValid) - { - // Arrange - var target = new VarBlock($" ${name} "); - var variables = new ContextVariables { [name] = "value" }; - - // Act - var result = target.Render(variables); - - // Assert - Assert.Equal(isValid, target.IsValid(out _)); - if (isValid) { Assert.Equal("value", result); } - } -} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/PromptTemplateEngineTests.cs 
b/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/PromptTemplateEngineTests.cs deleted file mode 100644 index c394a54d0db7..000000000000 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/PromptTemplateEngineTests.cs +++ /dev/null @@ -1,409 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Globalization; -using System.Linq; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; -using Moq; -using SemanticKernel.Extensions.UnitTests.XunitHelpers; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt; - -public sealed class PromptTemplateEngineTests -{ - private const string DateFormat = "M/d/yyyy"; - private readonly BasicPromptTemplateEngine _target; - private readonly ContextVariables _variables; - private readonly Mock _functions; - private readonly ITestOutputHelper _logger; - private readonly Mock _kernel; - private readonly Mock _functionRunner; - private readonly Mock _serviceProvider; - private readonly Mock _serviceSelector; - - public PromptTemplateEngineTests(ITestOutputHelper testOutputHelper) - { - this._logger = testOutputHelper; - this._target = new BasicPromptTemplateEngine(TestConsoleLogger.LoggerFactory); - this._variables = new ContextVariables(Guid.NewGuid().ToString("X")); - this._functions = new Mock(); - this._kernel = new Mock(); - this._functionRunner = new Mock(); - this._serviceProvider = new Mock(); - this._serviceSelector = new Mock(); - } - - [Fact] - public void ItRendersVariables() - { - // Arrange - var template = "{$x11} This {$a} is {$_a} a {{$x11}} test {{$x11}} " + - "template {{foo}}{{bar $a}}{{baz $_a}}{{yay $x11}}{{food a='b' c = $d}}"; - - // Act - var blocks = this._target.ExtractBlocks(template); - var updatedBlocks = this._target.RenderVariables(blocks, this._variables); - - // Assert - Assert.Equal(10, blocks.Count); - Assert.Equal(10, updatedBlocks.Count); - - Assert.Equal("$x11", blocks[1].Content); - Assert.Equal("", updatedBlocks[1].Content); - Assert.Equal(BlockTypes.Variable, blocks[1].Type); - Assert.Equal(BlockTypes.Text, updatedBlocks[1].Type); - - Assert.Equal("$x11", blocks[3].Content); - Assert.Equal("", updatedBlocks[3].Content); - Assert.Equal(BlockTypes.Variable, blocks[3].Type); - Assert.Equal(BlockTypes.Text, updatedBlocks[3].Type); - - Assert.Equal("foo", blocks[5].Content); - Assert.Equal("foo", updatedBlocks[5].Content); - Assert.Equal(BlockTypes.Code, blocks[5].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[5].Type); - - Assert.Equal("bar $a", blocks[6].Content); - Assert.Equal("bar $a", updatedBlocks[6].Content); - Assert.Equal(BlockTypes.Code, blocks[6].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[6].Type); - - Assert.Equal("baz $_a", blocks[7].Content); - Assert.Equal("baz $_a", updatedBlocks[7].Content); - Assert.Equal(BlockTypes.Code, blocks[7].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[7].Type); - - Assert.Equal("yay $x11", blocks[8].Content); - Assert.Equal("yay $x11", updatedBlocks[8].Content); - Assert.Equal(BlockTypes.Code, blocks[8].Type); - Assert.Equal(BlockTypes.Code, 
updatedBlocks[8].Type); - - Assert.Equal("food a='b' c = $d", blocks[9].Content); - Assert.Equal("food a='b' c = $d", updatedBlocks[9].Content); - Assert.Equal(BlockTypes.Code, blocks[9].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[9].Type); - - // Arrange - this._variables.Set("x11", "x11 value"); - this._variables.Set("a", "a value"); - this._variables.Set("_a", "_a value"); - this._variables.Set("c", "c value"); - this._variables.Set("d", "d value"); - - // Act - blocks = this._target.ExtractBlocks(template); - updatedBlocks = this._target.RenderVariables(blocks, this._variables); - - // Assert - Assert.Equal(10, blocks.Count); - Assert.Equal(10, updatedBlocks.Count); - - Assert.Equal("$x11", blocks[1].Content); - Assert.Equal("x11 value", updatedBlocks[1].Content); - Assert.Equal(BlockTypes.Variable, blocks[1].Type); - Assert.Equal(BlockTypes.Text, updatedBlocks[1].Type); - - Assert.Equal("$x11", blocks[3].Content); - Assert.Equal("x11 value", updatedBlocks[3].Content); - Assert.Equal(BlockTypes.Variable, blocks[3].Type); - Assert.Equal(BlockTypes.Text, updatedBlocks[3].Type); - - Assert.Equal("foo", blocks[5].Content); - Assert.Equal("foo", updatedBlocks[5].Content); - Assert.Equal(BlockTypes.Code, blocks[5].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[5].Type); - - Assert.Equal("bar $a", blocks[6].Content); - Assert.Equal("bar $a", updatedBlocks[6].Content); - Assert.Equal(BlockTypes.Code, blocks[6].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[6].Type); - - Assert.Equal("baz $_a", blocks[7].Content); - Assert.Equal("baz $_a", updatedBlocks[7].Content); - Assert.Equal(BlockTypes.Code, blocks[7].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[7].Type); - - Assert.Equal("yay $x11", blocks[8].Content); - Assert.Equal("yay $x11", updatedBlocks[8].Content); - Assert.Equal(BlockTypes.Code, blocks[8].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[8].Type); - - Assert.Equal("food a='b' c = $d", blocks[9].Content); - Assert.Equal("food a='b' c = $d", updatedBlocks[9].Content); - Assert.Equal(BlockTypes.Code, blocks[9].Type); - Assert.Equal(BlockTypes.Code, updatedBlocks[9].Type); - } - - [Fact] - public async Task ItRendersCodeUsingInputAsync() - { - // Arrange - string MyFunctionAsync(SKContext context) - { - this._logger.WriteLine("MyFunction call received, input: {0}", context.Variables.Input); - return $"F({context.Variables.Input})"; - } - - List functions = new() - { - SKFunction.FromNativeMethod(Method(MyFunctionAsync), this), - }; - - Assert.NotNull(functions[0]); - - this._variables.Update("INPUT-BAR"); - var template = "foo-{{function}}-baz"; - - this.MockFunctionRunner(functions[0]); - - var context = this.MockContext(); - - // Act - var result = await this._target.RenderAsync(template, context); - - // Assert - Assert.Equal("foo-F(INPUT-BAR)-baz", result); - } - - [Fact] - public async Task ItRendersCodeUsingVariablesAsync() - { - // Arrange - string MyFunctionAsync(SKContext context) - { - this._logger.WriteLine("MyFunction call received, input: {0}", context.Variables.Input); - return $"F({context.Variables.Input})"; - } - - var func = SKFunction.FromNativeMethod(Method(MyFunctionAsync), this); - - Assert.NotNull(func); - - this._variables.Set("myVar", "BAR"); - var template = "foo-{{function $myVar}}-baz"; - - this.MockFunctionRunner(func); - var context = this.MockContext(); - - // Act - var result = await this._target.RenderAsync(template, context); - - // Assert - Assert.Equal("foo-F(BAR)-baz", result); - } - - [Fact] - public async Task 
ItRendersCodeUsingNamedVariablesAsync() - { - // Arrange - string MyFunctionAsync( - [Description("Name"), SKName("input")] string name, - [Description("Age"), SKName("age")] int age, - [Description("Slogan"), SKName("slogan")] string slogan, - [Description("Date"), SKName("date")] DateTime date) - { - var dateStr = date.ToString(PromptTemplateEngineTests.DateFormat, CultureInfo.InvariantCulture); - this._logger.WriteLine("MyFunction call received, name: {0}, age: {1}, slogan: {2}, date: {3}", name, age, slogan, date); - return $"[{dateStr}] {name} ({age}): \"{slogan}\""; - } - - var func = SKFunction.FromNativeMethod(Method(MyFunctionAsync), this); - - Assert.NotNull(func); - - this._variables.Set("input", "Mario"); - this._variables.Set("someDate", "2023-08-25T00:00:00"); - var template = "foo-{{function input=$input age='42' slogan='Let\\'s-a go!' date=$someDate}}-baz"; - - this.MockFunctionRunner(func); - var context = this.MockContext(); - - // Act - var result = await this._target.RenderAsync(template, context); - - // Assert - Assert.Equal("foo-[8/25/2023] Mario (42): \"Let's-a go!\"-baz", result); - } - - [Fact] - public async Task ItHandlesSyntaxErrorsAsync() - { - // Arrange - string MyFunctionAsync( - [Description("Name"), SKName("input")] string name, - [Description("Age"), SKName("age")] int age, - [Description("Slogan"), SKName("slogan")] string slogan, - [Description("Date"), SKName("date")] DateTime date) - { - var dateStr = date.ToString(PromptTemplateEngineTests.DateFormat, CultureInfo.InvariantCulture); - this._logger.WriteLine("MyFunction call received, name: {0}, age: {1}, slogan: {2}, date: {3}", name, age, slogan, date); - return $"[{dateStr}] {name} ({age}): \"{slogan}\""; - } - - ISKFunction func = SKFunction.FromNativeMethod(Method(MyFunctionAsync), this); - Assert.NotNull(func); - - this._variables.Set("input", "Mario"); - this._variables.Set("someDate", "2023-08-25T00:00:00"); - var template = "foo-{{function input=$input age=42 slogan='Let\\'s-a go!' date=$someDate}}-baz"; - var context = this.MockContext(); - - // Act - var result = await Assert.ThrowsAsync(() => this._target.RenderAsync(template, context)); - Assert.Equal($"Named argument values need to be prefixed with a quote or {Symbols.VarPrefix}.", result.Message); - } - - [Fact] - public async Task ItRendersCodeUsingImplicitInputAndNamedVariablesAsync() - { - // Arrange - string MyFunctionAsync( - [Description("Input"), SKName("input")] string name, - [Description("Age"), SKName("age")] int age, - [Description("Slogan"), SKName("slogan")] string slogan, - [Description("Date"), SKName("date")] DateTime date) - { - this._logger.WriteLine("MyFunction call received, name: {0}, age: {1}, slogan: {2}, date: {3}", name, age, slogan, date); - var dateStr = date.ToString(PromptTemplateEngineTests.DateFormat, CultureInfo.InvariantCulture); - return $"[{dateStr}] {name} ({age}): \"{slogan}\""; - } - - ISKFunction func = SKFunction.FromNativeMethod(Method(MyFunctionAsync), this); - - Assert.NotNull(func); - - this._variables.Set("input", "Mario"); - this._variables.Set("someDate", "2023-08-25T00:00:00"); - - var template = "foo-{{function $input age='42' slogan='Let\\'s-a go!' 
date=$someDate}}-baz"; - - this.MockFunctionRunner(func); - - var context = this.MockContext(); - - // Act - var result = await this._target.RenderAsync(template, context); - - // Assert - Assert.Equal("foo-[8/25/2023] Mario (42): \"Let's-a go!\"-baz", result); - } - - [Fact] - public async Task ItRendersAsyncCodeUsingImmutableVariablesAsync() - { - // Arrange - var template = "{{func1}} {{func2}} {{func3 $myVar}}"; - this._variables.Update("BAR"); - this._variables.Set("myVar", "BAZ"); - - string MyFunction1Async(SKContext context) - { - this._logger.WriteLine("MyFunction1 call received, input: {0}", context.Variables.Input); - context.Variables.Update("foo"); - return "F(OUTPUT-FOO)"; - } - string MyFunction2Async(SKContext context) - { - // Input value should be "BAR" because the variable $input is immutable in MyFunction1 - this._logger.WriteLine("MyFunction2 call received, input: {0}", context.Variables.Input); - context.Variables.Set("myVar", "bar"); - return context.Variables.Input; - } - string MyFunction3Async(SKContext context) - { - // Input value should be "BAZ" because the variable $myVar is immutable in MyFunction2 - this._logger.WriteLine("MyFunction3 call received, input: {0}", context.Variables.Input); - return context.Variables.TryGetValue("myVar", out string? value) ? value : ""; - } - - var functions = new List() - { - SKFunction.FromNativeMethod(Method(MyFunction1Async), this, "func1"), - SKFunction.FromNativeMethod(Method(MyFunction2Async), this, "func2"), - SKFunction.FromNativeMethod(Method(MyFunction3Async), this, "func3") - }; - - this.MockFunctionRunner(functions); - - // Act - var result = await this._target.RenderAsync(template, this.MockContext()); - - // Assert - Assert.Equal("F(OUTPUT-FOO) BAR BAZ", result); - } - - [Fact] - public async Task ItRendersAsyncCodeUsingVariablesAsync() - { - // Arrange - Task MyFunctionAsync(SKContext context) - { - // Input value should be "BAR" because the variable $myVar is passed in - this._logger.WriteLine("MyFunction call received, input: {0}", context.Variables.Input); - return Task.FromResult(context.Variables.Input); - } - - ISKFunction func = SKFunction.FromNativeMethod(Method(MyFunctionAsync), this); - Assert.NotNull(func); - - this._variables.Set("myVar", "BAR"); - - var template = "foo-{{function $myVar}}-baz"; - var context = this.MockContext(); - - // Act - var result = await this._target.RenderAsync(template, context); - - // Assert - Assert.Equal("foo-BAR-baz", result); - } - - private static MethodInfo Method(Delegate method) - { - return method.Method; - } - - private void MockFunctionRunner(ISKFunction function) - { - this._functionRunner.Setup(r => r.RunAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((pluginName, functionName, variables, cancellationToken) => - { - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables); - return function.InvokeAsync(context, null, cancellationToken); - }); - } - - private void MockFunctionRunner(List functions) - { - this._functionRunner.Setup(r => r.RunAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((pluginName, functionName, variables, cancellationToken) => - { - var context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, variables); - var function = functions.First(f => f.PluginName == functionName); - - return function.InvokeAsync(context, null, cancellationToken); - }); - } - - private SKContext 
MockContext() - { - return new SKContext( - this._functionRunner.Object, - this._serviceProvider.Object, - this._serviceSelector.Object, - this._variables, - this._functions.Object); - } -} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/TemplateTokenizerTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/TemplateTokenizerTests.cs deleted file mode 100644 index 53e36f3046fa..000000000000 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/TemplateTokenizerTests.cs +++ /dev/null @@ -1,262 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; -using Xunit; - -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt; - -public class TemplateTokenizerTests -{ - private readonly TemplateTokenizer _target; - - public TemplateTokenizerTests() - { - this._target = new TemplateTokenizer(); - } - - [Theory] - [InlineData(null, BlockTypes.Text)] - [InlineData("", BlockTypes.Text)] - [InlineData(" ", BlockTypes.Text)] - [InlineData(" ", BlockTypes.Text)] - [InlineData(" {} ", BlockTypes.Text)] - [InlineData(" {{} ", BlockTypes.Text)] - [InlineData(" {{ } } } ", BlockTypes.Text)] - [InlineData(" { { }} }", BlockTypes.Text)] - [InlineData("{{}}", BlockTypes.Text)] - [InlineData("{{ }}", BlockTypes.Text)] - [InlineData("{{ }}", BlockTypes.Text)] - [InlineData("{{ '}}x", BlockTypes.Text)] - [InlineData("{{ \"}}x", BlockTypes.Text)] - internal void ItParsesTextWithoutCode(string? text, BlockTypes type) - { - // Act - var blocks = this._target.Tokenize(text); - - // Assert - Assert.Single(blocks); - Assert.Equal(type, blocks[0].Type); - } - - [Theory] - [InlineData("", BlockTypes.Text)] - [InlineData(" ", BlockTypes.Text)] - [InlineData(" ", BlockTypes.Text)] - [InlineData(" aaa ", BlockTypes.Text)] - [InlineData("{{$}}", BlockTypes.Variable)] - [InlineData("{{$a}}", BlockTypes.Variable)] - [InlineData("{{ $a}}", BlockTypes.Variable)] - [InlineData("{{ $a }}", BlockTypes.Variable)] - [InlineData("{{ $a }}", BlockTypes.Variable)] - [InlineData("{{code}}", BlockTypes.Code)] - [InlineData("{{code }}", BlockTypes.Code)] - [InlineData("{{ code }}", BlockTypes.Code)] - [InlineData("{{ code }}", BlockTypes.Code)] - [InlineData("{{ code }}", BlockTypes.Code)] - [InlineData("{{''}}", BlockTypes.Value)] - [InlineData("{{' '}}", BlockTypes.Value)] - [InlineData("{{ ' '}}", BlockTypes.Value)] - [InlineData("{{ ' ' }}", BlockTypes.Value)] - [InlineData("{{ ' ' }}", BlockTypes.Value)] - [InlineData("{{ ' ' }}", BlockTypes.Value)] - internal void ItParsesBasicBlocks(string? text, BlockTypes type) - { - // Act - var blocks = this._target.Tokenize(text); - - // Assert - Assert.Single(blocks); - Assert.Equal(type, blocks[0].Type); - } - - [Theory] - [InlineData(null, 1)] - [InlineData("", 1)] - [InlineData("}}{{a}} {{b}}x", 5)] - [InlineData("}}{{ -a}} {{b}}x", 5)] - [InlineData("}}{{ -a\n}} {{b}}x", 5)] - [InlineData("}}{{ -a\n} } {{b}}x", 3)] - public void ItTokenizesTheRightTokenCount(string? 
template, int blockCount) - { - // Act - var blocks = this._target.Tokenize(template); - - // Assert - Assert.Equal(blockCount, blocks.Count); - } - - [Fact] - public void ItTokenizesEdgeCasesCorrectly1() - { - // Act - var blocks1 = this._target.Tokenize("{{{{a}}"); - var blocks2 = this._target.Tokenize("{{'{{a}}"); - var blocks3 = this._target.Tokenize("{{'a}}"); - var blocks4 = this._target.Tokenize("{{a'}}"); - - // Assert - Count - Assert.Equal(2, blocks1.Count); - Assert.Single(blocks2); - Assert.Single(blocks3); - Assert.Single(blocks4); - - // Assert - Type - Assert.Equal(BlockTypes.Text, blocks1[0].Type); - Assert.Equal(BlockTypes.Code, blocks1[1].Type); - - // Assert - Content - Assert.Equal("{{", blocks1[0].Content); - Assert.Equal("a", blocks1[1].Content); - } - - [Fact] - public void ItTokenizesEdgeCasesCorrectly2() - { - // Arrange - var template = "}}{{{ {$a}}}} {{b}}x}}"; - - // Act - var blocks = this._target.Tokenize(template); - - // Assert - Assert.Equal(5, blocks.Count); - - Assert.Equal("}}{", blocks[0].Content); - Assert.Equal(BlockTypes.Text, blocks[0].Type); - - Assert.Equal("{$a", blocks[1].Content); - Assert.Equal(BlockTypes.Code, blocks[1].Type); - - Assert.Equal("}} ", blocks[2].Content); - Assert.Equal(BlockTypes.Text, blocks[2].Type); - - Assert.Equal("b", blocks[3].Content); - Assert.Equal(BlockTypes.Code, blocks[3].Type); - - Assert.Equal("x}}", blocks[4].Content); - Assert.Equal(BlockTypes.Text, blocks[4].Type); - } - - [Fact] - public void ItTokenizesEdgeCasesCorrectly3() - { - // Arrange - var template = "}}{{{{$a}}}} {{b}}$x}}"; - - // Act - var blocks = this._target.Tokenize(template); - - // Assert - Assert.Equal(5, blocks.Count); - - Assert.Equal("}}{{", blocks[0].Content); - Assert.Equal(BlockTypes.Text, blocks[0].Type); - - Assert.Equal("$a", blocks[1].Content); - Assert.Equal(BlockTypes.Variable, blocks[1].Type); - - Assert.Equal("}} ", blocks[2].Content); - Assert.Equal(BlockTypes.Text, blocks[2].Type); - - Assert.Equal("b", blocks[3].Content); - Assert.Equal(BlockTypes.Code, blocks[3].Type); - - Assert.Equal("$x}}", blocks[4].Content); - Assert.Equal(BlockTypes.Text, blocks[4].Type); - } - - [Theory] - [InlineData("{{a$}}")] - [InlineData("{{a$a}}")] - [InlineData("{{a''}}")] - [InlineData("{{a\"\"}}")] - [InlineData("{{a'b'}}")] - [InlineData("{{a\"b\"}}")] - [InlineData("{{a'b' }}")] - [InlineData("{{a\"b\" }}")] - [InlineData("{{ asis 'f\\'oo' }}")] - public void ItTokenizesEdgeCasesCorrectly4(string template) - { - // Act - var blocks = this._target.Tokenize(template); - - // Assert - Assert.Single(blocks); - Assert.Equal(BlockTypes.Code, blocks[0].Type); - Assert.Equal(template[2..^2].Trim(), blocks[0].Content); - } - - [Fact] - public void ItTokenizesATypicalPrompt() - { - // Arrange - var template = "this is a {{ $prompt }} with {{$some}} variables " + - "and {{function $calls}} {{ and 'values' }}"; - - // Act - var blocks = this._target.Tokenize(template); - - // Assert - Assert.Equal(8, blocks.Count); - - Assert.Equal("this is a ", blocks[0].Content); - Assert.Equal(BlockTypes.Text, blocks[0].Type); - - Assert.Equal("$prompt", blocks[1].Content); - Assert.Equal(BlockTypes.Variable, blocks[1].Type); - - Assert.Equal(" with ", blocks[2].Content); - Assert.Equal(BlockTypes.Text, blocks[2].Type); - - Assert.Equal("$some", blocks[3].Content); - Assert.Equal(BlockTypes.Variable, blocks[3].Type); - - Assert.Equal(" variables and ", blocks[4].Content); - Assert.Equal(BlockTypes.Text, blocks[4].Type); - - Assert.Equal("function $calls", 
blocks[5].Content); - Assert.Equal(BlockTypes.Code, blocks[5].Type); - - Assert.Equal(" ", blocks[6].Content); - Assert.Equal(BlockTypes.Text, blocks[6].Type); - - Assert.Equal("and 'values'", blocks[7].Content); - Assert.Equal(BlockTypes.Code, blocks[7].Type); - } - - [Fact] - public void ItTokenizesAFunctionCallWithMultipleArguments() - { - // Arrange - var template = "this is a {{ function with='many' named=$arguments }}"; - - // Act - var blocks = this._target.Tokenize(template); - - // Assert - Assert.Equal(2, blocks.Count); - - Assert.Equal("this is a ", blocks[0].Content); - Assert.Equal(BlockTypes.Text, blocks[0].Type); - - Assert.Equal("function with='many' named=$arguments", blocks[1].Content); - Assert.Equal(BlockTypes.Code, blocks[1].Type); - } - - [Fact] - public void ItThrowsWhenCodeBlockStartsWithNamedArg() - { - // Arrange - var template = "{{ not='valid' }}"; - - // Assert - var ex = Assert.Throws(() => - { - // Act - this._target.Tokenize(template); - }); - Assert.Equal("Code tokenizer returned an incorrect first token type NamedArg", ex.Message); - } -} diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/Extensions/HandlebarsKernelExtensions.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/Extensions/HandlebarsKernelExtensions.cs new file mode 100644 index 000000000000..52e7ef08408b --- /dev/null +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/Extensions/HandlebarsKernelExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for Handlebars functionality. +/// +public static class HandlebarsKernelExtensions +{ + private static readonly HandlebarsPromptTemplateFactory s_promptTemplateFactory = new(); + + /// + /// Invokes a prompt specified via a prompt template in the Handlebars prompt template format. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Prompt template for the function, using Handlebars prompt template language + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// The result of the function's execution. + public static Task<FunctionResult> InvokeHandlebarsPromptAsync( + this Kernel kernel, + string promptTemplate, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) => + kernel.InvokeAsync((KernelFunction)KernelFunctionFactory.CreateFromPrompt( + promptTemplate, + templateFormat: HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, + promptTemplateFactory: s_promptTemplateFactory), arguments, cancellationToken); +} diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs new file mode 100644 index 000000000000..ddd88b6df40b --- /dev/null +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplate.cs @@ -0,0 +1,124 @@ +// Copyright (c) Microsoft. All rights reserved.
+ +using System.Threading; +using System.Threading.Tasks; +using HandlebarsDotNet; +using HandlebarsDotNet.Helpers; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars.Helpers; + +namespace Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +/// +/// Represents a Handlebars prompt template. +/// +internal sealed class HandlebarsPromptTemplate : IPromptTemplate +{ + /// + /// Default options for built-in Handlebars helpers. + /// + /// TODO [@teresaqhoang]: Support override of default options + private readonly HandlebarsPromptTemplateOptions _options; + + /// + /// Constructor for Handlebars PromptTemplate. + /// + /// Prompt template configuration + /// Handlebars prompt template options + public HandlebarsPromptTemplate(PromptTemplateConfig promptConfig, HandlebarsPromptTemplateOptions? options = null) + { + this._loggerFactory ??= NullLoggerFactory.Instance; + this._logger = this._loggerFactory.CreateLogger(typeof(HandlebarsPromptTemplate)); + this._promptModel = promptConfig; + this._options = options ?? new(); + } + + /// +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + public async Task RenderAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default) +#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously + { + Verify.NotNull(kernel); + + arguments = this.GetVariables(arguments); + var handlebarsInstance = HandlebarsDotNet.Handlebars.Create(); + + // Register kernel, system, and any custom helpers + this.RegisterHelpers(handlebarsInstance, kernel, arguments, cancellationToken); + + var template = handlebarsInstance.Compile(this._promptModel.Template); + return System.Net.WebUtility.HtmlDecode(template(arguments).Trim()); + } + + #region private + + private readonly ILoggerFactory _loggerFactory; + private readonly ILogger _logger; + private readonly PromptTemplateConfig _promptModel; + + /// + /// Registers kernel, system, and any custom helpers. + /// + private void RegisterHelpers( + IHandlebars handlebarsInstance, + Kernel kernel, + KernelArguments arguments, + CancellationToken cancellationToken = default) + { + // Add SK's built-in system helpers + KernelSystemHelpers.Register(handlebarsInstance, kernel, arguments, this._options); + + // Add built-in helpers from the HandlebarsDotNet library + HandlebarsHelpers.Register(handlebarsInstance, optionsCallback: options => + { + options.PrefixSeparator = this._options.PrefixSeparator; + options.Categories = this._options.Categories; + options.UseCategoryPrefix = this._options.UseCategoryPrefix; + options.CustomHelperPaths = this._options.CustomHelperPaths; + }); + + // Add helpers for kernel functions + KernelFunctionHelpers.Register(handlebarsInstance, kernel, arguments, this._options.PrefixSeparator, cancellationToken); + + // Add any custom helpers + this._options.RegisterCustomHelpers?.Invoke( + (string name, HandlebarsReturnHelper customHelper) + => KernelHelpersUtils.RegisterHelperSafe(handlebarsInstance, name, customHelper), + this._options, + arguments); + } + + /// + /// Gets the variables for the prompt template, including setting any default values from the prompt config. + /// + private KernelArguments GetVariables(KernelArguments? 
arguments) + { + KernelArguments result = new(); + + foreach (var p in this._promptModel.InputVariables) + { + if (p.Default == null || (p.Default is string stringDefault && stringDefault.Length == 0)) + { + continue; + } + + result[p.Name] = p.Default; + } + + if (arguments is not null) + { + foreach (var kvp in arguments) + { + if (kvp.Value is not null) + { + result[kvp.Key] = kvp.Value; + } + } + } + + return result; + } + + #endregion +} diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateFactory.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateFactory.cs new file mode 100644 index 000000000000..bb1e854e8baf --- /dev/null +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateFactory.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +/// +/// Provides an <see cref="IPromptTemplateFactory"/> for the handlebars prompt template format. +/// +public sealed class HandlebarsPromptTemplateFactory : IPromptTemplateFactory +{ + /// Gets the name of the Handlebars template format. + public static string HandlebarsTemplateFormat => "handlebars"; + + /// + /// Default options for built-in Handlebars helpers. + /// + /// TODO [@teresaqhoang]: Support override of default options + private readonly HandlebarsPromptTemplateOptions _options; + + /// + /// The character used to delimit plugin, function, or variable names in a Handlebars template. + /// + public string NameDelimiter => this._options.PrefixSeparator; + + /// + /// Initializes a new instance of the class. + /// + /// Handlebars prompt template options + public HandlebarsPromptTemplateFactory(HandlebarsPromptTemplateOptions? options = null) + { + this._options = options ?? new(); + } + + /// + public bool TryCreate(PromptTemplateConfig templateConfig, [NotNullWhen(true)] out IPromptTemplate? result) + { + Verify.NotNull(templateConfig); + + if (templateConfig.TemplateFormat.Equals(HandlebarsTemplateFormat, System.StringComparison.Ordinal)) + { + result = new HandlebarsPromptTemplate(templateConfig, this._options); + return true; + } + + result = null; + return false; + } +} diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs new file mode 100644 index 000000000000..2fbd155cd47e --- /dev/null +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/HandlebarsPromptTemplateOptions.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using HandlebarsDotNet; +using HandlebarsDotNet.Helpers.Enums; +using HandlebarsDotNet.Helpers.Options; + +namespace Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +/// +/// Configuration for Handlebars helpers. +/// +public sealed class HandlebarsPromptTemplateOptions : HandlebarsHelpersOptions +{ + // TODO [@teresaqhoang]: Issue #3947 Add Categories filter for KernelSystemHelpers (i.e., KernelHelperCategories) + + /// + /// Delegate for registering custom Handlebars helpers with conflict resolution. + /// + /// The name of the helper. + /// The helper to register. + public delegate void RegisterHelperCallback(string name, HandlebarsReturnHelper helper); + + /// + /// Callback for registering custom helpers.
+ /// + /// + /// This callback allows users to register their custom helpers while ensuring + /// that they don't conflict with existing system or custom helpers. Users should + /// use the provided `registerHelper` callback when registering their custom helpers. + /// + /// + /// + /// HandlebarsPromptTemplateOptions.RegisterCustomHelpers = (RegisterHelperCallback registerHelper, HandlebarsPromptTemplateOptions options, KernelArguments variables) => + /// { + /// registerHelper("customHelper", (Context context, Arguments arguments) => + /// { + /// // Custom helper logic + /// }); + /// }; + /// + /// + /// + /// The callback takes three parameters: + /// 1. A callback representing the `RegisterHelperSafe` method to register new helpers with built-in conflict handling. + /// 2. A representing the configuration for helpers. + /// 3. A instance containing variables maintained by the Handlebars context. + /// + public Action? RegisterCustomHelpers { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// Categories only filters built-in dotnet helpers, the ones defined here: https://github.com/Handlebars-Net/Handlebars.Net.Helpers/wiki. + public HandlebarsPromptTemplateOptions() + { + this.PrefixSeparator = "-"; + this.Categories = new Category[] { + Category.Math, // Enables basic math operations (https://github.com/Handlebars-Net/Handlebars.Net.Helpers/wiki/Math) + Category.String // Enables string manipulation (https://github.com/Handlebars-Net/Handlebars.Net.Helpers/wiki/String) + }; + } +} diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelperUtils.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelperUtils.cs new file mode 100644 index 000000000000..3902ffdf73b3 --- /dev/null +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelperUtils.cs @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using HandlebarsDotNet; + +namespace Microsoft.SemanticKernel.PromptTemplates.Handlebars.Helpers; + +/// +/// Extension class to register additional helpers as Kernel System helpers. +/// +internal static class KernelHelpersUtils +{ + /// + /// Registers a helper with the Handlebars instance, throwing an exception if a helper with the same name is already registered. + /// + /// The -instance. + /// The name of the helper. + /// The helper to register. + internal static void RegisterHelperSafe(IHandlebars handlebarsInstance, string helperName, HandlebarsReturnHelper helper) + { + if (handlebarsInstance.Configuration.Helpers.ContainsKey(helperName)) + { + throw new InvalidOperationException($"A helper with the name '{helperName}' is already registered."); + } + + handlebarsInstance.RegisterHelper(helperName, helper); + } + + /// + /// Returns value if defined, else, tries to resolve value from given KernelArguments dictionary. + /// + /// Argument to process. + /// Dictionary of variables maintained by the Handlebars context. + internal static object? GetArgumentValue(object argument, KernelArguments kernelArguments) + { + // If the argument is of type UndefinedBindingResult, it means that Handlebars attempted to retrieve the value for a binding + // but was unable to do so because the variable was not defined or not passed to the template context at the time of render. + // Thus, we try to get the value from the kernel arguments dictionary. 
+ if (argument is UndefinedBindingResult result) + { + return kernelArguments.TryGetValue(result.Value, out var variable) ? variable : null; + } + + return argument; + } + + /// + /// Processes arguments to resolve unbinded values. If argument was not bound to the Handlebars template at render time, get the value from the KernelArguments dictionary. + /// + /// Arguments to process. + /// Dictionary of variables maintained by the Handlebars context. + /// Arguments with processed values. + internal static Arguments ProcessArguments(Arguments arguments, KernelArguments kernelArguments) + { + var processedArguments = arguments.Select(arg => + { + return GetArgumentValue(arg, kernelArguments); + }); + + return new Arguments(processedArguments.ToArray()); + } + + /// + /// Determines whether the specified type is a numeric type. + /// + /// The type to check. + /// True if the type is a numeric type; otherwise, false. + public static bool IsNumericType(Type? type) + { + return type == typeof(nuint) + || type == typeof(nint) + || (type is not null && + Type.GetTypeCode(type) is + TypeCode.SByte or + TypeCode.Int16 or + TypeCode.Int32 or + TypeCode.Int64 or + TypeCode.Byte or + TypeCode.UInt16 or + TypeCode.UInt32 or + TypeCode.UInt64 or + TypeCode.Double or + TypeCode.Single or + TypeCode.Decimal); + } + + /// + /// Tries to parse the input as any of the numeric types. + /// + /// The input string to parse. + /// True if the input can be parsed as any of the numeric types; otherwise, false. + public static bool TryParseAnyNumber(string? input) + { + // Check if input can be parsed as any of these numeric types. + // We only need to check the largest types, as if they fail, the smaller types will also fail. + return long.TryParse(input, out _) || + ulong.TryParse(input, out _) || + double.TryParse(input, out _) || + decimal.TryParse(input, out _); + } + + /// + /// Tries to convert a object to a specific type. + /// + public static object? DeserializeJsonNode(JsonNode? jsonContent) + { + return jsonContent?.GetValueKind() switch + { + JsonValueKind.Array => jsonContent.AsArray(), + JsonValueKind.Object => jsonContent.AsObject(), + JsonValueKind.String => jsonContent.GetValue(), + _ => jsonContent + }; + } +} diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs new file mode 100644 index 000000000000..4a372dde2dad --- /dev/null +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading; +using HandlebarsDotNet; +using HandlebarsDotNet.Compiler; + +namespace Microsoft.SemanticKernel.PromptTemplates.Handlebars.Helpers; + +/// +/// Utility class for registering kernel functions as helpers in Handlebars. +/// +internal static class KernelFunctionHelpers +{ + /// + /// Register all (default) or specific categories. + /// + /// The -context. + /// Kernel instance. + /// Kernel arguments maintained as the executing context. + /// The character used to delimit the plugin name and function name in a Handlebars template. + /// The to monitor for cancellation requests. The default is . 
+ public static void Register( + IHandlebars handlebarsInstance, + Kernel kernel, + KernelArguments executionContext, + string nameDelimiter, + CancellationToken cancellationToken) + { + foreach (var function in kernel.Plugins.GetFunctionsMetadata()) + { + RegisterFunctionAsHelper(kernel, executionContext, handlebarsInstance, function, nameDelimiter, cancellationToken); + } + } + + #region private + + private static void RegisterFunctionAsHelper( + Kernel kernel, + KernelArguments executionContext, + IHandlebars handlebarsInstance, + KernelFunctionMetadata functionMetadata, + string nameDelimiter, + CancellationToken cancellationToken) + { + string fullyResolvedFunctionName = functionMetadata.PluginName + nameDelimiter + functionMetadata.Name; + + KernelHelpersUtils.RegisterHelperSafe( + handlebarsInstance, + fullyResolvedFunctionName, + (Context context, Arguments handlebarsArguments) => + { + // Get the parameters from the template arguments + if (handlebarsArguments.Length is not 0) + { + if (handlebarsArguments[0].GetType() == typeof(HashParameterDictionary)) + { + ProcessHashArguments(functionMetadata, executionContext, (IDictionary)handlebarsArguments[0], nameDelimiter); + } + else + { + ProcessPositionalArguments(functionMetadata, executionContext, handlebarsArguments); + } + } + else if (functionMetadata.Parameters.Any(p => p.IsRequired)) + { + throw new ArgumentException($"No arguments are provided for {fullyResolvedFunctionName}."); + } + + KernelFunction function = kernel.Plugins.GetFunction(functionMetadata.PluginName, functionMetadata.Name); + + // Invoke the function and write the result to the template + return InvokeKernelFunction(kernel, function, executionContext, cancellationToken); + }); + } + + /// + /// Checks if handlebars argument is a valid type for the function parameter. + /// Must satisfy one of the following: + /// Types are an exact match. + /// Argument is any kind of numeric type if function parameter requires a numeric type. + /// Argument type is an object (this covers complex types). + /// Function parameter is a generic type. + /// + /// Function parameter metadata. + /// Handlebar argument. + private static bool IsExpectedParameterType(KernelParameterMetadata parameterMetadata, object argument) + { + var actualParameterType = parameterMetadata.ParameterType is Type parameterType && Nullable.GetUnderlyingType(parameterType) is Type underlyingType + ? underlyingType + : parameterMetadata.ParameterType; + + bool parameterIsNumeric = KernelHelpersUtils.IsNumericType(actualParameterType) + || (parameterMetadata.Schema?.RootElement.TryGetProperty("type", out JsonElement typeProperty) == true && typeProperty.GetString() == "number"); + + bool argIsNumeric = KernelHelpersUtils.IsNumericType(argument.GetType()) + || KernelHelpersUtils.TryParseAnyNumber(argument.ToString()); + + return actualParameterType is null + || actualParameterType == argument.GetType() + || (argIsNumeric && parameterIsNumeric) + || actualParameterType == typeof(string); // The kernel should handle this conversion + } + + /// + /// Processes the hash arguments passed to a Handlebars helper function. + /// + /// Metadata for the function being invoked. + /// Arguments maintained in the executing context. + /// Arguments passed to the Handlebars helper. + /// The character used to delimit the plugin name and function name in a Handlebars template. + /// Thrown when a required parameter is missing. 
+ private static void ProcessHashArguments( + KernelFunctionMetadata functionMetadata, + KernelArguments executionContext, + IDictionary? handlebarsArguments, + string nameDelimiter) + { + // Prepare the input parameters for the function + foreach (var param in functionMetadata.Parameters) + { + var fullyQualifiedParamName = functionMetadata.Name + nameDelimiter + param.Name; + if (handlebarsArguments is not null && (handlebarsArguments.TryGetValue(fullyQualifiedParamName, out var value) || handlebarsArguments.TryGetValue(param.Name, out value))) + { + value = KernelHelpersUtils.GetArgumentValue(value, executionContext); + if (value is not null && IsExpectedParameterType(param, value)) + { + executionContext[param.Name] = value; + } + else + { + throw new KernelException($"Invalid argument type for function {functionMetadata.Name}. Parameter {param.Name} expects type {param.ParameterType ?? (object?)param.Schema} but received {value?.GetType()}."); + } + } + else if (param.IsRequired) + { + throw new KernelException($"Parameter {param.Name} is required for function {functionMetadata.Name}."); + } + } + } + + /// + /// Processes the positional arguments passed to a Handlebars helper function. + /// + /// KernelFunctionMetadata for the function being invoked. + /// Arguments maintained in the executing context. + /// Arguments passed to the Handlebars helper. + /// Thrown when a required parameter is missing. + private static void ProcessPositionalArguments(KernelFunctionMetadata functionMetadata, KernelArguments executionContext, Arguments handlebarsArguments) + { + var requiredParameters = functionMetadata.Parameters.Where(p => p.IsRequired).ToList(); + + if (requiredParameters.Count <= handlebarsArguments.Length && handlebarsArguments.Length <= functionMetadata.Parameters.Count) + { + var argIndex = 0; + var arguments = KernelHelpersUtils.ProcessArguments(handlebarsArguments, executionContext); + foreach (var arg in arguments) + { + var param = functionMetadata.Parameters[argIndex++]; + if (IsExpectedParameterType(param, arg)) + { + executionContext[param.Name] = arg; + } + else + { + throw new KernelException($"Invalid parameter type for function {functionMetadata.Name}. Parameter {param.Name} expects type {param.ParameterType ?? (object?)param.Schema} but received {arg.GetType()}."); + } + } + } + else + { + throw new KernelException($"Invalid parameter count for function {functionMetadata.Name}. {handlebarsArguments.Length} were specified but {functionMetadata.Parameters.Count} are required."); + } + } + + /// + /// Invokes an SK function and returns a typed result, if specified. + /// + private static object? InvokeKernelFunction( + Kernel kernel, + KernelFunction function, + KernelArguments executionContext, + CancellationToken cancellationToken) + { +#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits + FunctionResult result = function.InvokeAsync(kernel, executionContext, cancellationToken: cancellationToken).GetAwaiter().GetResult(); +#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits + + return ParseResult(result); + } + + /// + /// Parse the into an object, extracting wrapped content as necessary. + /// + /// Function result. + /// Deserialized object + private static object? ParseResult(FunctionResult result) + { + var resultAsObject = result.GetValue(); + + // Extract content from wrapper types and deserialize as needed. 
+ if (resultAsObject is ChatMessageContent chatMessageContent) + { + return chatMessageContent.Content; + } + + if (resultAsObject is RestApiOperationResponse restApiOperationResponse) + { + // Deserialize any JSON content or return the content as a string + if (string.Equals(restApiOperationResponse.ContentType, "application/json", StringComparison.OrdinalIgnoreCase)) + { + var parsedJson = JsonValue.Parse(restApiOperationResponse.Content.ToString()); + return KernelHelpersUtils.DeserializeJsonNode(parsedJson); + } + + return restApiOperationResponse.Content; + } + + if (result.ValueType is not null && result.ValueType != typeof(string)) + { + // Serialize then deserialize the result to ensure it is parsed as the correct type with appropriate property casing + var serializedResult = JsonSerializer.Serialize(resultAsObject); + return JsonSerializer.Deserialize(serializedResult, result.ValueType); + } + + return resultAsObject; + } + + #endregion +} diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelSystemHelpers.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelSystemHelpers.cs new file mode 100644 index 000000000000..dd7036d19b9f --- /dev/null +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelSystemHelpers.cs @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using HandlebarsDotNet; +using HandlebarsDotNet.Compiler; +using static Microsoft.SemanticKernel.PromptTemplates.Handlebars.Helpers.KernelHelpersUtils; + +namespace Microsoft.SemanticKernel.PromptTemplates.Handlebars.Helpers; + +/// +/// Extension class to register additional helpers as Kernel System helpers. +/// +internal static class KernelSystemHelpers +{ + /// + /// Register all (default) or specific categories of system helpers. + /// + /// The -instance. + /// Kernel instance. + /// Dictionary of variables maintained by the Handlebars context. + /// Handlebars prompt template options. + public static void Register( + IHandlebars handlebarsInstance, + Kernel kernel, + KernelArguments variables, + HandlebarsPromptTemplateOptions options) + { + RegisterSystemHelpers(handlebarsInstance, kernel, variables); + } + + /// + /// Register all system helpers. + /// + /// The -instance. + /// Kernel instance. + /// Dictionary of variables maintained by the Handlebars context. + /// Exception thrown when a message does not contain a defining role. + private static void RegisterSystemHelpers( + IHandlebars handlebarsInstance, + Kernel kernel, + KernelArguments variables) + { + // TODO [@teresaqhoang]: Issue #3947 Isolate Handlebars Kernel System helpers in their own class + // Should also consider standardizing the naming conventions for these helpers, i.e., 'Message' instead of 'message' + handlebarsInstance.RegisterHelper("message", static (writer, options, context, arguments) => + { + var parameters = (IDictionary)arguments[0]; + + // Verify that the message has a role + if (!parameters!.TryGetValue("role", out object? value)) + { + throw new KernelException("Message must have a role."); + } + + writer.Write($"<{value}~>", false); + options.Template(writer, context); + writer.Write($"", false); + }); + + handlebarsInstance.RegisterHelper("set", (writer, context, arguments) => + { + var name = string.Empty; + object? 
value = string.Empty; + if (arguments[0].GetType() == typeof(HashParameterDictionary)) + { + // Get the parameters from the template arguments + var parameters = (IDictionary)arguments[0]; + name = (string)parameters!["name"]; + value = GetArgumentValue(parameters!["value"], variables); + } + else + { + var args = ProcessArguments(arguments, variables); + name = args[0].ToString(); + value = args[1]; + } + + // Set the variable in the Handlebars context + variables[name] = value; + }); + + handlebarsInstance.RegisterHelper("json", (in HelperOptions options, in Context context, in Arguments arguments) => + { + if (arguments.Length == 0) + { + throw new HandlebarsRuntimeException("`json` helper requires a value to be passed in."); + } + + var args = ProcessArguments(arguments, variables); + object objectToSerialize = args[0]; + + return objectToSerialize switch + { + string stringObject => objectToSerialize, + _ => JsonSerializer.Serialize(objectToSerialize) + }; + }); + + handlebarsInstance.RegisterHelper("concat", (in HelperOptions options, in Context context, in Arguments arguments) => + { + var args = ProcessArguments(arguments, variables); + return string.Concat(args); + }); + + handlebarsInstance.RegisterHelper("array", (in HelperOptions options, in Context context, in Arguments arguments) => + { + var args = ProcessArguments(arguments, variables); + return args.ToArray(); + }); + + handlebarsInstance.RegisterHelper("raw", static (writer, options, context, arguments) => + { + options.Template(writer, null); + }); + + handlebarsInstance.RegisterHelper("range", (in HelperOptions options, in Context context, in Arguments arguments) => + { + var args = ProcessArguments(arguments, variables); + + // Create list with numbers from start to end (inclusive) + var start = int.Parse(args[0].ToString(), kernel.Culture); + var end = int.Parse(args[1].ToString(), kernel.Culture) + 1; + var count = end - start; + + return Enumerable.Range(start, count); + }); + + handlebarsInstance.RegisterHelper("or", (in HelperOptions options, in Context context, in Arguments arguments) => + { + var args = ProcessArguments(arguments, variables); + + return args.Any(arg => + { + return arg switch + { + bool booleanArg => booleanArg, + _ => arg is not null + }; + }); + }); + + handlebarsInstance.RegisterHelper("add", (in HelperOptions options, in Context context, in Arguments arguments) => + { + var args = ProcessArguments(arguments, variables); + return args.Sum(arg => decimal.Parse(arg.ToString(), kernel.Culture)); + }); + + handlebarsInstance.RegisterHelper("subtract", (in HelperOptions options, in Context context, in Arguments arguments) => + { + var args = ProcessArguments(arguments, variables); + return args.Aggregate((a, b) => decimal.Parse(a.ToString(), kernel.Culture) - decimal.Parse(b.ToString(), kernel.Culture)); + }); + + handlebarsInstance.RegisterHelper("equals", (in HelperOptions options, in Context context, in Arguments arguments) => + { + if (arguments.Length < 2) + { + return false; + } + + var args = ProcessArguments(arguments, variables); + object? left = args[0]; + object? 
right = args[1]; + + return left == right || (left is not null && left.Equals(right)); + }); + } +} diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj b/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj new file mode 100644 index 000000000000..4f9dabe5f089 --- /dev/null +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj @@ -0,0 +1,32 @@ + + + + + Microsoft.SemanticKernel.PromptTemplates.Handlebars + Microsoft.SemanticKernel.PromptTemplates.Handlebars + netstandard2.0 + true + + + + + + + + Semantic Kernel - Handlebars Prompt Template Engine + Semantic Kernel Handlebars Prompt Template Engine + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Extensions/Reliability.Basic/BasicHttpRetryHandler.cs b/dotnet/src/Extensions/Reliability.Basic/BasicHttpRetryHandler.cs deleted file mode 100644 index 69d1073c02fa..000000000000 --- a/dotnet/src/Extensions/Reliability.Basic/BasicHttpRetryHandler.cs +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; - -namespace Microsoft.SemanticKernel.Reliability.Basic; - -/// -/// Handler that retries HTTP requests based on a . -/// -public sealed class BasicHttpRetryHandler : DelegatingHandler -{ - /// - /// Initializes a new instance of the class. - /// - /// The retry configuration. - /// The to use for logging. If null, no logging will be performed. - internal BasicHttpRetryHandler(BasicRetryConfig? config = null, ILoggerFactory? loggerFactory = null) - : this(config ?? new(), loggerFactory, null, null) - { - } - - internal BasicHttpRetryHandler( - BasicRetryConfig config, - ILoggerFactory? loggerFactory = null, - IDelayProvider? delayProvider = null, - ITimeProvider? timeProvider = null) - { - this._config = config; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger() : NullLogger.Instance; - this._delayProvider = delayProvider ?? new TaskDelayProvider(); - this._timeProvider = timeProvider ?? new DefaultTimeProvider(); - } - - /// - /// Executes the action with retry logic - /// - /// - /// The request is retried if it throws an exception that is a retryable exception. - /// If the request throws an exception that is not a retryable exception, it is not retried. - /// If the request returns a response with a retryable error code, it is retried. - /// If the request returns a response with a non-retryable error code, it is not retried. - /// If the exception contains a RetryAfter header, the request is retried after the specified delay. - /// If configured to use exponential backoff, the delay is doubled for each retry. - /// - /// The request. - /// The cancellation token. - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - int retryCount = 0; - - var start = this._timeProvider.GetCurrentTime(); - while (true) - { - cancellationToken.ThrowIfCancellationRequested(); - - TimeSpan waitFor; - string reason; - HttpResponseMessage? 
response = null; - try - { - response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false); - - // If the request does not require a retry then we're done - if (!this.ShouldRetry(response.StatusCode)) - { - return response; - } - - reason = response.StatusCode.ToString(); - - // If the retry count is greater than the max retry count then we'll - // just return - if (retryCount >= this._config.MaxRetryCount) - { - this._logger.LogError( - "Error executing request, max retry count reached. Reason: {0}", reason); - return response; - } - - // If the retry delay is longer than the total timeout, then we'll - // just return - if (!this.HasTimeForRetry(start, retryCount, response, out waitFor)) - { - var timeTaken = this._timeProvider.GetCurrentTime() - start; - this._logger.LogError( - "Error executing request, max total retry time reached. Reason: {0}. Time spent: {1}ms", reason, - timeTaken.TotalMilliseconds); - return response; - } - } - catch (Exception e) when (this.ShouldRetry(e) || this.ShouldRetry(e.InnerException)) - { - reason = e.GetType().ToString(); - if (retryCount >= this._config.MaxRetryCount) - { - this._logger.LogError(e, - "Error executing request, max retry count reached. Reason: {0}", reason); - throw; - } - else if (!this.HasTimeForRetry(start, retryCount, response, out waitFor)) - { - var timeTaken = this._timeProvider.GetCurrentTime() - start; - this._logger.LogError( - "Error executing request, max total retry time reached. Reason: {0}. Time spent: {1}ms", reason, - timeTaken.TotalMilliseconds); - throw; - } - } - - // If the request requires a retry then we'll retry - this._logger.LogWarning( - "Error executing action [attempt {0} of {1}]. Reason: {2}. Will retry after {3}ms", - retryCount + 1, - this._config.MaxRetryCount, - reason, - waitFor.TotalMilliseconds); - - // Increase retryCount - retryCount++; - - response?.Dispose(); - - // Delay - await this._delayProvider.DelayAsync(waitFor, cancellationToken).ConfigureAwait(false); - } - } - - /// - /// Interface for a delay provider, primarily to enable unit testing. - /// - internal interface IDelayProvider - { - Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken); - } - - internal sealed class TaskDelayProvider : IDelayProvider - { - public Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) - { - return Task.Delay(delay, cancellationToken); - } - } - - /// - /// Interface for a time provider, primarily to enable unit testing. - /// - internal interface ITimeProvider - { - DateTimeOffset GetCurrentTime(); - } - - internal sealed class DefaultTimeProvider : ITimeProvider - { - public DateTimeOffset GetCurrentTime() - { - return DateTimeOffset.UtcNow; - } - } - - private readonly BasicRetryConfig _config; - private readonly ILogger _logger; - private readonly IDelayProvider _delayProvider; - private readonly ITimeProvider _timeProvider; - - /// - /// Get the wait time for the next retry. - /// - /// Current retry count - /// The response message that potentially contains RetryAfter header. - private TimeSpan GetWaitTime(int retryCount, HttpResponseMessage? response) - { - // If the response contains a RetryAfter header, use that value - // Otherwise, use the configured min retry delay - var retryAfter = response?.Headers.RetryAfter?.Date.HasValue == true - ? response?.Headers.RetryAfter?.Date - this._timeProvider.GetCurrentTime() - : (response?.Headers.RetryAfter?.Delta) ?? 
this._config.MinRetryDelay; - retryAfter ??= this._config.MinRetryDelay; - - // If the retry delay is longer than the max retry delay, use the max retry delay - var timeToWait = retryAfter > this._config.MaxRetryDelay - ? this._config.MaxRetryDelay - : retryAfter < this._config.MinRetryDelay - ? this._config.MinRetryDelay - : retryAfter ?? default; - - // If exponential backoff is enabled, and the server didn't provide a RetryAfter header, double the delay for each retry - if (this._config.UseExponentialBackoff - && response?.Headers.RetryAfter?.Date is null - && response?.Headers.RetryAfter?.Delta is null) - { - for (var backoffRetryCount = 1; backoffRetryCount < retryCount + 1; backoffRetryCount++) - { - timeToWait = timeToWait.Add(timeToWait); - } - } - - return timeToWait; - } - - /// - /// Determines if there is time left for a retry. - /// - /// The start time of the original request. - /// The current retry count. - /// The response message that potentially contains RetryAfter header. - /// The wait time for the next retry. - /// True if there is time left for a retry, false otherwise. - private bool HasTimeForRetry(DateTimeOffset start, int retryCount, HttpResponseMessage? response, out TimeSpan waitFor) - { - waitFor = this.GetWaitTime(retryCount, response); - var currentTIme = this._timeProvider.GetCurrentTime(); - var result = currentTIme - start + waitFor; - - return result < this._config.MaxTotalRetryTime; - } - - private bool ShouldRetry(HttpStatusCode statusCode) - { - return this._config.RetryableStatusCodes.Contains(statusCode); - } - - private bool ShouldRetry(Exception? exception) - { - return exception != null && this._config.RetryableExceptionTypes.Contains(exception.GetType()); - } -} diff --git a/dotnet/src/Extensions/Reliability.Basic/BasicHttpRetryHandlerFactory.cs b/dotnet/src/Extensions/Reliability.Basic/BasicHttpRetryHandlerFactory.cs deleted file mode 100644 index dd87f9913265..000000000000 --- a/dotnet/src/Extensions/Reliability.Basic/BasicHttpRetryHandlerFactory.cs +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Reliability.Basic; - -/// -/// Internal factory for creating instances. -/// -public sealed class BasicHttpRetryHandlerFactory : HttpHandlerFactory -{ - /// - /// Gets the singleton instance of . - /// - public static BasicHttpRetryHandlerFactory Instance { get; } = new BasicHttpRetryHandlerFactory(); - - /// - /// Creates a new instance of with the provided configuration. - /// - /// Http retry configuration - internal BasicHttpRetryHandlerFactory(BasicRetryConfig? config = null) - { - this.Config = config ?? new(); - } - - /// - /// Creates a new instance of with the default configuration. - /// - /// Logger factory - /// Returns the created handler - public override DelegatingHandler Create(ILoggerFactory? loggerFactory = null) - { - return new BasicHttpRetryHandler(this.Config, loggerFactory); - } - - /// - /// Creates a new instance of with a specified configuration. - /// - /// Specific configuration - /// Logger factory - /// Returns the created handler - public DelegatingHandler Create(BasicRetryConfig config, ILoggerFactory? 
loggerFactory = null) - { - Verify.NotNull(config, nameof(config)); - - return new BasicHttpRetryHandler(config, loggerFactory); - } - - /// - /// Default retry configuration used when creating a new instance of . - /// - internal BasicRetryConfig Config { get; } -} diff --git a/dotnet/src/Extensions/Reliability.Basic/BasicRetryConfig.cs b/dotnet/src/Extensions/Reliability.Basic/BasicRetryConfig.cs deleted file mode 100644 index a20d5a0f9c82..000000000000 --- a/dotnet/src/Extensions/Reliability.Basic/BasicRetryConfig.cs +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Net; -using System.Net.Http; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Reliability.Basic; - -/// -/// Retry configuration for DefaultKernelRetryHandler that uses RetryAfter header when present. -/// -public sealed record BasicRetryConfig -{ - /// - /// Maximum number of retries. - /// - /// Thrown when value is negative. - public int MaxRetryCount - { - get => this._maxRetryCount; - set - { - if (value < 0) - { - throw new ArgumentOutOfRangeException(nameof(this.MaxRetryCount), "Max retry count cannot be negative."); - } - - this._maxRetryCount = value; - } - } - - /// - /// Minimum delay between retries. - /// - public TimeSpan MinRetryDelay { get; set; } = TimeSpan.FromSeconds(2); - - /// - /// Maximum delay between retries. - /// - public TimeSpan MaxRetryDelay { get; set; } = TimeSpan.FromSeconds(60); - - /// - /// Maximum total time spent retrying. - /// - public TimeSpan MaxTotalRetryTime { get; set; } = TimeSpan.FromMinutes(2); - - /// - /// Whether to use exponential backoff or not. - /// - public bool UseExponentialBackoff { get; set; } - - /// - /// List of status codes that should be retried. - /// - public List RetryableStatusCodes { get; set; } = new() - { - (HttpStatusCode)HttpStatusCodeType.RequestTimeout, - (HttpStatusCode)HttpStatusCodeType.ServiceUnavailable, - (HttpStatusCode)HttpStatusCodeType.GatewayTimeout, - (HttpStatusCode)HttpStatusCodeType.TooManyRequests, - (HttpStatusCode)HttpStatusCodeType.BadGateway, - }; - - /// - /// List of exception types that should be retried. - /// - public List RetryableExceptionTypes { get; set; } = new() - { - typeof(HttpRequestException) - }; - - private int _maxRetryCount = 1; -} diff --git a/dotnet/src/Extensions/Reliability.Basic/Reliability.Basic.csproj b/dotnet/src/Extensions/Reliability.Basic/Reliability.Basic.csproj deleted file mode 100644 index ccc1d232283c..000000000000 --- a/dotnet/src/Extensions/Reliability.Basic/Reliability.Basic.csproj +++ /dev/null @@ -1,32 +0,0 @@ - - - - - Microsoft.SemanticKernel.Reliability.Basic - Microsoft.SemanticKernel.Reliability.Basic - netstandard2.0 - - - - - - - - - - Semantic Kernel - Basic Reliability Extension - Semantic Kernel Basic Reliability Extension - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/src/Extensions/Reliability.Basic/ReliabilityBasicKernelBuilderExtensions.cs b/dotnet/src/Extensions/Reliability.Basic/ReliabilityBasicKernelBuilderExtensions.cs deleted file mode 100644 index e1c7dfabe436..000000000000 --- a/dotnet/src/Extensions/Reliability.Basic/ReliabilityBasicKernelBuilderExtensions.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel.Reliability.Basic; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of KernelConfig -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the . -/// -public static class ReliabilityBasicKernelBuilderExtensions -{ - /// - /// Sets the default retry configuration for any kernel http request. - /// - /// Target instance - /// Retry configuration - /// Self instance - public static KernelBuilder WithRetryBasic(this KernelBuilder builder, BasicRetryConfig? retryConfig = null) - { - return builder.WithHttpHandlerFactory(new BasicHttpRetryHandlerFactory(retryConfig)); - } -} diff --git a/dotnet/src/Extensions/Reliability.Polly/PollyHttpRetryHandler.cs b/dotnet/src/Extensions/Reliability.Polly/PollyHttpRetryHandler.cs deleted file mode 100644 index d36aa22c9533..000000000000 --- a/dotnet/src/Extensions/Reliability.Polly/PollyHttpRetryHandler.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; -using Polly; - -namespace Microsoft.SemanticKernel.Reliability.Polly; - -/// -/// Customizable PollyHttpHandler that will follow the provided policy. -/// -public class PollyHttpRetryHandler : DelegatingHandler -{ - private readonly AsyncPolicy? _typedAsyncPolicy; - private readonly AsyncPolicy? _asyncPolicy; - - /// - /// Creates a new instance of . - /// - /// HttpResponseMessage typed AsyncPolicy dedicated for typed policies. - public PollyHttpRetryHandler(AsyncPolicy typedAsyncPolicy) - { - Verify.NotNull(typedAsyncPolicy); - - this._typedAsyncPolicy = typedAsyncPolicy; - } - - /// - /// Creates a new instance of dedicated for non-typed policies. - /// - /// A non-typed AsyncPolicy - public PollyHttpRetryHandler(AsyncPolicy asyncPolicy) - { - Verify.NotNull(asyncPolicy); - - this._asyncPolicy = asyncPolicy; - } - - /// - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (this._typedAsyncPolicy is not null) - { - return await this._typedAsyncPolicy.ExecuteAsync(async (cancelToken) => - { - var response = await base.SendAsync(request, cancelToken).ConfigureAwait(false); - return response; - }, cancellationToken).ConfigureAwait(false); - } - - return await this._asyncPolicy!.ExecuteAsync(async (cancelToken) => - { - var response = await base.SendAsync(request, cancelToken).ConfigureAwait(false); - return response; - }, cancellationToken).ConfigureAwait(false); - } -} diff --git a/dotnet/src/Extensions/Reliability.Polly/PollyHttpRetryHandlerFactory.cs b/dotnet/src/Extensions/Reliability.Polly/PollyHttpRetryHandlerFactory.cs deleted file mode 100644 index 4e77774c164f..000000000000 --- a/dotnet/src/Extensions/Reliability.Polly/PollyHttpRetryHandlerFactory.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Http; -using Polly; - -namespace Microsoft.SemanticKernel.Reliability.Polly; - -/// -/// Customizable PollyHttpHandlerFactory that will create handlers with the provided policy. -/// -public class PollyHttpRetryHandlerFactory : HttpHandlerFactory -{ - private readonly AsyncPolicy? _typedAsyncPolicy; - private readonly AsyncPolicy? 
_asyncPolicy; - - /// - /// Creates a new instance of . - /// - /// HttpResponseMessage typed AsyncPolicy dedicated for typed policies. - public PollyHttpRetryHandlerFactory(AsyncPolicy typedAsyncPolicy) - { - Verify.NotNull(typedAsyncPolicy); - - this._typedAsyncPolicy = typedAsyncPolicy; - } - - /// - /// Creates a new instance of dedicated for non-typed policies. - /// - /// A non-typed AsyncPolicy - public PollyHttpRetryHandlerFactory(AsyncPolicy asyncPolicy) - { - Verify.NotNull(asyncPolicy); - - this._asyncPolicy = asyncPolicy; - } - - /// - /// Creates a new instance of with the default configuration. - /// - /// Logger factory - /// Returns the created handler - public override DelegatingHandler Create(ILoggerFactory? loggerFactory = null) - { - if (this._typedAsyncPolicy is not null) - { - return new PollyHttpRetryHandler(this._typedAsyncPolicy); - } - - return new PollyHttpRetryHandler(this._asyncPolicy!); - } -} diff --git a/dotnet/src/Extensions/Reliability.Polly/Reliability.Polly.csproj b/dotnet/src/Extensions/Reliability.Polly/Reliability.Polly.csproj deleted file mode 100644 index aac4803037bc..000000000000 --- a/dotnet/src/Extensions/Reliability.Polly/Reliability.Polly.csproj +++ /dev/null @@ -1,36 +0,0 @@ - - - - - Microsoft.SemanticKernel.Reliability.Polly - Microsoft.SemanticKernel.Reliability.Polly - netstandard2.0 - - - - - - - - - - Semantic Kernel - Polly Reliability Extension - Semantic Kernel Polly Reliability Extension - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/src/Extensions/Reliability.Polly/ReliabilityPollyKernelBuilderExtensions.cs b/dotnet/src/Extensions/Reliability.Polly/ReliabilityPollyKernelBuilderExtensions.cs deleted file mode 100644 index 0236e2e81d8e..000000000000 --- a/dotnet/src/Extensions/Reliability.Polly/ReliabilityPollyKernelBuilderExtensions.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using Microsoft.SemanticKernel.Reliability.Polly; -using Polly; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of KernelConfig -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the . -/// -public static class ReliabilityPollyKernelBuilderExtensions -{ - /// - /// Sets the default retry configuration for any kernel http request. - /// - /// Target instance - /// Provided AsyncPolicy - /// Returns target instance for fluent compatibility - public static KernelBuilder WithRetryPolly(this KernelBuilder kernelConfig, AsyncPolicy retryPolicy) - { - var pollyHandler = new PollyHttpRetryHandlerFactory(retryPolicy); - return kernelConfig.WithHttpHandlerFactory(pollyHandler); - } - - /// - /// Sets the default retry configuration for any kernel http request. 
- /// - /// Target instance - /// Provided HttpResponseMessage AsyncPolicy - /// Returns target instance for fluent compatibility - public static KernelBuilder WithRetryPolly(this KernelBuilder kernelConfig, AsyncPolicy retryPolicy) - { - var pollyHandler = new PollyHttpRetryHandlerFactory(retryPolicy); - return kernelConfig.WithHttpHandlerFactory(pollyHandler); - } -} diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/BasicPromptTemplateEngine.cs b/dotnet/src/Extensions/TemplateEngine.Basic/BasicPromptTemplateEngine.cs deleted file mode 100644 index 2b6721f89bd7..000000000000 --- a/dotnet/src/Extensions/TemplateEngine.Basic/BasicPromptTemplateEngine.cs +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; - -namespace Microsoft.SemanticKernel.TemplateEngine.Basic; - -/// -/// Given a prompt, that might contain references to variables and functions: -/// - Get the list of references -/// - Resolve each reference -/// - Variable references are resolved using the context variables -/// - Function references are resolved invoking those functions -/// - Functions can be invoked passing in variables -/// - Functions do not receive the context variables, unless specified using a special variable -/// - Functions can be invoked in order and in parallel so the context variables must be immutable when invoked within the template -/// -public class BasicPromptTemplateEngine : IPromptTemplateEngine -{ - private readonly ILoggerFactory _loggerFactory; - private readonly ILogger _logger; - private readonly TemplateTokenizer _tokenizer; - - /// - /// Initializes a new instance of the class. - /// - /// The to use for logging. If null, no logging will be performed. - public BasicPromptTemplateEngine(ILoggerFactory? loggerFactory = null) - { - this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; - this._logger = this._loggerFactory.CreateLogger(typeof(BasicPromptTemplateEngine)); - this._tokenizer = new TemplateTokenizer(loggerFactory); - } - - /// - public async Task RenderAsync(string templateText, SKContext context, CancellationToken cancellationToken = default) - { - this._logger.LogTrace("Rendering string template: {0}", templateText); - var blocks = this.ExtractBlocks(templateText); - return await this.RenderAsync(blocks, context, cancellationToken).ConfigureAwait(false); - } - - /// - /// Given a prompt template string, extract all the blocks (text, variables, function calls) - /// - /// Prompt template (see skprompt.txt files) - /// Whether to validate the blocks syntax, or just return the blocks found, which could contain invalid code - /// A list of all the blocks, ie the template tokenized in text, variables and function calls - internal IList ExtractBlocks(string? templateText, bool validate = true) - { - this._logger.LogTrace("Extracting blocks from template: {0}", templateText); - var blocks = this._tokenizer.Tokenize(templateText); - - if (validate) - { - foreach (var block in blocks) - { - if (!block.IsValid(out var error)) - { - throw new SKException(error); - } - } - } - - return blocks; - } - - /// - /// Given a list of blocks render each block and compose the final result. 
- /// - /// Template blocks generated by ExtractBlocks. - /// Access into the current kernel execution context. - /// The to monitor for cancellation requests. The default is . - /// The prompt template ready to be used for an AI request. - internal async Task RenderAsync(IList blocks, SKContext context, CancellationToken cancellationToken = default) - { - this._logger.LogTrace("Rendering list of {0} blocks", blocks.Count); - var tasks = new List>(blocks.Count); - foreach (var block in blocks) - { - switch (block) - { - case ITextRendering staticBlock: - tasks.Add(Task.FromResult(staticBlock.Render(context.Variables))); - break; - - case ICodeRendering dynamicBlock: - tasks.Add(dynamicBlock.RenderCodeAsync(context, cancellationToken)); - break; - - default: - const string Error = "Unexpected block type, the block doesn't have a rendering method"; - this._logger.LogError(Error); - throw new SKException(Error); - } - } - - var result = new StringBuilder(); - foreach (Task t in tasks) - { - result.Append(await t.ConfigureAwait(false)); - } - - // Sensitive data, logging as trace, disabled by default - this._logger.LogTrace("Rendered prompt: {0}", result); - - return result.ToString(); - } - - /// - /// Given a list of blocks, render the Variable Blocks, replacing placeholders with the actual value in memory. - /// - /// List of blocks, typically all the blocks found in a template. - /// Container of all the temporary variables known to the kernel. - /// An updated list of blocks where Variable Blocks have rendered to Text Blocks. - internal IList RenderVariables(IList blocks, ContextVariables? variables) - { - this._logger.LogTrace("Rendering variables"); - return blocks.Select(block => block.Type != BlockTypes.Variable - ? block - : new TextBlock(((ITextRendering)block).Render(variables), this._loggerFactory)).ToList(); - } -} diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/CodeBlock.cs b/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/CodeBlock.cs deleted file mode 100644 index 280d34b1d25c..000000000000 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/CodeBlock.cs +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; - -#pragma warning disable CA2254 // error strings are used also internally, not just for logging -#pragma warning disable CA1031 // IsCriticalException is an internal utility and should not be used by extensions - -// ReSharper disable TemplateIsNotCompileTimeConstantProblem -internal sealed class CodeBlock : Block, ICodeRendering -{ - internal override BlockTypes Type => BlockTypes.Code; - - /// - /// Initializes a new instance of the class. - /// - /// Block content - /// The to use for logging. If null, no logging will be performed. - public CodeBlock(string? content, ILoggerFactory? loggerFactory) - : this(new CodeTokenizer(loggerFactory).Tokenize(content), content?.Trim(), loggerFactory) - { - } - - /// - /// Initializes a new instance of the class. - /// - /// A list of blocks - /// Block content - /// The to use for logging. If null, no logging will be performed. - public CodeBlock(List tokens, string? content, ILoggerFactory? 
loggerFactory) - : base(content?.Trim(), loggerFactory) - { - this._tokens = tokens; - } - - /// - public override bool IsValid(out string errorMsg) - { - errorMsg = ""; - - foreach (Block token in this._tokens) - { - if (!token.IsValid(out errorMsg)) - { - this.Logger.LogError(errorMsg); - return false; - } - } - - if (this._tokens.Count > 0 && this._tokens[0].Type == BlockTypes.NamedArg) - { - errorMsg = "Unexpected named argument found. Expected function name first."; - this.Logger.LogError(errorMsg); - return false; - } - - if (this._tokens.Count > 1 && !this.IsValidFunctionCall(out errorMsg)) - { - return false; - } - - this._validated = true; - - return true; - } - - /// - public async Task RenderCodeAsync(SKContext context, CancellationToken cancellationToken = default) - { - if (!this._validated && !this.IsValid(out var error)) - { - throw new SKException(error); - } - - this.Logger.LogTrace("Rendering code: `{Content}`", this.Content); - - switch (this._tokens[0].Type) - { - case BlockTypes.Value: - case BlockTypes.Variable: - return ((ITextRendering)this._tokens[0]).Render(context.Variables); - - case BlockTypes.FunctionId: - return await this.RenderFunctionCallAsync((FunctionIdBlock)this._tokens[0], context).ConfigureAwait(false); - } - - throw new SKException($"Unexpected first token type: {this._tokens[0].Type:G}"); - } - - #region private ================================================================================ - - private bool _validated; - private readonly List _tokens; - - private async Task RenderFunctionCallAsync(FunctionIdBlock fBlock, SKContext context) - { - // Clone the context to avoid unexpected variable mutations from the inner function execution - ContextVariables inputVariables = context.Variables.Clone(); - - // If the code syntax is {{functionName $varName}} use $varName instead of $input - // If the code syntax is {{functionName 'value'}} use "value" instead of $input - if (this._tokens.Count > 1) - { - inputVariables = this.PopulateContextWithFunctionArguments(inputVariables); - } - try - { - await context.Runner.RunAsync(fBlock.PluginName, fBlock.FunctionName, inputVariables).ConfigureAwait(false); - } - catch (Exception ex) - { - this.Logger.LogError(ex, "Function {Plugin}.{Function} execution failed with error {Error}", fBlock.PluginName, fBlock.FunctionName, ex.Message); - throw; - } - - return inputVariables.ToString(); - } - - private bool IsValidFunctionCall(out string errorMsg) - { - errorMsg = ""; - if (this._tokens[0].Type != BlockTypes.FunctionId) - { - errorMsg = $"Unexpected second token found: {this._tokens[1].Content}"; - this.Logger.LogError(errorMsg); - return false; - } - - if (this._tokens[1].Type is not BlockTypes.Value and not BlockTypes.Variable and not BlockTypes.NamedArg) - { - errorMsg = "The first arg of a function must be a quoted string, variable or named argument"; - this.Logger.LogError(errorMsg); - return false; - } - - for (int i = 2; i < this._tokens.Count; i++) - { - if (this._tokens[i].Type is not BlockTypes.NamedArg) - { - errorMsg = $"Functions only support named arguments after the first argument. 
Argument {i} is not named."; - this.Logger.LogError(errorMsg); - return false; - } - } - - return true; - } - - private ContextVariables PopulateContextWithFunctionArguments(ContextVariables variables) - { - // Clone the context to avoid unexpected and hard to test input mutation - var variablesClone = variables.Clone(); - var firstArg = this._tokens[1]; - - // Sensitive data, logging as trace, disabled by default - this.Logger.LogTrace("Passing variable/value: `{Content}`", firstArg.Content); - - var namedArgsStartIndex = 1; - if (firstArg.Type is not BlockTypes.NamedArg) - { - string input = ((ITextRendering)this._tokens[1]).Render(variablesClone); - // Keep previous trust information when updating the input - variablesClone.Update(input); - namedArgsStartIndex++; - } - - for (int i = namedArgsStartIndex; i < this._tokens.Count; i++) - { - var arg = this._tokens[i] as NamedArgBlock; - - // When casting fails because the block isn't a NamedArg, arg is null - if (arg == null) - { - var errorMsg = "Functions support up to one positional argument"; - this.Logger.LogError(errorMsg); - throw new SKException($"Unexpected first token type: {this._tokens[i].Type:G}"); - } - - // Sensitive data, logging as trace, disabled by default - this.Logger.LogTrace("Passing variable/value: `{Content}`", arg.Content); - - variablesClone.Set(arg.Name, arg.GetValue(variables)); - } - - return variablesClone; - } - #endregion -} -// ReSharper restore TemplateIsNotCompileTimeConstantProblem -#pragma warning restore CA2254 diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ICodeRendering.cs b/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ICodeRendering.cs deleted file mode 100644 index cbdf9ef9c577..000000000000 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ICodeRendering.cs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; - -/// -/// Interface of dynamic blocks that need async IO to be rendered. -/// -public interface ICodeRendering -{ - /// - /// Render the block using the given context, potentially using external I/O. - /// - /// SK execution context - /// The to monitor for cancellation requests. The default is . - /// Rendered content - public Task RenderCodeAsync(SKContext context, CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ITextRendering.cs b/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ITextRendering.cs deleted file mode 100644 index 1fa75694c03f..000000000000 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ITextRendering.cs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; - -/// -/// Interface of static blocks that don't need async IO to be rendered. -/// -public interface ITextRendering -{ - /// - /// Render the block using only the given variables. - /// - /// Optional variables used to render the block - /// Rendered content - public string Render(ContextVariables? 
variables); -} diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/NamedArgBlock.cs b/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/NamedArgBlock.cs deleted file mode 100644 index 0847c8df48bb..000000000000 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/NamedArgBlock.cs +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.RegularExpressions; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; - -/// -/// A that represents a named argument for a function call. -/// For example, in the template {{ MyPlugin.MyFunction var1="foo" }}, var1="foo" is a named arg block. -/// -internal sealed class NamedArgBlock : Block, ITextRendering -{ - /// - /// Returns the . - /// - internal override BlockTypes Type => BlockTypes.NamedArg; - - /// - /// Gets the name of the function argument. - /// - internal string Name { get; } = string.Empty; - - /// - /// Initializes a new instance of the class. - /// - /// Raw text parsed from the prompt template. - /// The to use for logging. If null, no logging will be performed. - /// - public NamedArgBlock(string? text, ILoggerFactory? logger = null) - : base(NamedArgBlock.TrimWhitespace(text), logger) - { - var argParts = this.Content.Split(Symbols.NamedArgBlockSeparator); - if (argParts.Length != 2) - { - this.Logger.LogError("Invalid named argument `{Text}`", text); - throw new SKException($"A function named argument must contain a name and value separated by a '{Symbols.NamedArgBlockSeparator}' character."); - } - - this.Name = argParts[0]; - this._argNameAsVarBlock = new VarBlock($"{Symbols.VarPrefix}{argParts[0]}"); - var argValue = argParts[1]; - if (argValue.Length == 0) - { - this.Logger.LogError("Invalid named argument `{Text}`", text); - throw new SKException($"A function named argument must contain a quoted value or variable after the '{Symbols.NamedArgBlockSeparator}' character."); - } - - if (argValue[0] == Symbols.VarPrefix) - { - this._argValueAsVarBlock = new VarBlock(argValue); - } - else - { - this._valBlock = new ValBlock(argValue); - } - } - - /// - /// Gets the rendered value of the function argument. If the value is a , the value stays the same. - /// If the value is a , the value of the variable is determined by the context variables passed in. - /// - /// Variables to use for rendering the named argument value when the value is a . - /// - internal string GetValue(ContextVariables? variables) - { - var valueIsValidValBlock = this._valBlock != null && this._valBlock.IsValid(out var errorMessage); - if (valueIsValidValBlock) - { - return this._valBlock!.Render(variables); - } - - var valueIsValidVarBlock = this._argValueAsVarBlock != null && this._argValueAsVarBlock.IsValid(out var errorMessage2); - if (valueIsValidVarBlock) - { - return this._argValueAsVarBlock!.Render(variables); - } - - return string.Empty; - } - - /// - /// Renders the named arg block. - /// - /// - /// - public string Render(ContextVariables? variables) - { - return this.Content; - } - - /// - /// Returns whether the named arg block has valid syntax. - /// - /// An error message that gets set when the named arg block is not valid. 
- /// -#pragma warning disable CA2254 // error strings are used also internally, not just for logging - public override bool IsValid(out string errorMsg) - { - errorMsg = string.Empty; - if (string.IsNullOrEmpty(this.Name)) - { - errorMsg = "A named argument must have a name"; - this.Logger.LogError(errorMsg); - return false; - } - - if (this._valBlock != null && !this._valBlock.IsValid(out var valErrorMsg)) - { - errorMsg = $"There was an issue with the named argument value for '{this.Name}': {valErrorMsg}"; - this.Logger.LogError(errorMsg); - return false; - } - else if (this._argValueAsVarBlock != null && !this._argValueAsVarBlock.IsValid(out var variableErrorMsg)) - { - errorMsg = $"There was an issue with the named argument value for '{this.Name}': {variableErrorMsg}"; - this.Logger.LogError(errorMsg); - return false; - } - else if (this._valBlock == null && this._argValueAsVarBlock == null) - { - errorMsg = "A named argument must have a value"; - this.Logger.LogError(errorMsg); - return false; - } - - // Argument names share the same validation as variables - if (!this._argNameAsVarBlock.IsValid(out var argNameErrorMsg)) - { - errorMsg = Regex.Replace(argNameErrorMsg, "a variable", "An argument", RegexOptions.IgnoreCase); - errorMsg = Regex.Replace(errorMsg, "the variable", "The argument", RegexOptions.IgnoreCase); - return false; - } - - return true; - } -#pragma warning restore CA2254 - - #region private ================================================================================ - - private readonly VarBlock _argNameAsVarBlock; - private readonly ValBlock? _valBlock; - private readonly VarBlock? _argValueAsVarBlock; - - private static string? TrimWhitespace(string? text) - { - if (text == null) - { - return text; - } - - string[] trimmedParts = NamedArgBlock.GetTrimmedParts(text); - switch (trimmedParts?.Length) - { - case (2): - return $"{trimmedParts[0]}{Symbols.NamedArgBlockSeparator}{trimmedParts[1]}"; - case (1): - return trimmedParts[0]; - default: - return null; - } - } - - private static string[] GetTrimmedParts(string? text) - { - if (text == null) - { - return System.Array.Empty(); - } - - string[] parts = text.Split(new char[] { Symbols.NamedArgBlockSeparator }, 2); - string[] result = new string[parts.Length]; - if (parts.Length > 0) - { - result[0] = parts[0].Trim(); - } - - if (parts.Length > 1) - { - result[1] = parts[1].Trim(); - } - - return result; - } - - #endregion -} diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/TemplateEngine.Basic.csproj b/dotnet/src/Extensions/TemplateEngine.Basic/TemplateEngine.Basic.csproj deleted file mode 100644 index defc4bf9a7e7..000000000000 --- a/dotnet/src/Extensions/TemplateEngine.Basic/TemplateEngine.Basic.csproj +++ /dev/null @@ -1,25 +0,0 @@ - - - - - Microsoft.SemanticKernel.TemplateEngine.Basic - Microsoft.SemanticKernel.TemplateEngine.Basic - netstandard2.0 - - - - - - - Semantic Kernel - Basic Prompt Template Engine - Semantic Kernel Basic Prompt Template Engine - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.Grpc/AssemblyInfo.cs b/dotnet/src/Functions/Functions.Grpc/AssemblyInfo.cs new file mode 100644 index 000000000000..a7534ccf9f38 --- /dev/null +++ b/dotnet/src/Functions/Functions.Grpc/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
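// Note (editorial, not part of this change): projects consuming these APIs typically need to opt in,
// e.g. by suppressing the SKEXP0040 diagnostic via <NoWarn>SKEXP0040</NoWarn> or a #pragma warning disable.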
+[assembly: Experimental("SKEXP0040")] diff --git a/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs b/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs new file mode 100644 index 000000000000..00a88fcc1fb9 --- /dev/null +++ b/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcKernelExtensions.cs @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Plugins.Grpc.Model; +using Microsoft.SemanticKernel.Plugins.Grpc.Protobuf; + +namespace Microsoft.SemanticKernel.Plugins.Grpc; + +/// +/// extensions methods for gRPC functionality. +/// +public static class GrpcKernelExtensions +{ + // TODO: Revise XML comments and validate shape of methods is as desired + + /// + /// Imports gRPC document from a directory. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Directory containing the plugin directory. + /// Name of the directory containing the selected plugin. + /// A list of all the prompt functions representing the plugin. + public static KernelPlugin ImportPluginFromGrpcDirectory( + this Kernel kernel, + string parentDirectory, + string pluginDirectoryName) + { + KernelPlugin plugin = CreatePluginFromGrpcDirectory(kernel, parentDirectory, pluginDirectoryName); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Imports gRPC document from a file. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// File path to .proto document. + /// Name of the plugin to register. + /// A list of all the prompt functions representing the plugin. + public static KernelPlugin ImportPluginFromGrpcFile( + this Kernel kernel, + string filePath, + string pluginName) + { + KernelPlugin plugin = CreatePluginFromGrpcFile(kernel, filePath, pluginName); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Registers an gRPC plugin. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// .proto document stream. + /// Plugin name. + /// A list of all the prompt functions representing the plugin. + public static KernelPlugin ImportPluginFromGrpc( + this Kernel kernel, + Stream documentStream, + string pluginName) + { + KernelPlugin plugin = CreatePluginFromGrpc(kernel, documentStream, pluginName); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Imports gRPC document from a directory. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Directory containing the plugin directory. + /// Name of the directory containing the selected plugin. + /// A list of all the prompt functions representing the plugin. 
+ public static KernelPlugin CreatePluginFromGrpcDirectory( + this Kernel kernel, + string parentDirectory, + string pluginDirectoryName) + { + const string ProtoFile = "grpc.proto"; + + Verify.ValidPluginName(pluginDirectoryName, kernel.Plugins); + + var pluginDir = Path.Combine(parentDirectory, pluginDirectoryName); + Verify.DirectoryExists(pluginDir); + + var filePath = Path.Combine(pluginDir, ProtoFile); + if (!File.Exists(filePath)) + { + throw new FileNotFoundException($"No .proto document for the specified path - {filePath} is found."); + } + + if (kernel.LoggerFactory.CreateLogger(typeof(GrpcKernelExtensions)) is ILogger logger && + logger.IsEnabled(LogLevel.Trace)) + { + logger.LogTrace("Registering gRPC functions from {0} .proto document", filePath); + } + + using var stream = File.OpenRead(filePath); + + return kernel.CreatePluginFromGrpc(stream, pluginDirectoryName); + } + + /// + /// Imports gRPC document from a file. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// File path to .proto document. + /// Name of the plugin to register. + /// A list of all the prompt functions representing the plugin. + public static KernelPlugin CreatePluginFromGrpcFile( + this Kernel kernel, + string filePath, + string pluginName) + { + if (!File.Exists(filePath)) + { + throw new FileNotFoundException($"No .proto document for the specified path - {filePath} is found."); + } + + if (kernel.LoggerFactory.CreateLogger(typeof(GrpcKernelExtensions)) is ILogger logger && + logger.IsEnabled(LogLevel.Trace)) + { + logger.LogTrace("Registering gRPC functions from {0} .proto document", filePath); + } + + using var stream = File.OpenRead(filePath); + + return kernel.CreatePluginFromGrpc(stream, pluginName); + } + + /// + /// Registers an gRPC plugin. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// .proto document stream. + /// Plugin name. + /// A list of all the prompt functions representing the plugin. + public static KernelPlugin CreatePluginFromGrpc( + this Kernel kernel, + Stream documentStream, + string pluginName) + { + Verify.NotNull(kernel); + Verify.ValidPluginName(pluginName, kernel.Plugins); + + // Parse + var parser = new ProtoDocumentParser(); + + var operations = parser.Parse(documentStream, pluginName); + + var functions = new List(); + + ILoggerFactory loggerFactory = kernel.LoggerFactory; + + var client = HttpClientProvider.GetHttpClient(kernel.Services.GetService()); + + var runner = new GrpcOperationRunner(client); + + ILogger logger = loggerFactory.CreateLogger(typeof(GrpcKernelExtensions)) ?? NullLogger.Instance; + foreach (var operation in operations) + { + try + { + logger.LogTrace("Registering gRPC function {0}.{1}", pluginName, operation.Name); + functions.Add(CreateGrpcFunction(runner, operation, loggerFactory)); + } + catch (Exception ex) when (!ex.IsCriticalException()) + { + //Logging the exception and keep registering other gRPC functions + logger.LogWarning(ex, "Something went wrong while rendering the gRPC function. Function: {0}.{1}. Error: {2}", + pluginName, operation.Name, ex.Message); + } + } + + return KernelPluginFactory.CreateFromFunctions(pluginName, null, functions); + } + + #region private + + /// + /// Registers KernelFunctionFactory for a gRPC operation. + /// + /// gRPC operation runner. + /// The gRPC operation. + /// The logger factory. + /// An instance of class. 
+ private static KernelFunction CreateGrpcFunction( + GrpcOperationRunner runner, + GrpcOperation operation, + ILoggerFactory loggerFactory) + { + var operationParameters = operation.GetParameters(); + + async Task ExecuteAsync(KernelArguments arguments, CancellationToken cancellationToken) + { + try + { + return await runner.RunAsync(operation, arguments, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (!ex.IsCriticalException() && loggerFactory.CreateLogger(typeof(GrpcKernelExtensions)) is ILogger logger && logger.IsEnabled(LogLevel.Warning)) + { + logger.LogWarning(ex, "Something went wrong while rendering the gRPC function. Function: {0}. Error: {1}", operation.Name, ex.Message); + throw; + } + } + + return KernelFunctionFactory.CreateFromMethod( + method: ExecuteAsync, + parameters: operationParameters.ToList(), + description: operation.Name, + functionName: operation.Name, + loggerFactory: loggerFactory); + } + + #endregion +} diff --git a/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcOperationExtensions.cs b/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcOperationExtensions.cs index dc5f033792d0..ea6029a71da2 100644 --- a/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcOperationExtensions.cs +++ b/dotnet/src/Functions/Functions.Grpc/Extensions/GrpcOperationExtensions.cs @@ -1,10 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; -using Microsoft.SemanticKernel.Functions.Grpc.Model; +using Microsoft.SemanticKernel.Plugins.Grpc.Model; -// ReSharper disable once CheckNamespace -namespace Microsoft.SemanticKernel.Functions.Grpc.Extensions; +namespace Microsoft.SemanticKernel.Plugins.Grpc; #pragma warning disable RCS1175 // Unused 'this' parameter 'operation'. @@ -18,19 +17,21 @@ internal static class GrpcOperationExtensions /// TODO: not an extension method, `operation` is never used. /// /// The list of parameters. - public static IReadOnlyList GetParameters(this GrpcOperation operation) + public static IReadOnlyList GetParameters(this GrpcOperation operation) { - var parameters = new ParameterView[] + var parameters = new KernelParameterMetadata[] { // Register the "address" parameter so that it's possible to override it if needed. - new ParameterView(GrpcOperation.AddressArgumentName, - "Address for gRPC channel to use.", - string.Empty), + new(GrpcOperation.AddressArgumentName) + { + Description = "Address for gRPC channel to use.", + }, // Register the "payload" parameter to be used as gRPC operation request message. - new ParameterView(GrpcOperation.PayloadArgumentName, - "gRPC request message.", - string.Empty) + new(GrpcOperation.PayloadArgumentName) + { + Description = "gRPC request message.", + }, }; return parameters; diff --git a/dotnet/src/Functions/Functions.Grpc/Extensions/KernelGrpcExtensions.cs b/dotnet/src/Functions/Functions.Grpc/Extensions/KernelGrpcExtensions.cs deleted file mode 100644 index 399098b69bf3..000000000000 --- a/dotnet/src/Functions/Functions.Grpc/Extensions/KernelGrpcExtensions.cs +++ /dev/null @@ -1,242 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.Grpc.Model; -using Microsoft.SemanticKernel.Functions.Grpc.Protobuf; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Functions.Grpc.Extensions; - -/// -/// extensions methods for gRPC functionality. -/// -public static class KernelGrpcExtensions -{ - /// - /// Imports gRPC document from a directory. - /// - /// Semantic Kernel instance. - /// Directory containing the plugin directory. - /// Name of the directory containing the selected plugin. - /// HttpClient to use for sending requests. - /// A list of all the semantic functions representing the plugin. - public static IDictionary ImportGrpcFunctionsFromDirectory( - this IKernel kernel, - string parentDirectory, - string pluginDirectoryName, - HttpClient? httpClient = null) - { - const string ProtoFile = "grpc.proto"; - - Verify.ValidPluginName(pluginDirectoryName); - - var pluginDir = Path.Combine(parentDirectory, pluginDirectoryName); - Verify.DirectoryExists(pluginDir); - - var filePath = Path.Combine(pluginDir, ProtoFile); - if (!File.Exists(filePath)) - { - throw new FileNotFoundException($"No .proto document for the specified path - {filePath} is found."); - } - - kernel.LoggerFactory.CreateLogger(typeof(KernelGrpcExtensions)).LogTrace("Registering gRPC functions from {0} .proto document", filePath); - - using var stream = File.OpenRead(filePath); - - return kernel.RegisterGrpcFunctions(stream, pluginDirectoryName, httpClient); - } - - /// - /// Imports gRPC document from a file. - /// - /// Semantic Kernel instance. - /// Name of the plugin to register. - /// File path to .proto document. - /// HttpClient to use for sending requests. - /// A list of all the semantic functions representing the plugin. - public static IDictionary ImportGrpcFunctionsFromFile( - this IKernel kernel, - string pluginName, - string filePath, - HttpClient? httpClient = null) - { - if (!File.Exists(filePath)) - { - throw new FileNotFoundException($"No .proto document for the specified path - {filePath} is found."); - } - - kernel.LoggerFactory.CreateLogger(typeof(KernelGrpcExtensions)).LogTrace("Registering gRPC functions from {0} .proto document", filePath); - - using var stream = File.OpenRead(filePath); - - return kernel.RegisterGrpcFunctions(stream, pluginName, httpClient); - } - - /// - /// Registers an gRPC plugin. - /// - /// Semantic Kernel instance. - /// .proto document stream. - /// Plugin name. - /// HttpClient to use for sending requests. - /// A list of all the semantic functions representing the plugin. - public static IDictionary RegisterGrpcFunctions( - this IKernel kernel, - Stream documentStream, - string pluginName, - HttpClient? 
httpClient = null) - { - Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName); - - // Parse - var parser = new ProtoDocumentParser(); - - var operations = parser.Parse(documentStream, pluginName); - - var plugin = new Dictionary(); - - var client = HttpClientProvider.GetHttpClient(kernel.HttpHandlerFactory, httpClient, kernel.LoggerFactory); - - var runner = new GrpcOperationRunner(client); - - ILogger logger = kernel.LoggerFactory.CreateLogger(typeof(KernelGrpcExtensions)); - foreach (var operation in operations) - { - try - { - logger.LogTrace("Registering gRPC function {0}.{1}", pluginName, operation.Name); - var function = kernel.RegisterGrpcFunction(runner, pluginName, operation); - plugin[function.Name] = function; - } - catch (Exception ex) when (!ex.IsCriticalException()) - { - //Logging the exception and keep registering other gRPC functions - logger.LogWarning(ex, "Something went wrong while rendering the gRPC function. Function: {0}.{1}. Error: {2}", - pluginName, operation.Name, ex.Message); - } - } - - return plugin; - } - - #region private - - /// - /// Registers SKFunction for a gRPC operation. - /// - /// Semantic Kernel instance. - /// gRPC operation runner. - /// Plugin name. - /// The gRPC operation. - /// An instance of class. - private static ISKFunction RegisterGrpcFunction( - this IKernel kernel, - GrpcOperationRunner runner, - string pluginName, - GrpcOperation operation) - { - var operationParameters = operation.GetParameters(); - - async Task ExecuteAsync(SKContext context) - { - try - { - var arguments = new Dictionary(); - - //Extract function arguments from context - foreach (var parameter in operationParameters) - { - //A try to resolve argument parameter name. - if (context.Variables.TryGetValue(parameter.Name, out string? value)) - { - arguments.Add(parameter.Name, value); - continue; - } - - throw new KeyNotFoundException($"No variable found in context to use as an argument for the '{parameter.Name}' parameter of the '{pluginName}.{operation.Name}' gRPC function."); - } - - //SKFunction should be extended to pass cancellation token for delegateFunction calls. - var result = await runner.RunAsync(operation, arguments, CancellationToken.None).ConfigureAwait(false); - - if (result != null) - { - context.Variables.Update(result.ToString()); - } - } - catch (Exception ex) when (!ex.IsCriticalException()) - { - kernel.LoggerFactory.CreateLogger(typeof(KernelGrpcExtensions)).LogWarning(ex, "Something went wrong while rendering the gRPC function. Function: {0}.{1}. Error: {2}", pluginName, operation.Name, - ex.Message); - throw; - } - - return context; - } - - var function = SKFunction.FromNativeFunction( - nativeFunction: ExecuteAsync, - parameters: operationParameters.ToList(), - description: operation.Name, - pluginName: pluginName, - functionName: operation.Name, - loggerFactory: kernel.LoggerFactory); - - return kernel.RegisterCustomFunction(function); - } - - #endregion - - #region obsolete - - [Obsolete("Methods and classes which includes Skill in the name have been renamed to use Plugin. Use Kernel.ImportGrpcFunctionsFromDirectory instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - public static IDictionary ImportGrpcSkillFromDirectory( - this IKernel kernel, - string parentDirectory, - string skillDirectoryName, - HttpClient? 
httpClient = null) - { - return kernel.ImportGrpcFunctionsFromDirectory(parentDirectory, skillDirectoryName, httpClient); - } -#pragma warning restore CS1591 - - [Obsolete("Methods and classes which includes Skill in the name have been renamed to use Plugin. Use Kernel.ImportGrpcFunctionsFromFile instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - public static IDictionary ImportGrpcSkillFromFile( - this IKernel kernel, - string skillName, - string filePath, - HttpClient? httpClient = null) - { - return kernel.ImportGrpcFunctionsFromFile(skillName, filePath, httpClient); - } -#pragma warning restore CS1591 - - [Obsolete("Methods and classes which includes Skill in the name have been renamed to use Plugin. Use Kernel.RegisterGrpcFunctions instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - public static IDictionary RegisterGrpcSkill( - this IKernel kernel, - Stream documentStream, - string skillName, - HttpClient? httpClient = null) - { - return kernel.RegisterGrpcFunctions(documentStream, skillName, httpClient); - } -#pragma warning restore CS1591 - - #endregion -} diff --git a/dotnet/src/Functions/Functions.Grpc/Functions.Grpc.csproj b/dotnet/src/Functions/Functions.Grpc/Functions.Grpc.csproj index 4c09330e9237..c47b33b812b6 100644 --- a/dotnet/src/Functions/Functions.Grpc/Functions.Grpc.csproj +++ b/dotnet/src/Functions/Functions.Grpc/Functions.Grpc.csproj @@ -2,9 +2,10 @@ - Microsoft.SemanticKernel.Functions.Grpc + Microsoft.SemanticKernel.Plugins.Grpc $(AssemblyName) netstandard2.0 + alpha @@ -12,8 +13,8 @@ - Semantic Kernel - gRPC Functions - Semantic Kernel gRPC Functions + Semantic Kernel - gRPC Plugins + Semantic Kernel gRPC Plugins @@ -32,7 +33,6 @@ - diff --git a/dotnet/src/Functions/Functions.Grpc/GrpcOperationRunner.cs b/dotnet/src/Functions/Functions.Grpc/GrpcOperationRunner.cs index ca3fb3bb3fb2..35a86334e43e 100644 --- a/dotnet/src/Functions/Functions.Grpc/GrpcOperationRunner.cs +++ b/dotnet/src/Functions/Functions.Grpc/GrpcOperationRunner.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using System.Globalization; using System.IO; +using System.Linq; using System.Net.Http; using System.Reflection; using System.Reflection.Emit; @@ -13,17 +14,20 @@ using System.Threading.Tasks; using Grpc.Core; using Grpc.Net.Client; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.Grpc.Model; +using Microsoft.SemanticKernel.Plugins.Grpc.Model; using ProtoBuf; -namespace Microsoft.SemanticKernel.Functions.Grpc; +namespace Microsoft.SemanticKernel.Plugins.Grpc; /// /// Runs gRPC operation runner. /// internal sealed class GrpcOperationRunner { + /// Serialization options that use a camel casing naming policy. + private static readonly JsonSerializerOptions s_camelCaseOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + /// Deserialization options that use case-insensitive property names. + private static readonly JsonSerializerOptions s_propertyCaseInsensitiveOptions = new() { PropertyNameCaseInsensitive = true }; /// /// An instance of the HttpClient class. /// @@ -45,12 +49,14 @@ public GrpcOperationRunner(HttpClient httpClient) /// The operation arguments. /// The cancellation token. /// The result of the operation run. 
- public async Task RunAsync(GrpcOperation operation, IDictionary arguments, CancellationToken cancellationToken = default) + public async Task RunAsync(GrpcOperation operation, KernelArguments arguments, CancellationToken cancellationToken = default) { Verify.NotNull(operation); Verify.NotNull(arguments); - var address = this.GetAddress(operation, arguments); + var stringArgument = CastToStringArguments(arguments, operation); + + var address = this.GetAddress(operation, stringArgument); var channelOptions = new GrpcChannelOptions { HttpClient = this._httpClient, DisposeHttpClient = false }; @@ -71,7 +77,7 @@ public async Task RunAsync(GrpcOperation operation, IDictionary RunAsync(GrpcOperation operation, IDictionary + /// Casts argument values of type object to string. + /// + /// The kernel arguments to be cast. + /// The gRPC operation. + /// A dictionary of arguments with string values. + /// Thrown when an argument has an unsupported, non-string type. + private static Dictionary CastToStringArguments(KernelArguments arguments, GrpcOperation operation) + { + return arguments.ToDictionary(item => item.Key, item => + { + if (item.Value is string stringValue) + { + return stringValue; + } + + throw new KernelException($"Non-string gRPC operation arguments are not supported in Release Candidate 1. This feature will be available soon, but for now, please ensure that all arguments are strings. Operation '{operation.Name}' argument '{item.Key}' is of type '{item.Value?.GetType()}'."); + }); + } + /// /// Converts gRPC response. /// @@ -87,7 +113,7 @@ public async Task RunAsync(GrpcOperation operation, IDictionaryThe converted response. private static JsonObject ConvertResponse(object response, Type responseType) { - var content = JsonSerializer.Serialize(response, responseType, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); + var content = JsonSerializer.Serialize(response, responseType, s_camelCaseOptions); //First iteration allowing to associate additional metadata with the returned content. var result = new JsonObject(); @@ -102,7 +128,7 @@ private static JsonObject ConvertResponse(object response, Type responseType) /// The gRPC operation. /// The gRPC operation arguments. /// The channel address. - private string GetAddress(GrpcOperation operation, IDictionary arguments) + private string GetAddress(GrpcOperation operation, Dictionary arguments) { if (!arguments.TryGetValue(GrpcOperation.AddressArgumentName, out string? address)) { @@ -111,7 +137,7 @@ private string GetAddress(GrpcOperation operation, IDictionary a if (string.IsNullOrEmpty(address)) { - throw new SKException($"No address provided for the '{operation.Name}' gRPC operation."); + throw new KernelException($"No address provided for the '{operation.Name}' gRPC operation."); } return address!; @@ -150,22 +176,18 @@ T Deserialize(byte[] source) /// The operation request data type. /// The operation arguments. /// The operation request instance. - private object GenerateOperationRequest(GrpcOperation operation, Type type, IDictionary arguments) + private object GenerateOperationRequest(GrpcOperation operation, Type type, Dictionary arguments) { //Getting 'payload' argument to by used as gRPC request message - if (!arguments.TryGetValue(GrpcOperation.PayloadArgumentName, out var payload)) + if (!arguments.TryGetValue(GrpcOperation.PayloadArgumentName, out string? 
payload) || + string.IsNullOrEmpty(payload)) { - throw new SKException($"No '{GrpcOperation.PayloadArgumentName}' argument representing gRPC request message is found for the '{operation.Name}' gRPC operation."); + throw new KernelException($"No '{GrpcOperation.PayloadArgumentName}' argument representing gRPC request message is found for the '{operation.Name}' gRPC operation."); } //Deserializing JSON payload to gRPC request message - var instance = JsonSerializer.Deserialize(payload, type, new JsonSerializerOptions { PropertyNameCaseInsensitive = true }); - if (instance == null) - { - throw new SKException($"Impossible to create gRPC request message for the '{operation.Name}' gRPC operation."); - } - - return instance; + return JsonSerializer.Deserialize(payload!, type, s_propertyCaseInsensitiveOptions) ?? + throw new KernelException($"Unable to create gRPC request message for the '{operation.Name}' gRPC operation."); } /// @@ -215,21 +237,16 @@ private static TypeInfo BuildGrpcOperationDataContractType(GrpcOperationDataCont propertyBuilder.SetSetMethod(setterBuilder); //Add ProtoMember attribute to the data contract with tag/number - var dataMemberAttributeBuilder = new CustomAttributeBuilder(typeof(ProtoMemberAttribute).GetConstructor(new[] { typeof(int) }), new object[] { field.Number }); + var dataMemberAttributeBuilder = new CustomAttributeBuilder(typeof(ProtoMemberAttribute).GetConstructor(new[] { typeof(int) })!, new object[] { field.Number }); propertyBuilder.SetCustomAttribute(dataMemberAttributeBuilder); } //Add ProtoContract attribute to the data contract - var dataContractAttributeBuilder = new CustomAttributeBuilder(typeof(ProtoContractAttribute).GetConstructor(Type.EmptyTypes), Array.Empty()); + var dataContractAttributeBuilder = new CustomAttributeBuilder(typeof(ProtoContractAttribute).GetConstructor(Type.EmptyTypes)!, Array.Empty()); typeBuilder.SetCustomAttribute(dataContractAttributeBuilder); - var type = typeBuilder.CreateTypeInfo(); - if (type == null) - { - throw new SKException($"Impossible to create type for '{dataContractMetadata.Name}' data contract."); - } - - return type; + return typeBuilder.CreateTypeInfo() ?? + throw new KernelException($"Impossible to create type for '{dataContractMetadata.Name}' data contract."); } /// @@ -237,42 +254,24 @@ private static TypeInfo BuildGrpcOperationDataContractType(GrpcOperationDataCont /// /// The protobuf data type name. /// The .net type. 
- private static Type GetNetType(string type) - { - switch (type) + private static Type GetNetType(string type) => + type switch { - case "TYPE_DOUBLE": - return typeof(double); - case "TYPE_FLOAT": - return typeof(float); - case "TYPE_INT64": - return typeof(long); - case "TYPE_UINT64": - return typeof(ulong); - case "TYPE_INT32": - return typeof(int); - case "TYPE_FIXED64": - return typeof(ulong); - case "TYPE_FIXED32": - return typeof(uint); - case "TYPE_BOOL": - return typeof(bool); - case "TYPE_STRING": - return typeof(string); - case "TYPE_BYTES": - return typeof(byte[]); - case "TYPE_UINT32": - return typeof(uint); - case "TYPE_SFIXED32": - return typeof(int); - case "TYPE_SFIXED64": - return typeof(long); - case "TYPE_SINT32": - return typeof(int); - case "TYPE_SINT64": - return typeof(long); - default: - throw new ArgumentException($"Unknown type {type}", nameof(type)); - } - } + "TYPE_DOUBLE" => typeof(double), + "TYPE_FLOAT" => typeof(float), + "TYPE_INT64" => typeof(long), + "TYPE_UINT64" => typeof(ulong), + "TYPE_INT32" => typeof(int), + "TYPE_FIXED64" => typeof(ulong), + "TYPE_FIXED32" => typeof(uint), + "TYPE_BOOL" => typeof(bool), + "TYPE_STRING" => typeof(string), + "TYPE_BYTES" => typeof(byte[]), + "TYPE_UINT32" => typeof(uint), + "TYPE_SFIXED32" => typeof(int), + "TYPE_SFIXED64" => typeof(long), + "TYPE_SINT32" => typeof(int), + "TYPE_SINT64" => typeof(long), + _ => throw new ArgumentException($"Unknown type {type}", nameof(type)), + }; } diff --git a/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperation.cs b/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperation.cs index 81e1ee8a8400..64afb6ae0f94 100644 --- a/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperation.cs +++ b/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperation.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Functions.Grpc.Model; +namespace Microsoft.SemanticKernel.Plugins.Grpc.Model; /// /// The gRPC operation. diff --git a/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperationDataContractType.cs b/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperationDataContractType.cs index b859ed5aecfa..7be7599cec7a 100644 --- a/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperationDataContractType.cs +++ b/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperationDataContractType.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; -namespace Microsoft.SemanticKernel.Functions.Grpc.Model; +namespace Microsoft.SemanticKernel.Plugins.Grpc.Model; /// /// The gRPC operation data contract. diff --git a/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperationDataContractTypeFiled.cs b/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperationDataContractTypeFiled.cs index a98db15d77a4..d296961ec802 100644 --- a/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperationDataContractTypeFiled.cs +++ b/dotnet/src/Functions/Functions.Grpc/Model/GrpcOperationDataContractTypeFiled.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Functions.Grpc.Model; +namespace Microsoft.SemanticKernel.Plugins.Grpc.Model; /// /// The gRPC operation data contract field. 
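The refactored gRPC plugin surface above (`ImportPluginFromGrpcFile`/`CreatePluginFromGrpc` plus the string-only argument casting in `GrpcOperationRunner`) can be exercised from caller code roughly as follows. This is an illustrative sketch rather than part of the change: the `.proto` path, plugin name, operation name, endpoint, and payload are placeholders, and the arguments are kept as plain strings because `CastToStringArguments` rejects anything else in this release candidate. No AI service is configured on the `Kernel`, which is sufficient here since gRPC functions only use the kernel's logger factory and optional `HttpClient`.

```csharp
// Illustrative usage sketch of an imported gRPC plugin (names and values are placeholders).
using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.Grpc;

var kernel = new Kernel();

// Load operations from a .proto document; each rpc becomes a KernelFunction.
KernelPlugin grpcPlugin = kernel.ImportPluginFromGrpcFile("greeter.proto", "Greeter");

// Only the two parameters registered by GrpcOperationExtensions.GetParameters are needed:
// "address" selects the gRPC channel endpoint and "payload" carries the JSON request message.
// Both must be strings; CastToStringArguments throws a KernelException otherwise.
var arguments = new KernelArguments
{
    ["address"] = "https://localhost:5001",
    ["payload"] = "{ \"name\": \"world\" }",
};

// "SayHello" stands in for whatever rpc the .proto document actually defines.
FunctionResult result = await kernel.InvokeAsync(grpcPlugin["SayHello"], arguments);
Console.WriteLine(result.GetValue<object>());
```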
diff --git a/dotnet/src/Functions/Functions.Grpc/Protobuf/ProtoDocumentParser.cs b/dotnet/src/Functions/Functions.Grpc/Protobuf/ProtoDocumentParser.cs index 077feed9f473..08f9ab35ca87 100644 --- a/dotnet/src/Functions/Functions.Grpc/Protobuf/ProtoDocumentParser.cs +++ b/dotnet/src/Functions/Functions.Grpc/Protobuf/ProtoDocumentParser.cs @@ -5,11 +5,10 @@ using System.IO; using System.Linq; using Google.Protobuf.Reflection; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.Grpc.Model; +using Microsoft.SemanticKernel.Plugins.Grpc.Model; using ProtoBuf; -namespace Microsoft.SemanticKernel.Functions.Grpc.Protobuf; +namespace Microsoft.SemanticKernel.Plugins.Grpc.Protobuf; /// /// Parser for .proto definition documents. @@ -36,7 +35,7 @@ public IList Parse(Stream protoDocument, string protoFileName) var errors = descriptor.GetErrors(); if (errors != null && errors.Length != 0) { - throw new SKException($"Parsing of '{protoFileName}' .proto document has failed. Details: {string.Join(";", errors.AsEnumerable())}"); + throw new KernelException($"Parsing of '{protoFileName}' .proto document has failed. Details: {string.Join(";", errors.AsEnumerable())}"); } return this.GetGrpcOperations(descriptor.Files.Single()); @@ -91,7 +90,7 @@ private GrpcOperationDataContractType CreateDataContract(IList var messageType = allMessageTypes.SingleOrDefault(mt => mt.Name == fullTypeName || mt.Name == typeName); if (messageType == null) { - throw new SKException($"No '{fullTypeName}' message type is found while resolving data contracts for the '{methodName}' method."); + throw new KernelException($"No '{fullTypeName}' message type is found while resolving data contracts for the '{methodName}' method."); } var fields = this.GetDataContractFields(messageType.Fields); @@ -126,15 +125,16 @@ private List GetDataContractFields(List + + + + Microsoft.SemanticKernel.Markdown + $(AssemblyName) + netstandard2.0 + alpha + + + + + + + + + Semantic Kernel - Support for Markdown Function Definitions + Semantic Kernel Markdown Functions + + + + + + + + + + + + + + + diff --git a/dotnet/src/Functions/Functions.Markdown/KernelFunctionMarkdown.cs b/dotnet/src/Functions/Functions.Markdown/KernelFunctionMarkdown.cs new file mode 100644 index 000000000000..9753051aea4e --- /dev/null +++ b/dotnet/src/Functions/Functions.Markdown/KernelFunctionMarkdown.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json; +using Markdig; +using Markdig.Syntax; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel; + +/// +/// Factory methods for creating instances. +/// +public static class KernelFunctionMarkdown +{ + /// + /// Creates a instance for a prompt function using the specified markdown text. + /// + /// Markdown representation of the to use to create the prompt function. + /// The name of the function. + /// + /// The to use when interpreting the prompt template configuration into a . + /// If null, a default factory will be used. + /// + /// The to use for logging. If null, no logging will be performed. + /// The created . + public static KernelFunction FromPromptMarkdown( + string text, + string functionName, + IPromptTemplateFactory? promptTemplateFactory = null, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(text); + Verify.NotNull(functionName); + + return KernelFunctionFactory.CreateFromPrompt( + CreateFromPromptMarkdown(text, functionName), + promptTemplateFactory, + loggerFactory); + } + + #region Private methods + internal static PromptTemplateConfig CreateFromPromptMarkdown(string text, string functionName) + { + PromptTemplateConfig promptFunctionModel = new() { Name = functionName }; + + foreach (Block block in Markdown.Parse(text)) + { + if (block is FencedCodeBlock codeBlock) + { + switch (codeBlock.Info) + { + case "sk.prompt": + promptFunctionModel.Template = codeBlock.Lines.ToString(); + break; + + case "sk.execution_settings": + var modelSettings = codeBlock.Lines.ToString(); + var settingsDictionary = JsonSerializer.Deserialize>(modelSettings); + if (settingsDictionary is not null) + { + foreach (var keyValue in settingsDictionary) + { + promptFunctionModel.ExecutionSettings.Add(keyValue.Key, keyValue.Value); + } + } + break; + } + } + } + + return promptFunctionModel; + } + #endregion +} diff --git a/dotnet/src/Functions/Functions.Markdown/MarkdownKernelExtensions.cs b/dotnet/src/Functions/Functions.Markdown/MarkdownKernelExtensions.cs new file mode 100644 index 000000000000..6b34fb961982 --- /dev/null +++ b/dotnet/src/Functions/Functions.Markdown/MarkdownKernelExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// Class for extensions methods to define functions using prompt markdown format. +/// +public static class MarkdownKernelExtensions +{ + /// + /// Creates a instance for a prompt function using the specified markdown text. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// YAML representation of the to use to create the prompt function + /// The name of the function. + /// + /// The to use when interpreting the prompt template configuration into a . + /// If null, a default factory will be used. + /// + /// The created . + public static KernelFunction CreateFunctionFromMarkdown( + this Kernel kernel, + string text, + string functionName, + IPromptTemplateFactory? promptTemplateFactory = null) + { + Verify.NotNull(kernel); + Verify.NotNull(text); + Verify.NotNull(functionName); + + return KernelFunctionMarkdown.FromPromptMarkdown(text, functionName, promptTemplateFactory, kernel.LoggerFactory); + } +} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Authentication/AuthenticateRequestAsyncCallback.cs b/dotnet/src/Functions/Functions.OpenAPI/Authentication/AuthenticateRequestAsyncCallback.cs deleted file mode 100644 index dda578e223ab..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Authentication/AuthenticateRequestAsyncCallback.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; - -/// -/// Represents a delegate that defines the method signature for asynchronously authenticating an HTTP request. -/// -/// The to authenticate. -/// A representing the asynchronous operation. 
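The markdown function factory introduced above keys off two fenced-block info strings, `sk.prompt` and `sk.execution_settings`. A minimal sketch of a markdown document in that shape, and of the call that turns it into a `KernelFunction`, might look like this; the function name, template text, and settings values are illustrative, and the execution-settings JSON is assumed to deserialize into a `Dictionary<string, PromptExecutionSettings>` exactly as `CreateFromPromptMarkdown` does.

````csharp
// Illustrative sketch only: the markdown below follows the format parsed by
// KernelFunctionMarkdown.CreateFromPromptMarkdown; names and settings are placeholders.
using Microsoft.SemanticKernel;

const string FunctionMarkdown = """
    This function tells a joke about a given topic.

    ```sk.prompt
    Tell me a joke about {{$topic}}.
    ```

    ```sk.execution_settings
    { "default": { "temperature": 0.7 } }
    ```
    """;

var kernel = new Kernel();

// "TellJoke" is a placeholder function name; the prompt template factory argument is
// omitted, so the default factory is used to interpret the template.
KernelFunction tellJoke = kernel.CreateFunctionFromMarkdown(FunctionMarkdown, "TellJoke");
````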
-public delegate Task AuthenticateRequestAsyncCallback(HttpRequestMessage request); diff --git a/dotnet/src/Functions/Functions.OpenAPI/Authentication/BasicAuthenticationProvider.cs b/dotnet/src/Functions/Functions.OpenAPI/Authentication/BasicAuthenticationProvider.cs deleted file mode 100644 index 640cc5d18cd4..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Authentication/BasicAuthenticationProvider.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Text; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; - -/// -/// Retrieves authentication content (e.g. username/password, API key) via the provided delegate and -/// applies it to HTTP requests using the "basic" authentication scheme. -/// -public class BasicAuthenticationProvider -{ - private readonly Func> _credentials; - - /// - /// Creates an instance of the class. - /// - /// Delegate for retrieving credentials. - public BasicAuthenticationProvider(Func> credentials) - { - this._credentials = credentials; - } - - /// - /// Applies the authentication content to the provided HTTP request message. - /// - /// The HTTP request message. - public async Task AuthenticateRequestAsync(HttpRequestMessage request) - { - // Base64 encode - string encodedContent = Convert.ToBase64String(Encoding.UTF8.GetBytes(await this._credentials().ConfigureAwait(false))); - request.Headers.Authorization = new AuthenticationHeaderValue("Basic", encodedContent); - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Authentication/BearerAuthenticationProvider.cs b/dotnet/src/Functions/Functions.OpenAPI/Authentication/BearerAuthenticationProvider.cs deleted file mode 100644 index 68591f32f5dc..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Authentication/BearerAuthenticationProvider.cs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; - -/// -/// Retrieves a token via the provided delegate and applies it to HTTP requests using the -/// "bearer" authentication scheme. -/// -public class BearerAuthenticationProvider -{ - private readonly Func> _bearerToken; - - /// - /// Creates an instance of the class. - /// - /// Delegate to retrieve the bearer token. - public BearerAuthenticationProvider(Func> bearerToken) - { - this._bearerToken = bearerToken; - } - - /// - /// Applies the token to the provided HTTP request message. - /// - /// The HTTP request message. - public async Task AuthenticateRequestAsync(HttpRequestMessage request) - { - var token = await this._bearerToken().ConfigureAwait(false); - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Authentication/CustomAuthenticationProvider.cs b/dotnet/src/Functions/Functions.OpenAPI/Authentication/CustomAuthenticationProvider.cs deleted file mode 100644 index 3769b4877ba8..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Authentication/CustomAuthenticationProvider.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net.Http; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; - -/// -/// Retrieves authentication content (scheme and value) via the provided delegate and applies it to HTTP requests. -/// -public sealed class CustomAuthenticationProvider -{ - private readonly Func> _header; - private readonly Func> _value; - - /// - /// Creates an instance of the class. - /// - /// Delegate for retrieving the header name. - /// Delegate for retrieving the value. - public CustomAuthenticationProvider(Func> header, Func> value) - { - this._header = header; - this._value = value; - } - - /// - /// Applies the header and value to the provided HTTP request message. - /// - /// The HTTP request message. - /// - public async Task AuthenticateRequestAsync(HttpRequestMessage request) - { - var header = await this._header().ConfigureAwait(false); - var value = await this._value().ConfigureAwait(false); - request.Headers.Add(header, value); - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Authentication/InteractiveMsalAuthenticationProvider.cs b/dotnet/src/Functions/Functions.OpenAPI/Authentication/InteractiveMsalAuthenticationProvider.cs deleted file mode 100644 index ea49380224f8..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Authentication/InteractiveMsalAuthenticationProvider.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.Identity.Client; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; - -/// -/// Uses the Microsoft Authentication Library (MSAL) to authenticate HTTP requests. -/// -public class InteractiveMsalAuthenticationProvider : BearerAuthenticationProvider -{ - /// - /// Creates an instance of the class. - /// - /// Client ID of the caller. - /// Tenant ID of the target resource. - /// Requested scopes. - /// Redirect URI. - public InteractiveMsalAuthenticationProvider(string clientId, string tenantId, string[] scopes, Uri redirectUri) - : base(() => GetTokenAsync(clientId, tenantId, scopes, redirectUri)) - { - } - - /// - /// Gets an access token using the Microsoft Authentication Library (MSAL). - /// - /// Client ID of the caller. - /// Tenant ID of the target resource. - /// Requested scopes. - /// Redirect URI. - /// Access token. - private static async Task GetTokenAsync(string clientId, string tenantId, string[] scopes, Uri redirectUri) - { - IPublicClientApplication app = PublicClientApplicationBuilder.Create(clientId) - .WithRedirectUri(redirectUri.ToString()) - .WithTenantId(tenantId) - .Build(); - - IEnumerable accounts = await app.GetAccountsAsync().ConfigureAwait(false); - AuthenticationResult result; - try - { - result = await app.AcquireTokenSilent(scopes, accounts.FirstOrDefault()) - .ExecuteAsync().ConfigureAwait(false); - } - catch (MsalUiRequiredException) - { - // A MsalUiRequiredException happened on AcquireTokenSilent. 
- // This indicates you need to call AcquireTokenInteractive to acquire a token - result = await app.AcquireTokenInteractive(scopes) - .ExecuteAsync().ConfigureAwait(false); - } - - return result.AccessToken; - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Authentication/README.md b/dotnet/src/Functions/Functions.OpenAPI/Authentication/README.md deleted file mode 100644 index 7df11ca60cdf..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Authentication/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# Authentication for the OpenAPI Functions - -The Semantic Kernel OpenAPI Function enables developers to take any REST API that follows the OpenAPI specification and import it as a plugin to the Semantic Kernel. However, the Kernel needs to be able to authenticate outgoing requests per the requirements of the target API. This document outlines the authentication model for the OpenAPI plugin as well as the reference implementations provided by the Semantic Kernel. - -## The `AuthenticateRequestAsyncCallback` delegate - -[`AuthenticateRequestAsyncCallback`](AuthenticateRequestAsyncCallback.cs) is a delegate type that serves as a callback function for adding authentication information to HTTP requests sent by the OpenAPI plugin. - -```csharp -public delegate Task AuthenticateRequestAsyncCallback(HttpRequestMessage request); -``` - -Developers may optionally provide an implementation of this delegate when importing an OpenAPI plugin to the Kernel. The delegate is then passed through to the `RestApiOperationRunner`, which is responsible for building the HTTP payload and sending the request for each REST API operation. Before the API request is sent, the delegate is executed with the HTTP request message as the parameter, allowing the request message to be updated with any necessary authentication information. - -This pattern was designed to be flexible enough to support a wide variety of authentication frameworks. Developers can provide the delegate function directly or define a class or interface that exposes one or more implementations. They have the option of writing their own custom implementation or using one of the Semantic Kernel's reference authentication providers as a starting point. - -## Reference Authentication Providers - -### [`BasicAuthenticationProvider`](./BasicAuthenticationProvider.cs) -This class implements the HTTP "basic" authentication scheme. The constructor accepts a `Func` which defines how to retrieve the user's credentials. When the `AuthenticateRequestAsync` method is called, it retrieves the credentials, encodes them as a UTF-8 encoded Base64 string, and adds them to the `HttpRequestMessage`'s authorization header. - -The following code demonstrates how to use this provider: -```csharp -var basicAuthProvider = new BasicAuthenticationProvider(() => -{ - // JIRA API expects credentials in the format "email:apikey" - return Task.FromResult( - Env.Var("MY_EMAIL_ADDRESS") + ":" + Env.Var("JIRA_API_KEY") - ); -}); -var plugin = kernel.ImportOpenApiPluginFromResource(PluginResourceNames.Jira, new OpenApiPluginExecutionParameters { AuthCallback = basicAuthProvider.AuthenticateRequestAsync } ); -``` - -### [`BearerAuthenticationProvider`](./BearerAuthenticationProvider.cs) -This class implements the HTTP "bearer" authentication scheme. The constructor accepts a `Func` which defines how to retrieve the bearer token. When the `AuthenticateRequestAsync` method is called, it retrieves the token and adds it to the `HttpRequestMessage`'s authorization header. 
- -The following code demonstrates how to use this provider: -```csharp -var bearerAuthProvider = new BearerAuthenticationProvider(() => -{ - return Task.FromResult(Env.Var("AZURE_KEYVAULT_TOKEN")); -}); -var plugin = kernel.ImportOpenApiPluginFromResource(PluginResourceNames.AzureKeyVault, new OpenApiPluginExecutionParameters { AuthCallback = bearerAuthProvider.AuthenticateRequestAsync } ) -``` - -### [`InteractiveMsalAuthenticationProvider`](./InteractiveMsalAuthenticationProvider.cs) - -This class uses the [Microsoft Authentication Library (MSAL)](https://learn.microsoft.com/en-us/azure/active-directory/develop/msal-overview)'s .NET library to authenticate the user and acquire an OAuth token. It follows the interactive [authorization code flow](https://learn.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-auth-code-flow), requiring the user to sign in with a Microsoft or Azure identity. This is particularly useful for authenticating requests to the Microsoft Graph or Azure APIs. - -Once the token is acquired, it is added to the HTTP authentication header via the `AuthenticateRequestAsync` method, which is inherited from `BearerAuthenticationProvider`. - -To construct this provider, the caller must specify: -- *Client ID* – identifier of the calling application. This is acquired by [registering your application with the Microsoft Identity platform](https://learn.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app). -- *Tenant ID* – identifier of the target service tenant, or “common” -- *Scopes* – permissions being requested -- *Redirect URI* – for redirecting the user back to the application. (When running locally, this is typically http://localhost.) - -```csharp -var msalAuthProvider = new InteractiveMsalAuthenticationProvider( - Env.Var("AZURE_KEYVAULT_CLIENTID"), // clientId - Env.Var("AZURE_KEYVAULT_TENANTID"), // tenantId - new string[] { ".default" }, // scopes - new Uri("http://localhost") // redirectUri -); -var plugin = kernel.ImportOpenApiPluginFromResource(PluginResourceNames.AzureKeyVault, new OpenApiPluginExecutionParameters { AuthCallback = msalAuthProvider.AuthenticateRequestAsync } ) -``` \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.OpenAPI/Builders/QueryStringBuilder.cs b/dotnet/src/Functions/Functions.OpenAPI/Builders/QueryStringBuilder.cs deleted file mode 100644 index 9bb2885a446f..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Builders/QueryStringBuilder.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Builders; - -/// -/// Represents a query string builder for REST API operations. -/// -internal static class QueryStringBuilder -{ - /// - /// Query string parameter serializers. 
- /// - private static readonly Dictionary> s_queryStringParameterSerializers = new() - { - { RestApiOperationParameterStyle.Form, FormStyleParameterSerializer.Serialize }, - { RestApiOperationParameterStyle.SpaceDelimited, SpaceDelimitedStyleParameterSerializer.Serialize }, - { RestApiOperationParameterStyle.PipeDelimited, PipeDelimitedStyleParameterSerializer.Serialize } - }; - - /// - public static string BuildQueryString(this RestApiOperation operation, IDictionary arguments) - { - var segments = new List(); - - var parameters = operation.Parameters.Where(p => p.Location == RestApiOperationParameterLocation.Query); - - foreach (var parameter in parameters) - { - if (!arguments.TryGetValue(parameter.Name, out var argument)) - { - //Throw an exception if the parameter is a required one but no value is provided. - if (parameter.IsRequired) - { - throw new SKException($"No argument found for the `{parameter.Name}` required parameter"); - } - - //Skipping not required parameter if no argument provided for it. - continue; - } - - var parameterStyle = parameter.Style ?? RestApiOperationParameterStyle.Form; - - if (!s_queryStringParameterSerializers.TryGetValue(parameterStyle, out var serializer)) - { - throw new SKException($"The query string parameter `{parameterStyle}` serialization style is not supported."); - } - - //Serializing the parameter and adding it to the query string if there's an argument for it. - segments.Add(serializer.Invoke(parameter, argument)); - } - - return string.Join("&", segments); - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/FormStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/FormStyleParameterSerializer.cs deleted file mode 100644 index 12a97a494df7..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/FormStyleParameterSerializer.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text.Json.Nodes; -using System.Web; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; - -/// -/// Serializes REST API operation parameter of the 'Form' style. -/// -internal static class FormStyleParameterSerializer -{ - /// - /// Serializes a REST API operation `Form` style parameter. - /// - /// The REST API operation parameter to serialize. - /// The parameter argument. - /// The serialized parameter. - public static string Serialize(RestApiOperationParameter parameter, string argument) - { - const string ArrayType = "array"; - - if (parameter is null) - { - throw new ArgumentNullException(nameof(parameter)); - } - - if (parameter.Style != RestApiOperationParameterStyle.Form) - { - throw new SKException($"Unexpected Rest Api operation parameter style - `{parameter.Style}`"); - } - - // Handling parameters of array type. - if (parameter.Type == ArrayType) - { - return SerializeArrayParameter(parameter, argument); - } - - // Handling parameters of primitive - integer, string, etc type. - return $"{parameter.Name}={HttpUtility.UrlEncode(argument)}"; - } - - /// - /// Serializes an array-type parameter. - /// - /// The REST API operation parameter to serialize. - /// The argument value. - /// The serialized parameter string. 
- private static string SerializeArrayParameter(RestApiOperationParameter parameter, string argument) - { - if (JsonNode.Parse(argument) is not JsonArray array) - { - throw new SKException($"Can't deserialize parameter name `{parameter.Name}` argument `{argument}` to JSON array"); - } - - if (parameter.Expand) - { - return ArrayParameterValueSerializer.SerializeArrayAsSeparateParameters(parameter.Name, array, delimiter: "&"); //id=1&id=2&id=3 - } - - return $"{parameter.Name}={ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: ",")}"; //id=1,2,3 - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/PipeDelimitedStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/PipeDelimitedStyleParameterSerializer.cs deleted file mode 100644 index 8d2a148d68f7..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/PipeDelimitedStyleParameterSerializer.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text.Json.Nodes; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; - -/// -/// Serializes REST API operation parameter of the 'PipeDelimited' style. -/// -internal static class PipeDelimitedStyleParameterSerializer -{ - /// - /// Serializes a REST API operation `PipeDelimited` style parameter. - /// - /// The REST API operation parameter to serialize. - /// The parameter argument. - /// The serialized parameter. - public static string Serialize(RestApiOperationParameter parameter, string argument) - { - const string ArrayType = "array"; - - if (parameter is null) - { - throw new ArgumentNullException(nameof(parameter)); - } - - if (parameter.Style != RestApiOperationParameterStyle.PipeDelimited) - { - throw new SKException($"Unexpected Rest Api operation parameter style `{parameter.Style}`. Parameter name `{parameter.Name}`."); - } - - if (parameter.Type != ArrayType) - { - throw new SKException($"Serialization of Rest API operation parameters of type `{parameter.Type}` is not supported for the `{RestApiOperationParameterStyle.PipeDelimited}` style parameters. Parameter name `{parameter.Name}`."); - } - - return SerializeArrayParameter(parameter, argument); - } - - /// - /// Serializes an array-type parameter. - /// - /// The REST API operation parameter to serialize. - /// The argument value. - /// The serialized parameter string. 
- private static string SerializeArrayParameter(RestApiOperationParameter parameter, string argument) - { - if (JsonNode.Parse(argument) is not JsonArray array) - { - throw new SKException($"Can't deserialize parameter name `{parameter.Name}` argument `{argument}` to JSON array."); - } - - if (parameter.Expand) - { - return ArrayParameterValueSerializer.SerializeArrayAsSeparateParameters(parameter.Name, array, delimiter: "&"); //id=1&id=2&id=3 - } - - return $"{parameter.Name}={ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: "|")}"; //id=1|2|3 - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/SpaceDelimitedStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/SpaceDelimitedStyleParameterSerializer.cs deleted file mode 100644 index 43afd123ad21..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/SpaceDelimitedStyleParameterSerializer.cs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text.Json.Nodes; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; - -/// -/// Serializes REST API operation parameter of the 'SpaceDelimited' style. -/// -internal static class SpaceDelimitedStyleParameterSerializer -{ - /// - /// Serializes a REST API operation `SpaceDelimited` style parameter. - /// - /// The REST API operation parameter to serialize. - /// The parameter argument. - /// The serialized parameter. - public static string Serialize(RestApiOperationParameter parameter, string argument) - { - const string ArrayType = "array"; - - if (parameter is null) - { - throw new ArgumentNullException(nameof(parameter)); - } - - if (parameter.Style != RestApiOperationParameterStyle.SpaceDelimited) - { - throw new SKException($"Unexpected Rest Api operation parameter style `{parameter.Style}`. Parameter name `{parameter.Name}`."); - } - - if (parameter.Type != ArrayType) - { - throw new SKException($"Serialization of Rest API operation parameters of type `{parameter.Type}` is not supported for the `{RestApiOperationParameterStyle.SpaceDelimited}` style parameters. Parameter name `{parameter.Name}`."); - } - - return SerializeArrayParameter(parameter, argument); - } - - /// - /// Serializes an array-type parameter. - /// - /// The REST API operation parameter to serialize. - /// The argument value. - /// The serialized parameter string. - private static string SerializeArrayParameter(RestApiOperationParameter parameter, string argument) - { - if (JsonNode.Parse(argument) is not JsonArray array) - { - throw new SKException($"Can't deserialize parameter name `{parameter.Name}` argument `{argument}` to JSON array."); - } - - if (parameter.Expand) - { - return ArrayParameterValueSerializer.SerializeArrayAsSeparateParameters(parameter.Name, array, delimiter: "&"); //id=1&id=2&id=3 - } - - return $"{parameter.Name}={ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: "%20")}"; //id=1%202%203 - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Extensions/KernelAIPluginExtensions.cs b/dotnet/src/Functions/Functions.OpenAPI/Extensions/KernelAIPluginExtensions.cs deleted file mode 100644 index ff0ba2a22f55..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Extensions/KernelAIPluginExtensions.cs +++ /dev/null @@ -1,459 +0,0 @@ -// Copyright (c) Microsoft. 
All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Globalization; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Text.Json.Nodes; -using System.Text.RegularExpressions; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Functions.OpenAPI.OpenApi; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; - -/// -/// Provides extension methods for importing AI plugins exposed as OpenAPI v3 endpoints or through OpenAI's ChatGPT format. -/// -public static class KernelAIPluginExtensions -{ - [Obsolete("Methods and classes which includes Skill in the name have been renamed to use Plugin. Use Kernel.ImportPluginFunctionsAsync instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - public static async Task> ImportAIPluginAsync( - this IKernel kernel, - string pluginName, - string filePath, - OpenApiFunctionExecutionParameters? executionParameters = null, - CancellationToken cancellationToken = default) - { - return await kernel.ImportPluginFunctionsAsync(pluginName, filePath, executionParameters, cancellationToken).ConfigureAwait(false); - } -#pragma warning restore CS1591 - - /// - /// Imports an AI plugin that is exposed as an OpenAPI v3 endpoint or through OpenAI's ChatGPT format. - /// - /// Semantic Kernel instance. - /// Plugin name. - /// The file path to the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task> ImportPluginFunctionsAsync( - this IKernel kernel, - string pluginName, - string filePath, - OpenApiFunctionExecutionParameters? executionParameters = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName); - -#pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. - var httpClient = HttpClientProvider.GetHttpClient(kernel.HttpHandlerFactory, executionParameters?.HttpClient, kernel.LoggerFactory); -#pragma warning restore CA2000 - - var pluginContents = await LoadDocumentFromFilePathAsync( - kernel, - filePath, - executionParameters, - httpClient, - cancellationToken).ConfigureAwait(false); - - return await CompleteImportAsync( - kernel, - pluginContents, - pluginName, - httpClient, - executionParameters, - cancellationToken: cancellationToken).ConfigureAwait(false); - } - - [Obsolete("Methods and classes which includes Skill in the name have been renamed to use Plugin. Use Kernel.ImportPluginFunctionsAsync instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - public static async Task> ImportAIPluginAsync( - this IKernel kernel, - string pluginName, - Uri uri, - OpenApiFunctionExecutionParameters? 
executionParameters = null, - CancellationToken cancellationToken = default) - { - return await kernel.ImportPluginFunctionsAsync(pluginName, uri, executionParameters, cancellationToken).ConfigureAwait(false); - } -#pragma warning restore CS1591 - - /// - /// Imports an AI plugin that is exposed as an OpenAPI v3 endpoint or through OpenAI's ChatGPT format. - /// - /// Semantic Kernel instance. - /// Plugin name. - /// A local or remote URI referencing the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task> ImportPluginFunctionsAsync( - this IKernel kernel, - string pluginName, - Uri uri, - OpenApiFunctionExecutionParameters? executionParameters = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName); - -#pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. - var httpClient = HttpClientProvider.GetHttpClient(kernel.HttpHandlerFactory, executionParameters?.HttpClient, kernel.LoggerFactory); -#pragma warning restore CA2000 - - var pluginContents = await LoadDocumentFromUriAsync( - kernel, - uri, - executionParameters, - httpClient, - cancellationToken).ConfigureAwait(false); - - return await CompleteImportAsync( - kernel, - pluginContents, - pluginName, - httpClient, - executionParameters, - uri, - cancellationToken).ConfigureAwait(false); - } - - /// - /// Imports an AI plugin that is exposed as an OpenAPI v3 endpoint or through OpenAI's ChatGPT format. - /// - /// Semantic Kernel instance. - /// Plugin name. - /// A stream representing the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task> ImportPluginFunctionsAsync( - this IKernel kernel, - string pluginName, - Stream stream, - OpenApiFunctionExecutionParameters? executionParameters = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName); - -#pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. - var httpClient = HttpClientProvider.GetHttpClient(kernel.HttpHandlerFactory, executionParameters?.HttpClient, kernel.LoggerFactory); -#pragma warning restore CA2000 - - var pluginContents = await LoadDocumentFromStreamAsync(kernel, stream).ConfigureAwait(false); - - return await CompleteImportAsync( - kernel, - pluginContents, - pluginName, - httpClient, - executionParameters, - cancellationToken: cancellationToken).ConfigureAwait(false); - } - - #region private - - private static async Task> CompleteImportAsync( - IKernel kernel, - string pluginContents, - string pluginName, - HttpClient httpClient, - OpenApiFunctionExecutionParameters? executionParameters, - Uri? 
documentUri = null, - CancellationToken cancellationToken = default) - { - if (TryParseAIPluginForUrl(pluginContents, out var openApiUrl)) - { - return await kernel - .ImportPluginFunctionsAsync( - pluginName, - new Uri(openApiUrl), - executionParameters, - cancellationToken: cancellationToken) - .ConfigureAwait(false); - } - - return await LoadPluginAsync( - kernel, - pluginName, - executionParameters, - httpClient, - pluginContents, - documentUri, - cancellationToken).ConfigureAwait(false); - } - - private static async Task> LoadPluginAsync( - IKernel kernel, - string pluginName, - OpenApiFunctionExecutionParameters? executionParameters, - HttpClient httpClient, - string pluginJson, - Uri? documentUri = null, - CancellationToken cancellationToken = default) - { - var parser = new OpenApiDocumentParser(kernel.LoggerFactory); - - using (var documentStream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(pluginJson))) - { - var operations = await parser.ParseAsync(documentStream, executionParameters?.IgnoreNonCompliantErrors ?? false, cancellationToken).ConfigureAwait(false); - - var runner = new RestApiOperationRunner( - httpClient, - executionParameters?.AuthCallback, - executionParameters?.UserAgent, - executionParameters?.EnableDynamicPayload ?? false, - executionParameters?.EnablePayloadNamespacing ?? false); - - var plugin = new Dictionary(); - - ILogger logger = kernel.LoggerFactory.CreateLogger(typeof(KernelAIPluginExtensions)); - foreach (var operation in operations) - { - try - { - logger.LogTrace("Registering Rest function {0}.{1}", pluginName, operation.Id); - var function = kernel.RegisterRestApiFunction(pluginName, runner, operation, executionParameters, documentUri, cancellationToken); - plugin[function.Name] = function; - } - catch (Exception ex) when (!ex.IsCriticalException()) - { - //Logging the exception and keep registering other Rest functions - logger.LogWarning(ex, "Something went wrong while rendering the Rest function. Function: {0}.{1}. Error: {2}", - pluginName, operation.Id, ex.Message); - } - } - - return plugin; - } - } - - private static async Task LoadDocumentFromUriAsync( - IKernel kernel, - Uri uri, - OpenApiFunctionExecutionParameters? executionParameters, - HttpClient httpClient, - CancellationToken cancellationToken) - { - using var requestMessage = new HttpRequestMessage(HttpMethod.Get, uri.ToString()); - - requestMessage.Headers.UserAgent.Add(ProductInfoHeaderValue.Parse(executionParameters?.UserAgent ?? Telemetry.HttpUserAgent)); - - using var response = await httpClient.SendWithSuccessCheckAsync(requestMessage, cancellationToken).ConfigureAwait(false); - - return await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - } - - private static async Task LoadDocumentFromFilePathAsync( - IKernel kernel, - string filePath, - OpenApiFunctionExecutionParameters? executionParameters, - HttpClient httpClient, - CancellationToken cancellationToken) - { - var pluginJson = string.Empty; - - if (!File.Exists(filePath)) - { - throw new FileNotFoundException($"Invalid URI. 
The specified path '{filePath}' does not exist."); - } - - kernel.LoggerFactory.CreateLogger(typeof(KernelAIPluginExtensions)).LogTrace("Importing AI Plugin from {0}", filePath); - - using (var sr = File.OpenText(filePath)) - { - return await sr.ReadToEndAsync().ConfigureAwait(false); //must await here to avoid stream reader being disposed before the string is read - } - } - - private static async Task LoadDocumentFromStreamAsync( - IKernel kernel, - Stream stream) - { - using StreamReader reader = new(stream); - return await reader.ReadToEndAsync().ConfigureAwait(false); - } - - private static bool TryParseAIPluginForUrl(string gptPluginJson, out string? openApiUrl) - { - try - { - JsonNode? gptPlugin = JsonNode.Parse(gptPluginJson); - - string? apiType = gptPlugin?["api"]?["type"]?.ToString(); - - if (string.IsNullOrWhiteSpace(apiType) || apiType != "openapi") - { - openApiUrl = null; - - return false; - } - - openApiUrl = gptPlugin?["api"]?["url"]?.ToString(); - - if (string.IsNullOrWhiteSpace(openApiUrl)) - { - return false; - } - - return true; - } - catch (System.Text.Json.JsonException) - { - openApiUrl = null; - - return false; - } - } - - /// - /// Registers SKFunction for a REST API operation. - /// - /// Semantic Kernel instance. - /// Plugin name. - /// The REST API operation runner. - /// The REST API operation. - /// Function execution parameters. - /// The URI of OpenApi document. - /// The cancellation token. - /// An instance of class. - private static ISKFunction RegisterRestApiFunction( - this IKernel kernel, - string pluginName, - RestApiOperationRunner runner, - RestApiOperation operation, - OpenApiFunctionExecutionParameters? executionParameters, - Uri? documentUri = null, - CancellationToken cancellationToken = default) - { - var restOperationParameters = operation.GetParameters( - executionParameters?.ServerUrlOverride, - executionParameters?.EnableDynamicPayload ?? false, - executionParameters?.EnablePayloadNamespacing ?? false, - documentUri - ); - - var logger = kernel.LoggerFactory is not null ? kernel.LoggerFactory.CreateLogger(typeof(KernelAIPluginExtensions)) : NullLogger.Instance; - - async Task ExecuteAsync(SKContext context) - { - try - { - // Extract function arguments from context - var arguments = new Dictionary(); - foreach (var parameter in restOperationParameters) - { - // A try to resolve argument by alternative parameter name - if (!string.IsNullOrEmpty(parameter.AlternativeName) && context.Variables.TryGetValue(parameter.AlternativeName!, out string? value)) - { - arguments.Add(parameter.Name, value); - continue; - } - - // A try to resolve argument by original parameter name - if (context.Variables.TryGetValue(parameter.Name, out value)) - { - arguments.Add(parameter.Name, value); - continue; - } - - if (parameter.IsRequired) - { - throw new KeyNotFoundException( - $"No variable found in context to use as an argument for the '{parameter.Name}' parameter of the '{pluginName}.{operation.Id}' Rest function."); - } - } - - var options = new RestApiOperationRunOptions - { - ServerUrlOverride = executionParameters?.ServerUrlOverride, - ApiHostUrl = documentUri is not null ? 
new Uri(documentUri.GetLeftPart(UriPartial.Authority)) : null - }; - - return await runner.RunAsync(operation, arguments, options, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (!ex.IsCriticalException()) - { - logger.LogError(ex, "RestAPI function {Plugin}.{Name} execution failed with error {Error}", pluginName, operation.Id, ex.Message); - throw; - } - } - - var parameters = restOperationParameters - .Select(p => new ParameterView(p.AlternativeName ?? p.Name) - { - Description = $"{p.Description ?? p.Name}{(p.IsRequired ? " (required)" : string.Empty)}", - DefaultValue = p.DefaultValue ?? string.Empty, - Type = string.IsNullOrEmpty(p.Type) ? null : new ParameterViewType(p.Type), - IsRequired = p.IsRequired, - }) - .ToList(); - - var function = SKFunction.FromNativeFunction( - nativeFunction: ExecuteAsync, - parameters: parameters, - description: operation.Description, - pluginName: pluginName, - functionName: ConvertOperationIdToValidFunctionName(operation.Id, logger), - loggerFactory: kernel.LoggerFactory); - - return kernel.RegisterCustomFunction(function); - } - - /// - /// Converts operation id to valid SK Function name. - /// A function name can contain only ASCII letters, digits, and underscores. - /// - /// The operation id. - /// The logger. - /// Valid SK Function name. - private static string ConvertOperationIdToValidFunctionName(string operationId, ILogger logger) - { - try - { - Verify.ValidFunctionName(operationId); - return operationId; - } - catch (SKException) - { - } - - // Tokenize operation id on forward and back slashes - string[] tokens = operationId.Split('/', '\\'); - string result = string.Empty; - - foreach (string token in tokens) - { - // Removes all characters that are not ASCII letters, digits, and underscores. - string formattedToken = s_removeInvalidCharsRegex.Replace(token, ""); - result += CultureInfo.CurrentCulture.TextInfo.ToTitleCase(formattedToken.ToLower(CultureInfo.CurrentCulture)); - } - - logger.LogInformation("Operation name \"{0}\" converted to \"{1}\" to comply with SK Function name requirements. Use \"{2}\" when invoking function.", operationId, result, result); - - return result; - } - - /// - /// Used to convert operationId to SK function names. - /// - private static readonly Regex s_removeInvalidCharsRegex = new("[^0-9A-Za-z_]"); - - #endregion -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Functions.OpenAPI.csproj b/dotnet/src/Functions/Functions.OpenAPI/Functions.OpenAPI.csproj deleted file mode 100644 index fc8b5511a03c..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Functions.OpenAPI.csproj +++ /dev/null @@ -1,41 +0,0 @@ - - - - - Microsoft.SemanticKernel.Functions.OpenAPI - $(AssemblyName) - netstandard2.0 - - - - - - - - Semantic Kernel - OpenAPI Functions - Semantic Kernel OpenAPI Functions - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dotnet/src/Functions/Functions.OpenAPI/HttpContentFactory.cs b/dotnet/src/Functions/Functions.OpenAPI/HttpContentFactory.cs deleted file mode 100644 index 633477bd2743..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/HttpContentFactory.cs +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI; - -/// -/// Represents a delegate for creating HTTP content for a REST API operation. -/// -/// The operation payload metadata. 
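To make ConvertOperationIdToValidFunctionName above concrete: operation ids that already satisfy the function-name rules pass through unchanged, while all others are tokenized on slashes, stripped of invalid characters, and title-cased. A standalone restatement of that step with a worked example (class and method names here are illustrative):

using System.Globalization;
using System.Text.RegularExpressions;

internal static class OperationIdSample
{
    private static readonly Regex s_invalidChars = new("[^0-9A-Za-z_]");

    // Example: "repair-service/listRepairs" -> "RepairserviceListrepairs"
    public static string ToFunctionName(string operationId)
    {
        string result = string.Empty;

        foreach (string token in operationId.Split('/', '\\'))
        {
            // Drop everything that is not an ASCII letter, digit, or underscore, then title-case the lowered token.
            string cleaned = s_invalidChars.Replace(token, "");
            result += CultureInfo.CurrentCulture.TextInfo.ToTitleCase(cleaned.ToLower(CultureInfo.CurrentCulture));
        }

        return result;
    }
}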
-/// The operation arguments. -/// The HTTP content representing the operation payload. -internal delegate HttpContent HttpContentFactory(RestApiOperationPayload? payload, IDictionary arguments); diff --git a/dotnet/src/Functions/Functions.OpenAPI/JsonPathPlugin.cs b/dotnet/src/Functions/Functions.OpenAPI/JsonPathPlugin.cs deleted file mode 100644 index 1f45e283837b..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/JsonPathPlugin.cs +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Linq; -using Microsoft.SemanticKernel.Orchestration; -using Newtonsoft.Json; -using Newtonsoft.Json.Linq; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI; - -/// -/// Provides methods to retrieve JSON elements from a JSON string using JsonPath queries. -/// -public sealed class JsonPathPlugin -{ - /// - /// parameter names. - /// - public static class Parameters - { - /// - /// JSON path. - /// - public const string JsonPath = "jsonpath"; - } - - /// - /// Retrieve the value of a JSON element from a JSON string using a JsonPath query. - /// - /// The JSON string to query. - /// The JsonPath query to use. - /// The value of the JSON element as a string. - /// Thrown when the provided JSON string is null or whitespace. - [SKFunction, Description("Retrieve the value of a JSON element from a JSON string using a JsonPath query.")] - public string GetJsonElementValue( - [Description("JSON string")] string json, - [Description("JSON path query.")] string jsonPath) - { - if (string.IsNullOrWhiteSpace(json)) - { - throw new ArgumentException("Variable was null or whitespace", nameof(json)); - } - - JObject jsonObject = JObject.Parse(json); - - JToken? token = jsonObject.SelectToken(jsonPath); - - return token?.Value() ?? string.Empty; - } - - /// - /// Retrieve a collection of JSON elements from a JSON string using a JsonPath query. - /// - /// The JSON string to query. - /// The JsonPath query to use. - /// A JSON string representing the collection of JSON elements. - /// Thrown when the provided JSON string is null or whitespace. - [SKFunction, Description("Retrieve a collection of JSON elements from a JSON string using a JsonPath query.")] - public string GetJsonElements( - [Description("JSON string")] string json, - [Description("JSON path query.")] string jsonPath) - { - if (string.IsNullOrWhiteSpace(json)) - { - throw new ArgumentException("Variable was null or whitespace", nameof(json)); - } - - JObject jsonObject = JObject.Parse(json); - - JToken[] tokens = jsonObject.SelectTokens(jsonPath).ToArray(); - - return JsonConvert.SerializeObject(tokens, Formatting.None); - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperation.cs b/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperation.cs deleted file mode 100644 index 7c7d50b03bab..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperation.cs +++ /dev/null @@ -1,243 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Text.RegularExpressions; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -/// -/// The REST API operation. -/// -public sealed class RestApiOperation -{ - /// - /// An artificial parameter that is added to be able to override REST API operation server url. 
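A brief usage sketch for the deleted JsonPathPlugin above; the JSON document and JsonPath queries are made up for illustration.

var jsonPathPlugin = new JsonPathPlugin();

string json = "{\"customer\":{\"name\":\"Ada\",\"orders\":[{\"id\":1},{\"id\":2}]}}";

string name = jsonPathPlugin.GetJsonElementValue(json, "$.customer.name");      // "Ada"
string ids = jsonPathPlugin.GetJsonElements(json, "$.customer.orders[*].id");   // "[1,2]"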
- /// - public const string ServerUrlArgumentName = "server-url"; - - /// - /// An artificial parameter to be used for operation having "text/plain" payload media type. - /// - public const string PayloadArgumentName = "payload"; - - /// - /// An artificial parameter to be used for indicate payload media-type if it's missing in payload metadata. - /// - public const string ContentTypeArgumentName = "content-type"; - - /// - /// The operation identifier. - /// - public string Id { get; } - - /// - /// The operation description. - /// - public string Description { get; } - - /// - /// The operation path. - /// - public string Path { get; } - - /// - /// The operation method - GET, POST, PUT, DELETE. - /// - public HttpMethod Method { get; } - - /// - /// The server URL. - /// - public Uri? ServerUrl { get; } - - /// - /// The operation headers. - /// - public IDictionary Headers { get; } - - /// - /// The operation parameters. - /// - public IList Parameters { get; } - - /// - /// The operation payload. - /// - public RestApiOperationPayload? Payload { get; } - - /// - /// Creates an instance of a class. - /// - /// The operation identifier. - /// The server URL. - /// The operation path. - /// The operation method. - /// The operation description. - /// The operation parameters. - /// The operation headers. - /// The operation payload. - public RestApiOperation( - string id, - Uri? serverUrl, - string path, - HttpMethod method, - string description, - IList parameters, - IDictionary headers, - RestApiOperationPayload? payload = null) - { - this.Id = id; - this.ServerUrl = serverUrl; - this.Path = path; - this.Method = method; - this.Description = description; - this.Parameters = parameters; - this.Headers = headers; - this.Payload = payload; - } - - /// - /// Builds operation Url. - /// - /// The operation arguments. - /// Override for REST API operation server url. - /// The URL of REST API host. - /// The operation Url. - public Uri BuildOperationUrl(IDictionary arguments, Uri? serverUrlOverride = null, Uri? apiHostUrl = null) - { - var serverUrl = this.GetServerUrl(arguments, serverUrlOverride, apiHostUrl); - - var path = this.ReplacePathParameters(this.Path, arguments); - - return new Uri(serverUrl, $"{path.TrimStart('/')}"); - } - - /// - /// Renders operation request headers. - /// - /// The operation arguments. - /// The rendered request headers. - public IDictionary RenderHeaders(IDictionary arguments) - { - var headers = new Dictionary(); - - foreach (var header in this.Headers) - { - var headerName = header.Key; - var headerValue = header.Value; - - //A try to resolve header value in arguments. - if (arguments.TryGetValue(headerName, out var value)) - { - headers.Add(headerName, value); - continue; - } - - //Header value is already supplied. - if (!string.IsNullOrEmpty(headerValue)) - { - headers.Add(headerName, headerValue); - continue; - } - - //Getting metadata for the header - var headerMetadata = this.Parameters.FirstOrDefault(p => p.Location == RestApiOperationParameterLocation.Header && p.Name == headerName) - ?? throw new SKException($"No value for the '{headerName} header is found.'"); - - //If parameter is required it's value should always be provided. - if (headerMetadata.IsRequired) - { - throw new SKException($"No value for the '{headerName} header is found.'"); - } - - //Parameter is not required and no default value provided. - if (string.IsNullOrEmpty(headerMetadata.DefaultValue)) - { - continue; - } - - //Using default value. 
- headers.Add(headerName, headerMetadata.DefaultValue!); - } - - return headers; - } - - #region private - - /// - /// Replaces path parameters by corresponding arguments. - /// - /// Operation path to replace parameters in. - /// Arguments to replace parameters by. - /// Path with replaced parameters - private string ReplacePathParameters(string path, IDictionary arguments) - { - string ReplaceParameter(Match match) - { - var parameterName = match.Groups[1].Value; - - //A try to find parameter value in arguments - if (arguments.TryGetValue(parameterName, out var value)) - { - return value; - } - - //A try to find default value for the parameter - var parameterMetadata = this.Parameters.First(p => p.Location == RestApiOperationParameterLocation.Path && p.Name == parameterName); - if (parameterMetadata?.DefaultValue == null) - { - throw new SKException($"No argument found for parameter - '{parameterName}' for operation - '{this.Id}'"); - } - - return parameterMetadata.DefaultValue; - } - - return s_urlParameterMatch.Replace(path, ReplaceParameter); - } - - /// - /// Returns operation server Url. - /// - /// The operation arguments. - /// Override for REST API operation server url. - /// The URL of REST API host. - /// The operation server url. - private Uri GetServerUrl(IDictionary arguments, Uri? serverUrlOverride, Uri? apiHostUrl) - { - string serverUrlString; - - if (serverUrlOverride is not null) - { - serverUrlString = serverUrlOverride.AbsoluteUri; - } - else if (arguments.TryGetValue(ServerUrlArgumentName, out string serverUrlFromArgument)) - { - // Override defined server url - https://api.example.com/v1 by the one from arguments. - serverUrlString = serverUrlFromArgument; - } - else - { - serverUrlString = - this.ServerUrl?.AbsoluteUri ?? - apiHostUrl?.AbsoluteUri ?? - throw new InvalidOperationException($"Server url is not defined for operation {this.Id}"); - } - - // make sure base url ends with trailing slash - if (!serverUrlString.EndsWith("/", StringComparison.OrdinalIgnoreCase)) - { - serverUrlString += "/"; - } - - return new Uri(serverUrlString); - } - - private static readonly Regex s_urlParameterMatch = new(@"\{([\w-]+)\}"); - - # endregion -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationPayloadProperty.cs b/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationPayloadProperty.cs deleted file mode 100644 index 258b9a598c3a..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationPayloadProperty.cs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -/// -/// The REST API operation payload property. -/// -public sealed class RestApiOperationPayloadProperty -{ - /// - /// The property name. - /// - public string Name { get; } - - /// - /// The property type. - /// - public string Type { get; } - - /// - /// The property description. - /// - public string? Description { get; } - - /// - /// Flag specifying if the property is required or not. - /// - public bool IsRequired { get; } - - /// - /// The properties. - /// - public IList Properties { get; } - - /// - /// Creates an instance of a class. - /// - /// Property name. - /// Property type. - /// Flag specifying if the property is required or not. - /// Properties. - /// Property description. - public RestApiOperationPayloadProperty( - string name, - string type, - bool isRequired, - IList properties, - string? 
description = null) - { - this.Name = name; - this.Type = type; - this.IsRequired = isRequired; - this.Description = description; - this.Properties = properties; - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationResponse.cs b/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationResponse.cs deleted file mode 100644 index e2c4776c2c03..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationResponse.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.ComponentModel; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -/// -/// The REST API operation response. -/// -[TypeConverterAttribute(typeof(RestApiOperationResponseConverter))] -public sealed class RestApiOperationResponse -{ - /// - /// Gets the content of the response. - /// - public object Content { get; } - - /// - /// Gets the content type of the response. - /// - public string ContentType { get; } - - /// - /// Initializes a new instance of the class. - /// - /// The content of the response. - /// The content type of the response. - public RestApiOperationResponse(object content, string contentType) - { - this.Content = content; - this.ContentType = contentType; - } -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/OpenApi/IOpenApiDocumentParser.cs b/dotnet/src/Functions/Functions.OpenAPI/OpenApi/IOpenApiDocumentParser.cs deleted file mode 100644 index 90b1fe571b48..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/OpenApi/IOpenApiDocumentParser.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.OpenApi; - -/// -/// Interface for OpenApi document parser classes. -/// -internal interface IOpenApiDocumentParser -{ - /// - /// Parses OpenAPI document. - /// - /// Stream containing OpenAPI document to parse. - /// Flag indicating whether to ignore non-compliant errors. - /// If set to true, the parser will not throw exceptions for non-compliant documents. - /// Please note that enabling this option may result in incomplete or inaccurate parsing results. - /// The cancellation token. - /// List of rest operations. - Task> ParseAsync(Stream stream, bool ignoreNonCompliantErrors = false, CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/OpenApi/OpenApiDocumentParser.cs b/dotnet/src/Functions/Functions.OpenAPI/OpenApi/OpenApiDocumentParser.cs deleted file mode 100644 index 455c5ace0a67..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/OpenApi/OpenApiDocumentParser.cs +++ /dev/null @@ -1,407 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
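A hypothetical sketch of driving the document parser described by the interface above (the interface and its implementation are internal to the assembly, so this shape is only available from within it or its tests); the file name is a placeholder.

using System;
using System.IO;

using var stream = File.OpenRead("openapi.json");

IOpenApiDocumentParser parser = new OpenApiDocumentParser();
var operations = await parser.ParseAsync(stream, ignoreNonCompliantErrors: true);

foreach (var operation in operations)
{
    Console.WriteLine($"{operation.Method} {operation.Path} ({operation.Id})");
}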
- -using System; -using System.Collections.Generic; -using System.Globalization; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.OpenApi.Any; -using Microsoft.OpenApi.Models; -using Microsoft.OpenApi.Readers; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.OpenApi; - -/// -/// Parser for OpenAPI documents. -/// -internal sealed class OpenApiDocumentParser : IOpenApiDocumentParser -{ - /// - /// Initializes a new instance of the class. - /// - /// The to use for logging. If null, no logging will be performed. - public OpenApiDocumentParser(ILoggerFactory? loggerFactory = null) - { - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(OpenApiDocumentParser)) : NullLogger.Instance; - } - - /// - public async Task> ParseAsync(Stream stream, bool ignoreNonCompliantErrors = false, CancellationToken cancellationToken = default) - { - var jsonObject = await this.DowngradeDocumentVersionToSupportedOneAsync(stream, cancellationToken).ConfigureAwait(false); - - using var memoryStream = new MemoryStream(Json.SerializeToUtf8Bytes(jsonObject)); - - var result = await this._openApiReader.ReadAsync(memoryStream, cancellationToken).ConfigureAwait(false); - - this.AssertReadingSuccessful(result, ignoreNonCompliantErrors); - - return ExtractRestApiOperations(result.OpenApiDocument); - } - - #region private - - /// - /// Max depth to traverse down OpenApi schema to discover payload properties. - /// - private const int PayloadPropertiesHierarchyMaxDepth = 10; - - /// - /// Name of property that contains OpenAPI document version. - /// - private const string OpenApiVersionPropertyName = "openapi"; - - /// - /// Latest supported version of OpenAPI document. - /// - private static readonly Version s_latestSupportedVersion = new(3, 0, 1); - - /// - /// List of supported Media Types. - /// - private static readonly List s_supportedMediaTypes = new() - { - "application/json", - "text/plain" - }; - - private readonly OpenApiStreamReader _openApiReader = new(); - private readonly ILogger _logger; - - /// - /// Downgrades the version of an OpenAPI document to the latest supported one - 3.0.1. - /// This class relies on Microsoft.OpenAPI.NET library to work with OpenApi documents. - /// The library, at the moment, does not support 3.1 spec, and the latest supported version is 3.0.1. - /// There's an open issue tracking the support progress - https://github.com/microsoft/OpenAPI.NET/issues/795 - /// This method should be removed/revised as soon the support is added. - /// - /// The original OpenAPI document stream. - /// The cancellation token. - /// OpenAPI document with downgraded document version. - private async Task DowngradeDocumentVersionToSupportedOneAsync(Stream stream, CancellationToken cancellationToken) - { - var jsonObject = await ConvertContentToJsonAsync(stream, cancellationToken).ConfigureAwait(false); - if (jsonObject == null) - { - // The document is malformed. 
- throw new SKException("Parsing of OpenAPI document failed."); - } - - if (!jsonObject.TryGetPropertyValue(OpenApiVersionPropertyName, out var propertyNode)) - { - // The document is either malformed or has 2.x version that specifies document version in the 'swagger' property rather than in the 'openapi' one. - return jsonObject; - } - - if (propertyNode is not JsonValue value) - { - // The 'openapi' property has unexpected type. - return jsonObject; - } - - if (!Version.TryParse(value.ToString(), out var version)) - { - // The 'openapi' property is malformed. - return jsonObject; - } - - if (version > s_latestSupportedVersion) - { - jsonObject[OpenApiVersionPropertyName] = s_latestSupportedVersion.ToString(); - } - - return jsonObject; - } - - /// - /// Converts YAML content to JSON content. - /// The method uses SharpYaml library that comes as a not-direct dependency of Microsoft.OpenAPI.NET library. - /// Should be replaced later when there's more convenient way to convert YAML content to JSON one. - /// - /// The YAML/JSON content stream. - /// The to monitor for cancellation requests. The default is . - /// JSON content stream. - private static async Task ConvertContentToJsonAsync(Stream stream, CancellationToken cancellationToken = default) - { - var serializer = new SharpYaml.Serialization.Serializer(); - - var obj = serializer.Deserialize(stream); - - using var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(obj))); - - return await JsonSerializer.DeserializeAsync(memoryStream, cancellationToken: cancellationToken).ConfigureAwait(false); - } - - /// - /// Parses an OpenApi document and extracts REST API operations. - /// - /// The OpenApi document. - /// List of Rest operations. - private static List ExtractRestApiOperations(OpenApiDocument document) - { - var result = new List(); - - var serverUrl = document.Servers.FirstOrDefault()?.Url; - - foreach (var pathPair in document.Paths) - { - var operations = CreateRestApiOperations(serverUrl, pathPair.Key, pathPair.Value); - - result.AddRange(operations); - } - - return result; - } - - /// - /// Creates REST API operation. - /// - /// The server url. - /// Rest resource path. - /// Rest resource metadata. - /// Rest operation. - private static List CreateRestApiOperations(string? serverUrl, string path, OpenApiPathItem pathItem) - { - var operations = new List(); - - foreach (var operationPair in pathItem.Operations) - { - var method = operationPair.Key.ToString(); - - var operationItem = operationPair.Value; - - var operation = new RestApiOperation( - operationItem.OperationId, - string.IsNullOrEmpty(serverUrl) ? null : new Uri(serverUrl), - path, - new HttpMethod(method), - string.IsNullOrEmpty(operationItem.Description) ? operationItem.Summary : operationItem.Description, - CreateRestApiOperationParameters(operationItem.OperationId, operationItem.Parameters), - CreateRestApiOperationHeaders(operationItem.Parameters), - CreateRestApiOperationPayload(operationItem.OperationId, operationItem.RequestBody) - ); - - operations.Add(operation); - } - - return operations; - } - - /// - /// Creates REST API operation parameters. - /// - /// The operation id. - /// The OpenApi parameters. - /// The parameters. 
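The version downgrade above rewrites only the top-level "openapi" property so that Microsoft.OpenApi.Readers, which per the comment supports documents up to 3.0.1, can parse the file. A minimal before/after sketch of that rewrite, using a made-up document:

using System;
using System.Text.Json.Nodes;

var doc = JsonNode.Parse("{\"openapi\":\"3.1.0\",\"paths\":{}}")!.AsObject();

if (Version.TryParse(doc["openapi"]?.ToString(), out var version) && version > new Version(3, 0, 1))
{
    doc["openapi"] = "3.0.1"; // mirrors s_latestSupportedVersion above
}

Console.WriteLine(doc.ToJsonString()); // {"openapi":"3.0.1","paths":{}}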
- private static List CreateRestApiOperationParameters(string operationId, IList parameters) - { - var result = new List(); - - foreach (var parameter in parameters) - { - if (parameter.In == null) - { - throw new SKException($"Parameter location of {parameter.Name} parameter of {operationId} operation is undefined."); - } - - if (parameter.Style == null) - { - throw new SKException($"Parameter style of {parameter.Name} parameter of {operationId} operation is undefined."); - } - - var restParameter = new RestApiOperationParameter( - parameter.Name, - parameter.Schema.Type, - parameter.Required, - parameter.Explode, - (RestApiOperationParameterLocation)Enum.Parse(typeof(RestApiOperationParameterLocation), parameter.In.ToString()), - (RestApiOperationParameterStyle)Enum.Parse(typeof(RestApiOperationParameterStyle), parameter.Style.ToString()), - parameter.Schema.Items?.Type, - GetParameterValue(parameter.Name, parameter.Schema.Default), - parameter.Description - ); - - result.Add(restParameter); - } - - return result; - } - - /// - /// Creates REST API operation headers. - /// - /// The OpenApi parameters - /// The headers. - private static Dictionary CreateRestApiOperationHeaders(IList parameters) - { - return parameters.Where(p => p.In == ParameterLocation.Header).ToDictionary(p => p.Name, p => string.Empty); - } - - /// - /// Creates REST API operation payload. - /// - /// The operation id. - /// The OpenApi request body. - /// The REST API operation payload. - private static RestApiOperationPayload? CreateRestApiOperationPayload(string operationId, OpenApiRequestBody requestBody) - { - if (requestBody?.Content == null) - { - return null; - } - - var mediaType = s_supportedMediaTypes.FirstOrDefault(smt => requestBody.Content.ContainsKey(smt)); - if (mediaType == null) - { - throw new SKException($"Neither of the media types of {operationId} is supported."); - } - - var mediaTypeMetadata = requestBody.Content[mediaType]; - - var payloadProperties = GetPayloadProperties(operationId, mediaTypeMetadata.Schema, mediaTypeMetadata.Schema?.Required ?? new HashSet()); - - return new RestApiOperationPayload(mediaType, payloadProperties, requestBody.Description); - } - - /// - /// Returns REST API operation payload properties. - /// - /// The operation id. - /// An OpenApi document schema representing request body properties. - /// List of required properties. - /// Current level in OpenApi schema. - /// The REST API operation payload properties. - private static List GetPayloadProperties(string operationId, OpenApiSchema? schema, ISet requiredProperties, - int level = 0) - { - if (schema == null) - { - return new List(); - } - - if (level > PayloadPropertiesHierarchyMaxDepth) - { - throw new SKException($"Max level {PayloadPropertiesHierarchyMaxDepth} of traversing payload properties of {operationId} operation is exceeded."); - } - - var result = new List(); - - foreach (var propertyPair in schema.Properties) - { - var propertyName = propertyPair.Key; - - var propertySchema = propertyPair.Value; - - var property = new RestApiOperationPayloadProperty( - propertyName, - propertySchema.Type, - requiredProperties.Contains(propertyName), - GetPayloadProperties(operationId, propertySchema, requiredProperties, level + 1), - propertySchema.Description); - - result.Add(property); - } - - return result; - } - - /// - /// Returns parameter value. - /// - /// The parameter name. - /// The value metadata. - /// The parameter value. - private static string? 
GetParameterValue(string name, IOpenApiAny valueMetadata) - { - if (valueMetadata is not IOpenApiPrimitive value) - { - return null; - } - - switch (value.PrimitiveType) - { - case PrimitiveType.Integer: - var intValue = (OpenApiInteger)value; - return intValue.Value.ToString(CultureInfo.InvariantCulture); - - case PrimitiveType.Long: - var longValue = (OpenApiLong)value; - return longValue.Value.ToString(CultureInfo.InvariantCulture); - - case PrimitiveType.Float: - var floatValue = (OpenApiFloat)value; - return floatValue.Value.ToString(CultureInfo.InvariantCulture); - - case PrimitiveType.Double: - var doubleValue = (OpenApiDouble)value; - return doubleValue.Value.ToString(CultureInfo.InvariantCulture); - - case PrimitiveType.String: - var stringValue = (OpenApiString)value; - return stringValue.Value.ToString(CultureInfo.InvariantCulture); - - case PrimitiveType.Byte: - var byteValue = (OpenApiByte)value; - return Convert.ToBase64String(byteValue.Value); - - case PrimitiveType.Binary: - var binaryValue = (OpenApiBinary)value; - return Encoding.UTF8.GetString(binaryValue.Value); - - case PrimitiveType.Boolean: - var boolValue = (OpenApiBoolean)value; - return boolValue.Value.ToString(CultureInfo.InvariantCulture); - - case PrimitiveType.Date: - var dateValue = (OpenApiDate)value; - return dateValue.Value.ToString("o").Substring(0, 10); - - case PrimitiveType.DateTime: - var dateTimeValue = (OpenApiDateTime)value; - return dateTimeValue.Value.ToString(CultureInfo.InvariantCulture); - - case PrimitiveType.Password: - var passwordValue = (OpenApiPassword)value; - return passwordValue.Value.ToString(CultureInfo.InvariantCulture); - - default: - throw new SKException($"The value type - {value.PrimitiveType} is not supported."); - } - } - - /// - /// Asserts the successful reading of OpenAPI document. - /// - /// The reading results to be checked. - /// Flag indicating whether to ignore non-compliant errors. - /// If set to true, the parser will not throw exceptions for non-compliant documents. - /// Please note that enabling this option may result in incomplete or inaccurate parsing results. - /// - private void AssertReadingSuccessful(ReadResult readResult, bool ignoreNonCompliantErrors) - { - if (readResult.OpenApiDiagnostic.Errors.Any()) - { - var message = $"Parsing of '{readResult.OpenApiDocument.Info?.Title}' OpenAPI document complete with the following errors: {string.Join(";", readResult.OpenApiDiagnostic.Errors)}"; - - this._logger.LogWarning("{Message}", message); - - if (!ignoreNonCompliantErrors) - { - throw new SKException(message); - } - } - } - - #endregion -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Plugins/PluginResourceNames.cs b/dotnet/src/Functions/Functions.OpenAPI/Plugins/PluginResourceNames.cs deleted file mode 100644 index d02c38675198..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/Plugins/PluginResourceNames.cs +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Plugins; - -/// -/// Plugin resource names. -/// -public static class PluginResourceNames -{ - /// - /// Azure KeyVault plugin name. 
- /// - public const string AzureKeyVault = "AzureKeyVaultPlugin"; -} diff --git a/dotnet/src/Functions/Functions.OpenAPI/RestApiOperationRunner.cs b/dotnet/src/Functions/Functions.OpenAPI/RestApiOperationRunner.cs deleted file mode 100644 index 71acaaa0c00a..000000000000 --- a/dotnet/src/Functions/Functions.OpenAPI/RestApiOperationRunner.cs +++ /dev/null @@ -1,416 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Globalization; -using System.Linq; -using System.Net.Http; -using System.Text; -using System.Text.Json.Nodes; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; -using Microsoft.SemanticKernel.Functions.OpenAPI.Builders; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; - -namespace Microsoft.SemanticKernel.Functions.OpenAPI; - -/// -/// Runs REST API operation represented by RestApiOperation model class. -/// -internal sealed class RestApiOperationRunner -{ - private const string MediaTypeApplicationJson = "application/json"; - private const string MediaTypeTextPlain = "text/plain"; - - /// - /// List of payload builders/factories. - /// - private readonly Dictionary _payloadFactoryByMediaType; - - /// - /// A dictionary containing the content type as the key and the corresponding content serializer as the value. - /// - private static readonly Dictionary s_serializerByContentType = new() - { - { "image", async (content) => await content.ReadAsByteArrayAndTranslateExceptionAsync().ConfigureAwait(false) }, - { "text", async (content) => await content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false) }, - { "application/json", async (content) => await content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false)}, - { "application/xml", async (content) => await content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false)} - }; - - /// - /// An instance of the HttpClient class. - /// - private readonly HttpClient _httpClient; - - /// - /// Delegate for authorizing the HTTP request. - /// - private readonly AuthenticateRequestAsyncCallback _authCallback; - - /// - /// Request-header field containing information about the user agent originating the request - /// - private readonly string? _userAgent; - - /// - /// Determines whether the operation payload is constructed dynamically based on operation payload metadata. - /// If false, the operation payload must be provided via the 'payload' property. - /// - private readonly bool _enableDynamicPayload; - - /// - /// Determines whether payload parameters are resolved from the arguments by - /// full name (parameter name prefixed with the parent property name). - /// - private readonly bool _enablePayloadNamespacing; - - /// - /// Creates an instance of the class. - /// - /// An instance of the HttpClient class. - /// Optional callback for adding auth data to the API requests. - /// Optional request-header field containing information about the user agent originating the request. - /// Determines whether the operation payload is constructed dynamically based on operation payload metadata. - /// If false, the operation payload must be provided via the 'payload' property. - /// - /// Determines whether payload parameters are resolved from the arguments by - /// full name (parameter name prefixed with the parent property name). 
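As a rough illustration of the two payload modes described by the runner fields above (argument values are hypothetical): with dynamic payload building enabled, individual payload properties are supplied as named arguments; otherwise the whole body travels through the artificial 'payload' and 'content-type' arguments defined on RestApiOperation.

using System.Collections.Generic;

// Dynamic payload: properties are resolved by name (optionally namespaced).
var dynamicArguments = new Dictionary<string, string>
{
    ["name"] = "my-secret",
    ["value"] = "s3cr3t",
};

// Static payload: the caller provides the serialized body and its media type.
var staticArguments = new Dictionary<string, string>
{
    ["payload"] = "{\"name\":\"my-secret\",\"value\":\"s3cr3t\"}", // RestApiOperation.PayloadArgumentName
    ["content-type"] = "application/json",                         // RestApiOperation.ContentTypeArgumentName
};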
- public RestApiOperationRunner( - HttpClient httpClient, - AuthenticateRequestAsyncCallback? authCallback = null, - string? userAgent = null, - bool enableDynamicPayload = false, - bool enablePayloadNamespacing = false) - { - this._httpClient = httpClient; - this._userAgent = userAgent ?? Telemetry.HttpUserAgent; - this._enableDynamicPayload = enableDynamicPayload; - this._enablePayloadNamespacing = enablePayloadNamespacing; - - // If no auth callback provided, use empty function - if (authCallback is null) - { - this._authCallback = _ => Task.CompletedTask; - } - else - { - this._authCallback = authCallback; - } - - this._payloadFactoryByMediaType = new() - { - { MediaTypeApplicationJson, this.BuildJsonPayload }, - { MediaTypeTextPlain, this.BuildPlainTextPayload } - }; - } - - /// - /// Executes the specified asynchronously, using the provided . - /// - /// The REST API operation to execute. - /// The dictionary of arguments to be passed to the operation. - /// Options for REST API operation run. - /// The cancellation token. - /// The task execution result. - public Task RunAsync( - RestApiOperation operation, - IDictionary arguments, - RestApiOperationRunOptions? options = null, - CancellationToken cancellationToken = default) - { - var url = this.BuildsOperationUrl(operation, arguments, options?.ServerUrlOverride, options?.ApiHostUrl); - - var headers = operation.RenderHeaders(arguments); - - var payload = this.BuildOperationPayload(operation, arguments); - - return this.SendAsync(url, operation.Method, headers, payload, cancellationToken); - } - - #region private - - /// - /// Sends an HTTP request. - /// - /// The url to send request to. - /// The HTTP request method. - /// Headers to include into the HTTP request. - /// HTTP request payload. - /// The cancellation token. - /// Response content and content type - private async Task SendAsync( - Uri url, - HttpMethod method, - IDictionary? headers = null, - HttpContent? payload = null, - CancellationToken cancellationToken = default) - { - using var requestMessage = new HttpRequestMessage(method, url); - - await this._authCallback(requestMessage).ConfigureAwait(false); - - if (payload != null) - { - requestMessage.Content = payload; - } - - requestMessage.Headers.Add("User-Agent", !string.IsNullOrWhiteSpace(this._userAgent) - ? this._userAgent - : Telemetry.HttpUserAgent); - - if (headers != null) - { - foreach (var header in headers) - { - requestMessage.Headers.Add(header.Key, header.Value); - } - } - - using var responseMessage = await this._httpClient.SendWithSuccessCheckAsync(requestMessage, cancellationToken).ConfigureAwait(false); - - return await SerializeResponseContentAsync(responseMessage.Content).ConfigureAwait(false); - } - - /// - /// Serializes the response content of an HTTP request. - /// - /// The HttpContent object containing the response content to be serialized. - /// The serialized content. 
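A hypothetical end-to-end call into the (internal) runner above; the Key Vault style operation is hand-built here purely for illustration, whereas it would normally come from OpenApiDocumentParser.

using System;
using System.Collections.Generic;
using System.Net.Http;

using var httpClient = new HttpClient();

var operation = new RestApiOperation(
    id: "GetSecret",
    serverUrl: new Uri("https://my-vault.vault.azure.net/"),
    path: "/secrets/{secret-name}",
    method: HttpMethod.Get,
    description: "Gets a secret.",
    parameters: new List<RestApiOperationParameter>(),
    headers: new Dictionary<string, string>());

var runner = new RestApiOperationRunner(httpClient, enableDynamicPayload: true);

var response = await runner.RunAsync(operation, new Dictionary<string, string> { ["secret-name"] = "my-secret" });

Console.WriteLine($"{response.ContentType}: {response.Content}");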
- private static async Task SerializeResponseContentAsync(HttpContent content) - { - var contentType = content.Headers.ContentType; - - var mediaType = contentType.MediaType; - - // Obtain the content serializer by media type (e.g., text/plain, application/json, image/jpg) - if (!s_serializerByContentType.TryGetValue(mediaType, out var serializer)) - { - // Split the media type into a primary-type and a sub-type - var mediaTypeParts = mediaType.Split('/'); - if (mediaTypeParts.Length != 2) - { - throw new SKException($"The string `{mediaType}` is not a valid media type."); - } - - var primaryMediaType = mediaTypeParts.First(); - - // Try to obtain the content serializer by the primary type (e.g., text, application, image) - if (!s_serializerByContentType.TryGetValue(primaryMediaType, out serializer)) - { - throw new SKException($"The content type `{mediaType}` is not supported."); - } - } - - // Serialize response content and return it - var serializedContent = await serializer.Invoke(content).ConfigureAwait(false); - - return new RestApiOperationResponse(serializedContent, contentType.ToString()); - } - - /// - /// Builds operation payload. - /// - /// The operation. - /// The payload arguments. - /// The HttpContent representing the payload. - private HttpContent? BuildOperationPayload(RestApiOperation operation, IDictionary arguments) - { - if (operation?.Method != HttpMethod.Put && operation?.Method != HttpMethod.Post) - { - return null; - } - - var mediaType = operation.Payload?.MediaType; - - // A try to resolve payload content type from the operation arguments if it's missing in the payload metadata. - if (string.IsNullOrEmpty(mediaType)) - { - if (!arguments.TryGetValue(RestApiOperation.ContentTypeArgumentName, out mediaType)) - { - throw new SKException($"No content type is provided for the {operation.Id} operation."); - } - } - - if (!this._payloadFactoryByMediaType.TryGetValue(mediaType!, out var payloadFactory)) - { - throw new SKException($"The media type {mediaType} of the {operation.Id} operation is not supported by {nameof(RestApiOperationRunner)}."); - } - - return payloadFactory.Invoke(operation.Payload, arguments); - } - - /// - /// Builds "application/json" payload. - /// - /// The payload meta-data. - /// The payload arguments. - /// The HttpContent representing the payload. - private HttpContent BuildJsonPayload(RestApiOperationPayload? payloadMetadata, IDictionary arguments) - { - //Build operation payload dynamically - if (this._enableDynamicPayload) - { - if (payloadMetadata == null) - { - throw new SKException("Payload can't be built dynamically due to the missing payload metadata."); - } - - var payload = this.BuildJsonObject(payloadMetadata.Properties, arguments); - - return new StringContent(payload.ToJsonString(), Encoding.UTF8, MediaTypeApplicationJson); - } - - //Get operation payload content from the 'payload' argument if dynamic payload building is not required. - if (!arguments.TryGetValue(RestApiOperation.PayloadArgumentName, out var content)) - { - throw new SKException($"No argument is found for the '{RestApiOperation.PayloadArgumentName}' payload content."); - } - - return new StringContent(content, Encoding.UTF8, MediaTypeApplicationJson); - } - - /// - /// Builds a JSON object from a list of RestAPI operation payload properties. - /// - /// The properties. - /// The arguments. - /// The namespace to add to the property name. - /// The JSON object. - private JsonObject BuildJsonObject(IList properties, IDictionary arguments, string? 
propertyNamespace = null) - { - var result = new JsonObject(); - - foreach (var propertyMetadata in properties) - { - var argumentName = this.GetArgumentNameForPayload(propertyMetadata.Name, propertyNamespace); - - if (propertyMetadata.Type == "object") - { - var node = this.BuildJsonObject(propertyMetadata.Properties, arguments, argumentName); - result.Add(propertyMetadata.Name, node); - continue; - } - - if (arguments.TryGetValue(argumentName, out var propertyValue)) - { - result.Add(propertyMetadata.Name, ConvertJsonPropertyValueType(propertyValue, propertyMetadata)); - continue; - } - - if (propertyMetadata.IsRequired) - { - throw new SKException($"No argument is found for the '{propertyMetadata.Name}' payload property."); - } - } - - return result; - } - - /// - /// Converts the JSON property value to the REST API type specified in metadata. - /// - /// The value of the property to be converted. - /// The metadata of the property. - /// A JsonNode representing the converted property value. - private static JsonNode? ConvertJsonPropertyValueType(string propertyValue, RestApiOperationPayloadProperty propertyMetadata) - { - switch (propertyMetadata.Type) - { - case "number": - { - if (long.TryParse(propertyValue, out var intValue)) - { - return JsonValue.Create(intValue); - } - - return JsonValue.Create(double.Parse(propertyValue, CultureInfo.InvariantCulture)); - } - - case "boolean": - { - return JsonValue.Create(bool.Parse(propertyValue)); - } - - case "integer": - { - return JsonValue.Create(int.Parse(propertyValue, CultureInfo.InvariantCulture)); - } - - case "array": - { - if (JsonArray.Parse(propertyValue) is JsonArray array) - { - return array; - } - - throw new SKException($"Can't convert OpenAPI property - {propertyMetadata.Name} value - {propertyValue} of 'array' type to JSON array."); - } - - case "string": - { - return JsonValue.Create(propertyValue); - } - - default: - { - throw new SKException($"Unexpected OpenAPI data type - {propertyMetadata.Type}"); - } - } - } - - /// - /// Builds "text/plain" payload. - /// - /// The payload meta-data. - /// The payload arguments. - /// The HttpContent representing the payload. - private HttpContent BuildPlainTextPayload(RestApiOperationPayload? payloadMetadata, IDictionary arguments) - { - if (!arguments.TryGetValue(RestApiOperation.PayloadArgumentName, out var propertyValue)) - { - throw new SKException($"No argument is found for the '{RestApiOperation.PayloadArgumentName}' payload content."); - } - - return new StringContent(propertyValue, Encoding.UTF8, MediaTypeTextPlain); - } - - /// - /// Retrieves the argument name for a payload property. - /// - /// The name of the property. - /// The namespace to add to the property name (optional). - /// The argument name for the payload property. - private string GetArgumentNameForPayload(string propertyName, string? propertyNamespace) - { - if (!this._enablePayloadNamespacing) - { - return propertyName; - } - - return string.IsNullOrEmpty(propertyNamespace) ? propertyName : $"{propertyNamespace}.{propertyName}"; - } - - /// - /// Builds operation Url. - /// - /// The REST API operation. - /// The operation arguments. - /// Override for REST API operation server url. - /// The URL of REST API host. - /// The operation Url. - private Uri BuildsOperationUrl(RestApiOperation operation, IDictionary arguments, Uri? serverUrlOverride = null, Uri? 
apiHostUrl = null) - { - var url = operation.BuildOperationUrl(arguments, serverUrlOverride, apiHostUrl); - - var urlBuilder = new UriBuilder(url); - - urlBuilder.Query = operation.BuildQueryString(arguments); - - return urlBuilder.Uri; - } - - #endregion -} diff --git a/dotnet/src/Functions/Functions.OpenApi.Extensions/Functions.OpenApi.Extensions.csproj b/dotnet/src/Functions/Functions.OpenApi.Extensions/Functions.OpenApi.Extensions.csproj new file mode 100644 index 000000000000..2a87acb8719d --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi.Extensions/Functions.OpenApi.Extensions.csproj @@ -0,0 +1,20 @@ + + + + Microsoft.SemanticKernel.Plugins.OpenApi.Extensions + $(AssemblyName) + netstandard2.0 + alpha + + + + + + Semantic Kernel - OpenAPI Plugin Extensions + Semantic Kernel OpenAPI Plugin Extensions + false + + + + + \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.OpenApi/AssemblyInfo.cs b/dotnet/src/Functions/Functions.OpenApi/AssemblyInfo.cs new file mode 100644 index 000000000000..6448fb951d15 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0042")] diff --git a/dotnet/src/Functions/Functions.OpenApi/Authentication/AuthenticateRequestAsyncCallback.cs b/dotnet/src/Functions/Functions.OpenApi/Authentication/AuthenticateRequestAsyncCallback.cs new file mode 100644 index 000000000000..d39277691f8d --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Authentication/AuthenticateRequestAsyncCallback.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Represents a delegate that defines the method signature for asynchronously authenticating an HTTP request. +/// +/// The to authenticate. +/// The cancellation token. +/// A representing the asynchronous operation. +public delegate Task AuthenticateRequestAsyncCallback(HttpRequestMessage request, CancellationToken cancellationToken = default); diff --git a/dotnet/src/Functions/Functions.OpenApi/DocumentLoader.cs b/dotnet/src/Functions/Functions.OpenApi/DocumentLoader.cs new file mode 100644 index 000000000000..80f8b54b8780 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/DocumentLoader.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +internal static class DocumentLoader +{ + internal static async Task LoadDocumentFromUriAsync( + Uri uri, + ILogger logger, + HttpClient httpClient, + AuthenticateRequestAsyncCallback? authCallback, + string? userAgent, + CancellationToken cancellationToken) + { + using var request = new HttpRequestMessage(HttpMethod.Get, uri.ToString()); + request.Headers.UserAgent.Add(ProductInfoHeaderValue.Parse(userAgent ?? 
HttpHeaderValues.UserAgent)); + + if (authCallback is not null) + { + await authCallback(request, cancellationToken).ConfigureAwait(false); + } + + logger.LogTrace("Importing document from {0}", uri); + + using var response = await httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); + return await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + } + + internal static async Task LoadDocumentFromFilePathAsync( + string filePath, + ILogger logger, + CancellationToken cancellationToken) + { + var pluginJson = string.Empty; + + if (!File.Exists(filePath)) + { + throw new FileNotFoundException($"Invalid URI. The specified path '{filePath}' does not exist."); + } + + logger.LogTrace("Importing document from {0}", filePath); + + using (var sr = File.OpenText(filePath)) + { + return await sr.ReadToEndAsync().ConfigureAwait(false); // must await here to avoid stream reader being disposed before the string is read + } + } + + internal static async Task LoadDocumentFromStreamAsync(Stream stream) + { + using StreamReader reader = new(stream); + return await reader.ReadToEndAsync().ConfigureAwait(false); + } +} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Extensions/OpenApiFunctionExecutionParameters.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs similarity index 84% rename from dotnet/src/Functions/Functions.OpenAPI/Extensions/OpenApiFunctionExecutionParameters.cs rename to dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs index 5d7a045b1998..3b1d600f8025 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Extensions/OpenApiFunctionExecutionParameters.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiFunctionExecutionParameters.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Net.Http; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; +using Microsoft.SemanticKernel.Http; -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// OpenAPI function execution parameters. @@ -54,6 +54,11 @@ public class OpenApiFunctionExecutionParameters /// public bool EnablePayloadNamespacing { get; set; } + /// + /// Optional list of HTTP operations to skip when importing the OpenAPI document. + /// + public IList OperationsToExclude { get; set; } + /// /// Initializes a new instance of the class. /// @@ -68,21 +73,24 @@ public class OpenApiFunctionExecutionParameters /// If false, the operation payload must be provided via the 'payload' context variable. /// Determines whether payload parameter names are augmented with namespaces. /// Namespaces prevent naming conflicts by adding the parent parameter name as a prefix, separated by dots. + /// Optional list of operations not to import, e.g. in case they are not supported public OpenApiFunctionExecutionParameters( HttpClient? httpClient = null, AuthenticateRequestAsyncCallback? authCallback = null, Uri? serverUrlOverride = null, - string userAgent = Telemetry.HttpUserAgent, + string? userAgent = null, bool ignoreNonCompliantErrors = false, - bool enableDynamicOperationPayload = false, - bool enablePayloadNamespacing = false) + bool enableDynamicOperationPayload = true, + bool enablePayloadNamespacing = false, + IList? 
operationsToExclude = null) { this.HttpClient = httpClient; this.AuthCallback = authCallback; this.ServerUrlOverride = serverUrlOverride; - this.UserAgent = userAgent; + this.UserAgent = userAgent ?? HttpHeaderValues.UserAgent; this.IgnoreNonCompliantErrors = ignoreNonCompliantErrors; this.EnableDynamicPayload = enableDynamicOperationPayload; this.EnablePayloadNamespacing = enablePayloadNamespacing; + this.OperationsToExclude = operationsToExclude ?? new List(); } } diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs new file mode 100644 index 000000000000..4b574df4f45e --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs @@ -0,0 +1,388 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Provides extension methods for importing plugins exposed as OpenAPI v3 endpoints. +/// +public static class OpenApiKernelExtensions +{ + // TODO: Revise XML comments + + /// + /// Creates a plugin from an OpenAPI v3 endpoint and adds it to the kernel's plugins collection. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// The file path to the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task ImportPluginFromOpenApiAsync( + this Kernel kernel, + string pluginName, + string filePath, + OpenApiFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + KernelPlugin plugin = await kernel.CreatePluginFromOpenApiAsync(pluginName, filePath, executionParameters, cancellationToken).ConfigureAwait(false); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Creates a plugin from an OpenAPI v3 endpoint and adds it to the kernel's plugins collection. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// A local or remote URI referencing the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task ImportPluginFromOpenApiAsync( + this Kernel kernel, + string pluginName, + Uri uri, + OpenApiFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + KernelPlugin plugin = await kernel.CreatePluginFromOpenApiAsync(pluginName, uri, executionParameters, cancellationToken).ConfigureAwait(false); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Creates a plugin from an OpenAPI v3 endpoint and adds it to the kernel's plugins collection. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// A stream representing the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. 
+ /// A collection of invocable functions + public static async Task ImportPluginFromOpenApiAsync( + this Kernel kernel, + string pluginName, + Stream stream, + OpenApiFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + KernelPlugin plugin = await kernel.CreatePluginFromOpenApiAsync(pluginName, stream, executionParameters, cancellationToken).ConfigureAwait(false); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Creates a plugin from an OpenAPI v3 endpoint. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// The file path to the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task CreatePluginFromOpenApiAsync( + this Kernel kernel, + string pluginName, + string filePath, + OpenApiFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + Verify.ValidPluginName(pluginName, kernel.Plugins); + +#pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. + var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); +#pragma warning restore CA2000 + + var openApiSpec = await DocumentLoader.LoadDocumentFromFilePathAsync( + filePath, + kernel.LoggerFactory.CreateLogger(typeof(OpenApiKernelExtensions)) ?? NullLogger.Instance, + cancellationToken).ConfigureAwait(false); + + return await CreateOpenApiPluginAsync( + kernel, + pluginName, + executionParameters, + httpClient, + openApiSpec, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates a plugin from an OpenAPI v3 endpoint. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// A local or remote URI referencing the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task CreatePluginFromOpenApiAsync( + this Kernel kernel, + string pluginName, + Uri uri, + OpenApiFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + Verify.ValidPluginName(pluginName, kernel.Plugins); + +#pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. + var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); +#pragma warning restore CA2000 + + var openApiSpec = await DocumentLoader.LoadDocumentFromUriAsync( + uri, + kernel.LoggerFactory.CreateLogger(typeof(OpenApiKernelExtensions)) ?? 
NullLogger.Instance, + httpClient, + executionParameters?.AuthCallback, + executionParameters?.UserAgent, + cancellationToken).ConfigureAwait(false); + + return await CreateOpenApiPluginAsync( + kernel, + pluginName, + executionParameters, + httpClient, + openApiSpec, + uri, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates a plugin from an OpenAPI v3 endpoint. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// A stream representing the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task CreatePluginFromOpenApiAsync( + this Kernel kernel, + string pluginName, + Stream stream, + OpenApiFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + Verify.ValidPluginName(pluginName, kernel.Plugins); + +#pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. + var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); +#pragma warning restore CA2000 + + var openApiSpec = await DocumentLoader.LoadDocumentFromStreamAsync(stream).ConfigureAwait(false); + + return await CreateOpenApiPluginAsync( + kernel, + pluginName, + executionParameters, + httpClient, + openApiSpec, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + #region private + + private static async Task CreateOpenApiPluginAsync( + Kernel kernel, + string pluginName, + OpenApiFunctionExecutionParameters? executionParameters, + HttpClient httpClient, + string pluginJson, + Uri? documentUri = null, + CancellationToken cancellationToken = default) + { + using var documentStream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(pluginJson)); + + ILoggerFactory loggerFactory = kernel.LoggerFactory; + + var parser = new OpenApiDocumentParser(loggerFactory); + + var operations = await parser.ParseAsync( + documentStream, + executionParameters?.IgnoreNonCompliantErrors ?? false, + executionParameters?.OperationsToExclude, + cancellationToken).ConfigureAwait(false); + + var runner = new RestApiOperationRunner( + httpClient, + executionParameters?.AuthCallback, + executionParameters?.UserAgent, + executionParameters?.EnableDynamicPayload ?? true, + executionParameters?.EnablePayloadNamespacing ?? false); + + var functions = new List(); + ILogger logger = loggerFactory.CreateLogger(typeof(OpenApiKernelExtensions)) ?? NullLogger.Instance; + foreach (var operation in operations) + { + try + { + logger.LogTrace("Registering Rest function {0}.{1}", pluginName, operation.Id); + functions.Add(CreateRestApiFunction(pluginName, runner, operation, executionParameters, documentUri, loggerFactory)); + } + catch (Exception ex) when (!ex.IsCriticalException()) + { + //Logging the exception and keep registering other Rest functions + logger.LogWarning(ex, "Something went wrong while rendering the Rest function. Function: {0}.{1}. Error: {2}", + pluginName, operation.Id, ex.Message); + } + } + + return KernelPluginFactory.CreateFromFunctions(pluginName, null, functions); + } + + /// + /// Registers KernelFunctionFactory for a REST API operation. + /// + /// Plugin name. 
+ /// The REST API operation runner. + /// The REST API operation. + /// Function execution parameters. + /// The URI of OpenAPI document. + /// The logger factory. + /// An instance of class. + private static KernelFunction CreateRestApiFunction( + string pluginName, + RestApiOperationRunner runner, + RestApiOperation operation, + OpenApiFunctionExecutionParameters? executionParameters, + Uri? documentUri = null, + ILoggerFactory? loggerFactory = null) + { + IReadOnlyList restOperationParameters = operation.GetParameters( + executionParameters?.EnableDynamicPayload ?? true, + executionParameters?.EnablePayloadNamespacing ?? false + ); + + var logger = loggerFactory?.CreateLogger(typeof(OpenApiKernelExtensions)) ?? NullLogger.Instance; + + async Task ExecuteAsync(KernelArguments variables, CancellationToken cancellationToken) + { + try + { + // Extract function arguments from context + var arguments = new KernelArguments(); + foreach (var parameter in restOperationParameters) + { + // A try to resolve argument by alternative parameter name + if (!string.IsNullOrEmpty(parameter.AlternativeName) && + variables.TryGetValue(parameter.AlternativeName!, out object? value) && + value is not null) + { + arguments.Add(parameter.Name, value); + continue; + } + + // A try to resolve argument by original parameter name + if (variables.TryGetValue(parameter.Name, out value) && + value is not null) + { + arguments.Add(parameter.Name, value); + continue; + } + + if (parameter.IsRequired) + { + throw new KeyNotFoundException( + $"No variable found in context to use as an argument for the '{parameter.Name}' parameter of the '{pluginName}.{operation.Id}' Rest function."); + } + } + + var options = new RestApiOperationRunOptions + { + ServerUrlOverride = executionParameters?.ServerUrlOverride, + ApiHostUrl = documentUri is not null ? new Uri(documentUri.GetLeftPart(UriPartial.Authority)) : null + }; + + return await runner.RunAsync(operation, arguments, options, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (!ex.IsCriticalException()) + { + logger!.LogError(ex, "RestAPI function {Plugin}.{Name} execution failed with error {Error}", pluginName, operation.Id, ex.Message); + throw; + } + } + + var parameters = restOperationParameters + .Select(p => new KernelParameterMetadata(p.AlternativeName ?? p.Name) + { + Description = $"{p.Description ?? p.Name}", + DefaultValue = p.DefaultValue ?? string.Empty, + IsRequired = p.IsRequired, + ParameterType = p.Type switch { "string" => typeof(string), "boolean" => typeof(bool), _ => null }, + Schema = p.Schema ?? (p.Type is null ? null : KernelJsonSchema.Parse($"{{\"type\":\"{p.Type}\"}}")), + }) + .ToList(); + + var returnParameter = operation.GetDefaultReturnParameter(); + + return KernelFunctionFactory.CreateFromMethod( + method: ExecuteAsync, + parameters: parameters, + returnParameter: returnParameter, + description: operation.Description, + functionName: ConvertOperationIdToValidFunctionName(operation.Id, logger), + loggerFactory: loggerFactory); + } + + /// + /// Converts operation id to valid SK Function name. + /// A function name can contain only ASCII letters, digits, and underscores. + /// + /// The operation id. + /// The logger. + /// Valid SK Function name. 
+ private static string ConvertOperationIdToValidFunctionName(string operationId, ILogger logger) + { + try + { + Verify.ValidFunctionName(operationId); + return operationId; + } + catch (ArgumentException) + { + // The exception indicates that the operationId is not a valid function name. + // To comply with the SK Function name requirements, it needs to be converted or sanitized. + // Therefore, it should not be re-thrown, but rather swallowed to allow the conversion below. + } + + // Tokenize operation id on forward and back slashes + string[] tokens = operationId.Split('/', '\\'); + string result = string.Empty; + + foreach (string token in tokens) + { + // Removes all characters that are not ASCII letters, digits, and underscores. + string formattedToken = s_removeInvalidCharsRegex.Replace(token, ""); + result += CultureInfo.CurrentCulture.TextInfo.ToTitleCase(formattedToken.ToLower(CultureInfo.CurrentCulture)); + } + + logger.LogInformation("Operation name \"{0}\" converted to \"{1}\" to comply with SK Function name requirements. Use \"{2}\" when invoking function.", operationId, result, result); + + return result; + } + + /// + /// Used to convert operationId to SK function names. + /// + private static readonly Regex s_removeInvalidCharsRegex = new("[^0-9A-Za-z_]"); + + #endregion +} diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiSchemaExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiSchemaExtensions.cs new file mode 100644 index 000000000000..f9e0d83feb35 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiSchemaExtensions.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Globalization; +using System.IO; +using System.Text; +using Microsoft.OpenApi.Models; +using Microsoft.OpenApi.Writers; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +internal static class OpenApiSchemaExtensions +{ + /// + /// Gets a JSON serialized representation of an + /// + /// The schema. + /// An instance of that contains the JSON Schema. + internal static KernelJsonSchema ToJsonSchema(this OpenApiSchema schema) + { + var schemaBuilder = new StringBuilder(); + var jsonWriter = new OpenApiJsonWriter(new StringWriter(schemaBuilder, CultureInfo.InvariantCulture)); + jsonWriter.Settings.InlineLocalReferences = true; + schema.SerializeAsV3(jsonWriter); + return KernelJsonSchema.Parse(schemaBuilder.ToString()); + } +} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Extensions/RestApiOperationExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationExtensions.cs similarity index 75% rename from dotnet/src/Functions/Functions.OpenAPI/Extensions/RestApiOperationExtensions.cs rename to dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationExtensions.cs index b69dc88d8b00..86786c08b8a8 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Extensions/RestApiOperationExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationExtensions.cs @@ -1,16 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System; using System.Collections.Generic; using System.Linq; using System.Net.Http; using System.Text.RegularExpressions; -using Microsoft.SemanticKernel.Diagnostics; -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Class for extensions methods for the class. @@ -21,7 +16,6 @@ internal static class RestApiOperationExtensions /// Returns list of REST API operation parameters. /// /// The REST API operation. - /// The server URL override. /// Determines whether to include the operation payload parameters from payload metadata. /// If false, the 'payload' and 'content-type' artificial parameters are added instead. /// @@ -31,37 +25,15 @@ internal static class RestApiOperationExtensions /// would be resolved from the same 'email' argument, which is incorrect. However, by employing namespaces, /// the parameters 'sender.email' and 'receiver.mail' will be correctly resolved from arguments with the same names. /// - /// The URI of OpenApi document. /// The list of parameters. public static IReadOnlyList GetParameters( this RestApiOperation operation, - Uri? serverUrlOverride = null, - bool addPayloadParamsFromMetadata = false, - bool enablePayloadNamespacing = false, - Uri? documentUri = null) + bool addPayloadParamsFromMetadata = true, + bool enablePayloadNamespacing = false) { - string? serverUrlString = null; - Uri? serverUrl = serverUrlOverride ?? operation.ServerUrl ?? documentUri; + var parameters = new List(operation.Parameters); - if (serverUrl is not null) - { - serverUrlString = $"{serverUrl.GetLeftPart(UriPartial.Authority)}/"; - } - - var parameters = new List(operation.Parameters) - { - // Register the "server-url" parameter if override is provided - new RestApiOperationParameter( - name: RestApiOperation.ServerUrlArgumentName, - type: "string", - isRequired: false, - expand: false, - RestApiOperationParameterLocation.Path, - RestApiOperationParameterStyle.Simple, - defaultValue: serverUrlString) - }; - - //Add payload parameters + // Add payload parameters if (operation.Method == HttpMethod.Put || operation.Method == HttpMethod.Post) { parameters.AddRange(GetPayloadParameters(operation, addPayloadParamsFromMetadata, enablePayloadNamespacing)); @@ -76,6 +48,42 @@ public static IReadOnlyList GetParameters( return parameters; } + /// + /// Returns the default return parameter metadata for a given REST API operation. + /// + /// The REST API operation object with Responses to parse. + /// A list of preferred response codes to use when selecting the default response. + /// The default return parameter metadata, if any. + public static KernelReturnParameterMetadata? GetDefaultReturnParameter(this RestApiOperation operation, string[]? preferredResponses = null) + { + RestApiOperationExpectedResponse? restOperationResponse = GetDefaultResponse(operation.Responses, preferredResponses ??= s_preferredResponses); + + var returnParameter = + restOperationResponse is not null ? new KernelReturnParameterMetadata { Description = restOperationResponse.Description, Schema = restOperationResponse.Schema } : null; + + return returnParameter; + } + + /// + /// Retrieves the default response for a given REST API operation. + /// + /// The REST API operation responses to parse. + /// The preferred response codes to use when selecting the default response. + /// The default response, if any. 
+ private static RestApiOperationExpectedResponse? GetDefaultResponse(IDictionary responses, string[] preferredResponses) + { + foreach (var code in preferredResponses) + { + if (responses.TryGetValue(code, out var response)) + { + return response; + } + } + + // If no appropriate response is found, return null or throw an exception + return null; + } + /// /// Retrieves the payload parameters for a given REST API operation. /// @@ -90,7 +98,7 @@ private static List GetPayloadParameters(RestApiOpera { if (operation.Payload is null) { - throw new SKException($"Payload parameters cannot be retrieved from the '{operation.Id}' operation payload metadata because it is missing."); + throw new KernelException($"Payload parameters cannot be retrieved from the '{operation.Id}' operation payload metadata because it is missing."); } // The 'text/plain' content type payload metadata does not contain parameter names. @@ -141,7 +149,8 @@ private static RestApiOperationParameter CreatePayloadArtificialParameter(RestAp expand: false, RestApiOperationParameterLocation.Body, RestApiOperationParameterStyle.Simple, - description: operation.Payload?.Description ?? "REST API request body."); + description: operation.Payload?.Description ?? "REST API request body.", + schema: operation.Payload?.Schema); } /// @@ -170,7 +179,9 @@ private static List GetParametersFromPayloadMetadata( expand: false, RestApiOperationParameterLocation.Body, RestApiOperationParameterStyle.Simple, - description: property.Description)); + defaultValue: property.DefaultValue, + description: property.Description, + schema: property.Schema)); } parameters.AddRange(GetParametersFromPayloadMetadata(property.Properties, enableNamespacing, parameterName)); @@ -198,4 +209,5 @@ private static string GetPropertyName(RestApiOperationPayloadProperty property, private const string MediaTypeTextPlain = "text/plain"; private static readonly Regex s_invalidSymbolsRegex = new("[^0-9A-Za-z_]+"); + private static readonly string[] s_preferredResponses = new string[] { "200", "201", "202", "203", "204", "205", "206", "207", "208", "226", "2XX", "default" }; } diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationResponseExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationResponseExtensions.cs new file mode 100644 index 000000000000..fbbc68bba4ab --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationResponseExtensions.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using Json.Schema; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Class for extensions methods for the class. +/// +public static class RestApiOperationResponseExtensions +{ + /// + /// Validates the response content against the schema. + /// + /// True if the response is valid, false otherwise. + /// + /// If the schema is not specified, the response is considered valid. + /// If the content type is not specified, the response is considered valid. + /// If the content type is not supported, the response is considered valid. + /// Right now, only JSON is supported. 
+ /// + public static bool IsValid(this RestApiOperationResponse response) + { + if (response.ExpectedSchema is null) + { + return true; + } + + if (string.IsNullOrEmpty(response.ContentType)) + { + return true; + } + + return response.ContentType switch + { + "application/json" => ValidateJson(response), + "application/xml" => ValidateXml(response), + "text/plain" or "text/html" => ValidateTextHtml(response), + _ => true, + }; + } + + private static bool ValidateJson(RestApiOperationResponse response) + { + try + { + var jsonSchema = JsonSchema.FromText(JsonSerializer.Serialize(response.ExpectedSchema)); + using var contentDoc = JsonDocument.Parse(response.Content.ToString()); + var result = jsonSchema.Evaluate(contentDoc); + return result.IsValid; + } + catch (JsonException) + { + return false; + } + } + + private static bool ValidateXml(RestApiOperationResponse response) + { + // todo -- implement + return true; + } + + private static bool ValidateTextHtml(RestApiOperationResponse response) + { + try + { + var jsonSchema = JsonSchema.FromText(JsonSerializer.Serialize(response.ExpectedSchema)); + using var contentDoc = JsonDocument.Parse($"\"{response.Content}\""); + var result = jsonSchema.Evaluate(contentDoc); + return result.IsValid; + } + catch (JsonException) + { + return false; + } + } +} diff --git a/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj b/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj new file mode 100644 index 000000000000..e29c17e45bfd --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Functions.OpenApi.csproj @@ -0,0 +1,28 @@ + + + + Microsoft.SemanticKernel.Plugins.OpenApi + $(AssemblyName) + netstandard2.0 + alpha + + + + + + Semantic Kernel - OpenAPI Plugins + Semantic Kernel OpenAPI Plugins + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.OpenApi/HttpContentFactory.cs b/dotnet/src/Functions/Functions.OpenApi/HttpContentFactory.cs new file mode 100644 index 000000000000..11e9075cc266 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/HttpContentFactory.cs @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Net.Http; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Represents a delegate for creating HTTP content for a REST API operation. +/// +/// The operation payload metadata. +/// The operation arguments. +/// The HTTP content representing the operation payload. +internal delegate HttpContent HttpContentFactory(RestApiOperationPayload? payload, IDictionary arguments); diff --git a/dotnet/src/Functions/Functions.OpenAPI/HttpResponseContentSerializer.cs b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentSerializer.cs similarity index 88% rename from dotnet/src/Functions/Functions.OpenAPI/HttpResponseContentSerializer.cs rename to dotnet/src/Functions/Functions.OpenApi/HttpResponseContentSerializer.cs index 2ea3c5f49ecd..8af1aae2e043 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/HttpResponseContentSerializer.cs +++ b/dotnet/src/Functions/Functions.OpenApi/HttpResponseContentSerializer.cs @@ -3,7 +3,7 @@ using System.Net.Http; using System.Threading.Tasks; -namespace Microsoft.SemanticKernel.Functions.OpenAPI; +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Represents a delegate for serializing REST API operation response content. 
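The ValidateJson and ValidateTextHtml helpers above lean on the Json.Schema (JsonSchema.Net) evaluator. A minimal standalone sketch of the same pattern follows, for illustration only and not part of the change set; the type, method, and variable names are hypothetical, and in practice the schema and content strings would come from RestApiOperationResponse.ExpectedSchema and Content.

using System.Text.Json;
using Json.Schema;

internal static class ResponseValidationSketch
{
    // Mirrors ValidateJson above: load the expected schema text, parse the response body,
    // and evaluate the body against the schema; malformed JSON is treated as an invalid response.
    internal static bool IsJsonContentValid(string schemaJson, string contentJson)
    {
        try
        {
            var jsonSchema = JsonSchema.FromText(schemaJson);
            using var contentDoc = JsonDocument.Parse(contentJson);
            return jsonSchema.Evaluate(contentDoc).IsValid;
        }
        catch (JsonException)
        {
            return false;
        }
    }
}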
diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs new file mode 100644 index 000000000000..eb637b86f1ab --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs @@ -0,0 +1,277 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Text.Json.Nodes; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// The REST API operation. +/// +public sealed class RestApiOperation +{ + /// + /// Gets the name of an artificial parameter to be used for operation having "text/plain" payload media type. + /// + public static string PayloadArgumentName => "payload"; + + /// + /// Gets the name of an artificial parameter to be used for indicate payload media-type if it's missing in payload metadata. + /// + public static string ContentTypeArgumentName => "content-type"; + + /// + /// The operation identifier. + /// + public string Id { get; } + + /// + /// The operation description. + /// + public string Description { get; } + + /// + /// The operation path. + /// + public string Path { get; } + + /// + /// The operation method - GET, POST, PUT, DELETE. + /// + public HttpMethod Method { get; } + + /// + /// The server URL. + /// + public Uri? ServerUrl { get; } + + /// + /// The operation parameters. + /// + public IList Parameters { get; } + + /// + /// The list of possible operation responses. + /// + public IDictionary Responses { get; } + + /// + /// The operation payload. + /// + public RestApiOperationPayload? Payload { get; } + + /// + /// Creates an instance of a class. + /// + /// The operation identifier. + /// The server URL. + /// The operation path. + /// The operation method. + /// The operation description. + /// The operation parameters. + /// The operation payload. + /// The operation responses. + public RestApiOperation( + string id, + Uri? serverUrl, + string path, + HttpMethod method, + string description, + IList parameters, + RestApiOperationPayload? payload = null, + IDictionary? responses = null) + { + this.Id = id; + this.ServerUrl = serverUrl; + this.Path = path; + this.Method = method; + this.Description = description; + this.Parameters = parameters; + this.Payload = payload; + this.Responses = responses ?? new Dictionary(); + } + + /// + /// Builds operation Url. + /// + /// The operation arguments. + /// Override for REST API operation server url. + /// The URL of REST API host. + /// The operation Url. + public Uri BuildOperationUrl(IDictionary arguments, Uri? serverUrlOverride = null, Uri? apiHostUrl = null) + { + var serverUrl = this.GetServerUrl(serverUrlOverride, apiHostUrl); + + var path = this.BuildPath(this.Path, arguments); + + return new Uri(serverUrl, $"{path.TrimStart('/')}"); + } + + /// + /// Builds operation request headers. + /// + /// The operation arguments. + /// The request headers. + public IDictionary BuildHeaders(IDictionary arguments) + { + var headers = new Dictionary(); + + var parameters = this.Parameters.Where(p => p.Location == RestApiOperationParameterLocation.Header); + + foreach (var parameter in parameters) + { + if (!arguments.TryGetValue(parameter.Name, out object? argument) || argument is null) + { + // Throw an exception if the parameter is a required one but no value is provided. 
+ if (parameter.IsRequired) + { + throw new KernelException($"No argument is provided for the '{parameter.Name}' required parameter of the operation - '{this.Id}'."); + } + + // Skipping not required parameter if no argument provided for it. + continue; + } + + var parameterStyle = parameter.Style ?? RestApiOperationParameterStyle.Simple; + + if (!s_parameterSerializers.TryGetValue(parameterStyle, out var serializer)) + { + throw new KernelException($"The headers parameter '{parameterStyle}' serialization style is not supported."); + } + + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); + + //Serializing the parameter and adding it to the headers. + headers.Add(parameter.Name, serializer.Invoke(parameter, node)); + } + + return headers; + } + + /// + /// Builds the operation query string. + /// + /// The operation arguments. + /// The query string. + public string BuildQueryString(IDictionary arguments) + { + var segments = new List(); + + var parameters = this.Parameters.Where(p => p.Location == RestApiOperationParameterLocation.Query); + + foreach (var parameter in parameters) + { + if (!arguments.TryGetValue(parameter.Name, out object? argument) || argument is null) + { + // Throw an exception if the parameter is a required one but no value is provided. + if (parameter.IsRequired) + { + throw new KernelException($"No argument or value is provided for the '{parameter.Name}' required parameter of the operation - '{this.Id}'."); + } + + // Skipping not required parameter if no argument provided for it. + continue; + } + + var parameterStyle = parameter.Style ?? RestApiOperationParameterStyle.Form; + + if (!s_parameterSerializers.TryGetValue(parameterStyle, out var serializer)) + { + throw new KernelException($"The query string parameter '{parameterStyle}' serialization style is not supported."); + } + + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); + + // Serializing the parameter and adding it to the query string if there's an argument for it. + segments.Add(serializer.Invoke(parameter, node)); + } + + return string.Join("&", segments); + } + + #region private + + /// + /// Builds operation path. + /// + /// The original path template. + /// The operation arguments. + /// The path. + private string BuildPath(string pathTemplate, IDictionary arguments) + { + var parameters = this.Parameters.Where(p => p.Location == RestApiOperationParameterLocation.Path); + + foreach (var parameter in parameters) + { + if (!arguments.TryGetValue(parameter.Name, out object? argument) || argument is null) + { + // Throw an exception if the parameter is a required one but no value is provided. + if (parameter.IsRequired) + { + throw new KernelException($"No argument is provided for the '{parameter.Name}' required parameter of the operation - '{this.Id}'."); + } + + // Skipping not required parameter if no argument provided for it. + continue; + } + + var parameterStyle = parameter.Style ?? RestApiOperationParameterStyle.Simple; + + if (!s_parameterSerializers.TryGetValue(parameterStyle, out var serializer)) + { + throw new KernelException($"The path parameter '{parameterStyle}' serialization style is not supported."); + } + + var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument); + + // Serializing the parameter and adding it to the path. + pathTemplate = pathTemplate.Replace($"{{{parameter.Name}}}", node.ToString().Trim('"')); + } + + return pathTemplate; + } + + /// + /// Returns operation server Url. 
+ /// + /// Override for REST API operation server url. + /// The URL of REST API host. + /// The operation server url. + private Uri GetServerUrl(Uri? serverUrlOverride, Uri? apiHostUrl) + { + string serverUrlString; + + if (serverUrlOverride is not null) + { + serverUrlString = serverUrlOverride.AbsoluteUri; + } + else + { + serverUrlString = + this.ServerUrl?.AbsoluteUri ?? + apiHostUrl?.AbsoluteUri ?? + throw new InvalidOperationException($"Server url is not defined for operation {this.Id}"); + } + + // Make sure base url ends with trailing slash + if (!serverUrlString.EndsWith("/", StringComparison.OrdinalIgnoreCase)) + { + serverUrlString += "/"; + } + + return new Uri(serverUrlString); + } + + private static readonly Dictionary> s_parameterSerializers = new() + { + { RestApiOperationParameterStyle.Simple, SimpleStyleParameterSerializer.Serialize }, + { RestApiOperationParameterStyle.Form, FormStyleParameterSerializer.Serialize }, + { RestApiOperationParameterStyle.SpaceDelimited, SpaceDelimitedStyleParameterSerializer.Serialize }, + { RestApiOperationParameterStyle.PipeDelimited, PipeDelimitedStyleParameterSerializer.Serialize } + }; + + # endregion +} diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationExpectedResponse.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationExpectedResponse.cs new file mode 100644 index 000000000000..5a343638f7c6 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationExpectedResponse.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// The REST API operation response. +/// +public sealed class RestApiOperationExpectedResponse +{ + /// + /// Gets the description of the response. + /// + public string Description { get; } + + /// + /// Gets the media type of the response. + /// + public string MediaType { get; } + + /// + /// The schema of the response. + /// + public KernelJsonSchema? Schema { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// The description of the response. + /// The media type of the response. + /// The schema against which the response body should be validated. + public RestApiOperationExpectedResponse(string description, string mediaType, KernelJsonSchema? schema = null) + { + this.Description = description; + this.MediaType = mediaType; + this.Schema = schema; + } +} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameter.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameter.cs similarity index 87% rename from dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameter.cs rename to dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameter.cs index ec10244971cf..c6d8f3f1c8a0 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameter.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameter.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// The REST API operation parameter. @@ -50,13 +50,18 @@ public sealed class RestApiOperationParameter /// /// The default value. /// - public string? DefaultValue { get; } + public object? DefaultValue { get; } /// /// Specifies whether arrays and objects should generate separate parameters for each array item or object property. 
/// public bool Expand { get; } + /// + /// The schema of the parameter. + /// + public KernelJsonSchema? Schema { get; } + /// /// Creates an instance of a class. /// @@ -69,6 +74,7 @@ public sealed class RestApiOperationParameter /// Type of array item for parameters of "array" type. /// The parameter default value. /// The parameter description. + /// The parameter schema. public RestApiOperationParameter( string name, string type, @@ -77,8 +83,9 @@ public RestApiOperationParameter( RestApiOperationParameterLocation location, RestApiOperationParameterStyle? style = null, string? arrayItemType = null, - string? defaultValue = null, - string? description = null) + object? defaultValue = null, + string? description = null, + KernelJsonSchema? schema = null) { this.Name = name; this.Type = type; @@ -89,5 +96,6 @@ public RestApiOperationParameter( this.ArrayItemType = arrayItemType; this.DefaultValue = defaultValue; this.Description = description; + this.Schema = schema; } } diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameterLocation.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameterLocation.cs similarity index 90% rename from dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameterLocation.cs rename to dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameterLocation.cs index 47bc58ee7035..cad2155303eb 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameterLocation.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameterLocation.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// The REST API operation parameter location. diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameterStyle.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameterStyle.cs similarity index 93% rename from dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameterStyle.cs rename to dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameterStyle.cs index fd223154c0e1..22f91956ab34 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationParameterStyle.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationParameterStyle.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// The REST API operation parameter style. diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationPayload.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationPayload.cs similarity index 76% rename from dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationPayload.cs rename to dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationPayload.cs index db19565be333..e30f2bf1a1e9 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationPayload.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationPayload.cs @@ -2,12 +2,12 @@ using System.Collections.Generic; -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// The REST API operation payload. /// -public record RestApiOperationPayload +public class RestApiOperationPayload { /// /// The payload MediaType. 
@@ -24,16 +24,23 @@ public record RestApiOperationPayload /// public IList Properties { get; } + /// + /// The schema of the parameter. + /// + public KernelJsonSchema? Schema { get; } + /// /// Creates an instance of a class. /// /// The media type. /// The properties. /// The description. - public RestApiOperationPayload(string mediaType, IList properties, string? description = null) + /// The JSON Schema. + public RestApiOperationPayload(string mediaType, IList properties, string? description = null, KernelJsonSchema? schema = null) { this.MediaType = mediaType; this.Properties = properties; this.Description = description; + this.Schema = schema; } } diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationPayloadProperty.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationPayloadProperty.cs new file mode 100644 index 000000000000..f83152ea1d0e --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationPayloadProperty.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// The REST API operation payload property. +/// +public sealed class RestApiOperationPayloadProperty +{ + /// + /// The property name. + /// + public string Name { get; } + + /// + /// The property type. + /// + public string Type { get; } + + /// + /// The property description. + /// + public string? Description { get; } + + /// + /// Flag specifying if the property is required or not. + /// + public bool IsRequired { get; } + + /// + /// The properties. + /// + public IList Properties { get; } + + /// + /// The schema of the parameter. + /// + public KernelJsonSchema? Schema { get; } + + /// + /// The default value. + /// + public object? DefaultValue { get; } + + /// + /// Creates an instance of a class. + /// + /// The name of the property. + /// The type of the property. + /// A flag specifying if the property is required or not. + /// A list of properties for the payload property. + /// A description of the property. + /// The schema of the payload property. + /// The default value of the property. + /// Returns a new instance of the class. + public RestApiOperationPayloadProperty( + string name, + string type, + bool isRequired, + IList properties, + string? description = null, + KernelJsonSchema? schema = null, + object? defaultValue = null) + { + this.Name = name; + this.Type = type; + this.IsRequired = isRequired; + this.Description = description; + this.Properties = properties; + this.Schema = schema; + this.DefaultValue = defaultValue; + } +} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationRunOptions.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationRunOptions.cs similarity index 79% rename from dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationRunOptions.cs rename to dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationRunOptions.cs index 765f99e9ae83..bf716e8f371c 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationRunOptions.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationRunOptions.cs @@ -2,12 +2,12 @@ using System; -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// Options for REST API operation run. /// -internal class RestApiOperationRunOptions +internal sealed class RestApiOperationRunOptions { /// /// Override for REST API operation server URL. 
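The OpenAPI import surface introduced above (ImportPluginFromOpenApiAsync together with OpenApiFunctionExecutionParameters) is consumed roughly as in the following sketch. It assumes an already-built Kernel instance; the plugin name, document URL, excluded operation id, invoked function name, and argument name are placeholders, and since the assembly is marked Experimental("SKEXP0042"), callers may also need to suppress that diagnostic.

using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.OpenApi;

public static class OpenApiImportSketch
{
    public static async Task RunAsync(Kernel kernel)
    {
        // enableDynamicOperationPayload now defaults to true; it is spelled out here only for clarity.
        var parameters = new OpenApiFunctionExecutionParameters(
            enableDynamicOperationPayload: true,
            operationsToExclude: new[] { "DeleteItem" });   // hypothetical operation id to skip

        // Parses the document, creates one KernelFunction per remaining operation,
        // and adds the resulting plugin to kernel.Plugins.
        KernelPlugin plugin = await kernel.ImportPluginFromOpenApiAsync(
            "todoApi",                                      // placeholder plugin name
            new Uri("https://example.com/openapi.json"),    // placeholder OpenAPI v3 document
            parameters);

        // Imported operations are invoked like any other kernel function; "GetItems" and "top" are hypothetical.
        var result = await kernel.InvokeAsync(plugin["GetItems"], new KernelArguments { ["top"] = "10" });
        Console.WriteLine(result);
    }
}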
diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs new file mode 100644 index 000000000000..1717106ba256 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Net.Http; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Provides extension methods for importing plugins exposed through OpenAI's ChatGPT format. +/// +public static class OpenAIPluginKernelExtensions +{ + private static readonly JsonSerializerOptions s_jsonOptionsCache = + new() + { + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }, + }; + + // TODO: Review XML comments + + /// + /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format and imports it into the 's plugin collection. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// The file path to the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task ImportPluginFromOpenAIAsync( + this Kernel kernel, + string pluginName, + string filePath, + OpenAIFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + KernelPlugin plugin = await kernel.CreatePluginFromOpenAIAsync(pluginName, filePath, executionParameters, cancellationToken).ConfigureAwait(false); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format and imports it into the 's plugin collection. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// A local or remote URI referencing the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task ImportPluginFromOpenAIAsync( + this Kernel kernel, + string pluginName, + Uri uri, + OpenAIFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + KernelPlugin plugin = await kernel.CreatePluginFromOpenAIAsync(pluginName, uri, executionParameters, cancellationToken).ConfigureAwait(false); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format and imports it into the 's plugin collection. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// A stream representing the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task ImportPluginFromOpenAIAsync( + this Kernel kernel, + string pluginName, + Stream stream, + OpenAIFunctionExecutionParameters? 
executionParameters = null, + CancellationToken cancellationToken = default) + { + KernelPlugin plugin = await kernel.CreatePluginFromOpenAIAsync(pluginName, stream, executionParameters, cancellationToken).ConfigureAwait(false); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// + /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// The file path to the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task CreatePluginFromOpenAIAsync( + this Kernel kernel, + string pluginName, + string filePath, + OpenAIFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + Verify.ValidPluginName(pluginName, kernel.Plugins); + + var openAIManifest = await DocumentLoader.LoadDocumentFromFilePathAsync( + filePath, + kernel.LoggerFactory.CreateLogger(typeof(OpenAIPluginKernelExtensions)) ?? NullLogger.Instance, + cancellationToken).ConfigureAwait(false); + + return await CreateAsync( + kernel, + openAIManifest, + pluginName, + executionParameters, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// A local or remote URI referencing the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task CreatePluginFromOpenAIAsync( + this Kernel kernel, + string pluginName, + Uri uri, + OpenAIFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + Verify.ValidPluginName(pluginName, kernel.Plugins); + +#pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. + var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); +#pragma warning restore CA2000 + + var openAIManifest = await DocumentLoader.LoadDocumentFromUriAsync( + uri, + kernel.LoggerFactory.CreateLogger(typeof(OpenAIPluginKernelExtensions)) ?? NullLogger.Instance, + httpClient, + null, // auth is not needed when loading the manifest + executionParameters?.UserAgent, + cancellationToken).ConfigureAwait(false); + + return await CreateAsync( + kernel, + openAIManifest, + pluginName, + executionParameters, + cancellationToken).ConfigureAwait(false); + } + + /// + /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plugin name. + /// A stream representing the AI Plugin + /// Plugin execution parameters. + /// The cancellation token. + /// A collection of invocable functions + public static async Task CreatePluginFromOpenAIAsync( + this Kernel kernel, + string pluginName, + Stream stream, + OpenAIFunctionExecutionParameters? 
executionParameters = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + Verify.ValidPluginName(pluginName, kernel.Plugins); + + var openAIManifest = await DocumentLoader.LoadDocumentFromStreamAsync(stream).ConfigureAwait(false); + + return await CreateAsync( + kernel, + openAIManifest, + pluginName, + executionParameters, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + + #region private + + private static async Task CreateAsync( + Kernel kernel, + string openAIManifest, + string pluginName, + OpenAIFunctionExecutionParameters? executionParameters = null, + CancellationToken cancellationToken = default) + { + JsonNode pluginJson; + OpenAIAuthenticationConfig openAIAuthConfig; + try + { + pluginJson = JsonNode.Parse(openAIManifest)!; + openAIAuthConfig = pluginJson["auth"].Deserialize(s_jsonOptionsCache)!; + } + catch (JsonException ex) + { + throw new KernelException("Parsing of Open AI manifest failed.", ex); + } + + if (executionParameters?.AuthCallback is not null) + { + var callback = executionParameters.AuthCallback; + ((OpenApiFunctionExecutionParameters)executionParameters).AuthCallback = async (request, ct) => + { + await callback(request, pluginName, openAIAuthConfig, ct).ConfigureAwait(false); + }; + } + + return await kernel.CreatePluginFromOpenApiAsync( + pluginName, + ParseOpenAIManifestForOpenApiSpecUrl(pluginJson), + executionParameters, + cancellationToken).ConfigureAwait(false); + } + + private static Uri ParseOpenAIManifestForOpenApiSpecUrl(JsonNode pluginJson) + { + string? apiType = pluginJson?["api"]?["type"]?.ToString(); + if (string.IsNullOrWhiteSpace(apiType) || apiType != "openapi") + { + throw new KernelException($"Unexpected API type '{apiType}' found in Open AI manifest."); + } + + string? apiUrl = pluginJson?["api"]?["url"]?.ToString(); + if (string.IsNullOrWhiteSpace(apiUrl)) + { + throw new KernelException("No Open API spec URL found in Open AI manifest."); + } + + try + { + return new Uri(apiUrl); + } + catch (System.UriFormatException ex) + { + throw new KernelException("Invalid Open API spec URI found in Open AI manifest.", ex); + } + } + + #endregion +} diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs new file mode 100644 index 000000000000..369e8e8694cd --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Represents a delegate that defines the method signature for asynchronously authenticating an HTTP request. +/// +/// The to authenticate. +/// The name of the plugin to be authenticated. +/// The used to authenticate. +/// The cancellation token. +/// A representing the asynchronous operation. 
+public delegate Task OpenAIAuthenticateRequestAsyncCallback(HttpRequestMessage request, string pluginName, OpenAIAuthenticationConfig openAIAuthConfig, CancellationToken cancellationToken = default); diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs new file mode 100644 index 000000000000..c4d1ff9caa09 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Represents the authentication section for an OpenAI plugin. +/// +public class OpenAIAuthenticationConfig +{ + /// + /// The type of authentication. + /// + [JsonPropertyName("type")] + public OpenAIAuthenticationType Type { get; set; } = OpenAIAuthenticationType.None; + + /// + /// The type of authorization. + /// + [JsonPropertyName("authorization_type")] + public OpenAIAuthorizationType AuthorizationType { get; set; } + + /// + /// The client URL. + /// + [JsonPropertyName("client_url")] + public Uri? ClientUrl { get; set; } + + /// + /// The authorization URL. + /// + [JsonPropertyName("authorization_url")] + public Uri? AuthorizationUrl { get; set; } + + /// + /// The authorization content type. + /// + [JsonPropertyName("authorization_content_type")] + public string? AuthorizationContentType { get; set; } + + /// + /// The authorization scope. + /// + [JsonPropertyName("scope")] + public string? Scope { get; set; } + + /// + /// The verification tokens. + /// + [JsonPropertyName("verification_tokens")] + public Dictionary? VerificationTokens { get; set; } +} + +/// +/// Represents the type of authentication for an OpenAI plugin. +/// +public enum OpenAIAuthenticationType +{ + /// + /// No authentication. + /// + None, + + /// + /// User HTTP authentication. + /// + UserHttp, + + /// + /// Service HTTP authentication. + /// + ServiceHttp, + + /// + /// OAuth authentication. + /// + OAuth +} + +/// +/// Represents the type of authorization for an OpenAI plugin. +/// +public enum OpenAIAuthorizationType +{ + /// + /// Basic authorization. + /// + Basic, + + /// + /// Bearer authorization. + /// + Bearer +} diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs new file mode 100644 index 000000000000..bc2084fb21fb --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// OpenAI function execution parameters +/// +public class OpenAIFunctionExecutionParameters : OpenApiFunctionExecutionParameters +{ + /// + /// Callback for adding Open AI authentication data to HTTP requests. + /// + public new OpenAIAuthenticateRequestAsyncCallback? AuthCallback { get; set; } + + /// + public OpenAIFunctionExecutionParameters( + HttpClient? httpClient = null, + OpenAIAuthenticateRequestAsyncCallback? authCallback = null, + Uri? serverUrlOverride = null, + string? 
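An illustrative OpenAIAuthenticateRequestAsyncCallback that keys off the deserialized auth section; the "openai" verification-token key is an assumption about how a given manifest is laid out, and the tokens are assumed to deserialize as string key/value pairs:

```csharp
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Plugins.OpenApi;

public static class AuthCallbackSketch
{
    public static Task AuthenticateAsync(
        HttpRequestMessage request,
        string pluginName,
        OpenAIAuthenticationConfig authConfig,
        CancellationToken cancellationToken = default)
    {
        if (authConfig.Type == OpenAIAuthenticationType.ServiceHttp &&
            authConfig.AuthorizationType == OpenAIAuthorizationType.Bearer &&
            authConfig.VerificationTokens is { } tokens &&
            tokens.TryGetValue("openai", out var token))
        {
            request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
        }

        return Task.CompletedTask;
    }
}
```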
userAgent = null, + bool ignoreNonCompliantErrors = false, + bool enableDynamicOperationPayload = true, + bool enablePayloadNamespacing = false) : base(httpClient, null, serverUrlOverride, userAgent, ignoreNonCompliantErrors, enableDynamicOperationPayload, enablePayloadNamespacing) + { + this.AuthCallback = authCallback; + } +} diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/IOpenApiDocumentParser.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/IOpenApiDocumentParser.cs new file mode 100644 index 000000000000..1ed6b08eefc9 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/IOpenApiDocumentParser.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Interface for OpenAPI document parser classes. +/// +internal interface IOpenApiDocumentParser +{ + /// + /// Parses OpenAPI document. + /// + /// Stream containing OpenAPI document to parse. + /// Flag indicating whether to ignore non-compliant errors. + /// If set to true, the parser will not throw exceptions for non-compliant documents. + /// Please note that enabling this option may result in incomplete or inaccurate parsing results. + /// Optional list of operations not to import, e.g. in case they are not supported + /// The cancellation token. + /// List of rest operations. + Task> ParseAsync( + Stream stream, + bool ignoreNonCompliantErrors = false, + IList? operationsToExclude = null, + CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs new file mode 100644 index 000000000000..392894abc8dc --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs @@ -0,0 +1,377 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.OpenApi.Any; +using Microsoft.OpenApi.Models; +using Microsoft.OpenApi.Readers; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Parser for OpenAPI documents. +/// +internal sealed class OpenApiDocumentParser : IOpenApiDocumentParser +{ + /// + /// Initializes a new instance of the class. + /// + /// The to use for logging. If null, no logging will be performed. + public OpenApiDocumentParser(ILoggerFactory? loggerFactory = null) + { + this._logger = loggerFactory?.CreateLogger(typeof(OpenApiDocumentParser)) ?? NullLogger.Instance; + } + + /// + public async Task> ParseAsync( + Stream stream, + bool ignoreNonCompliantErrors = false, + IList? 
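A sketch of the full option set on OpenAIFunctionExecutionParameters; all values are placeholders. The `new` AuthCallback hides the plain OpenAPI callback on the base class, so OpenAI plugins get the richer delegate while the remaining OpenAPI options still apply:

```csharp
using System;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Plugins.OpenApi;

using var httpClient = new HttpClient();

var parameters = new OpenAIFunctionExecutionParameters(
    httpClient: httpClient,
    authCallback: (request, pluginName, authConfig, cancellationToken) => Task.CompletedTask,
    serverUrlOverride: new Uri("https://internal.example.com"),
    userAgent: "my-app/1.0",
    ignoreNonCompliantErrors: true,
    enableDynamicOperationPayload: true,
    enablePayloadNamespacing: false);
```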
operationsToExclude = null, + CancellationToken cancellationToken = default) + { + var jsonObject = await this.DowngradeDocumentVersionToSupportedOneAsync(stream, cancellationToken).ConfigureAwait(false); + + using var memoryStream = new MemoryStream(JsonSerializer.SerializeToUtf8Bytes(jsonObject, JsonOptionsCache.WriteIndented)); + + var result = await this._openApiReader.ReadAsync(memoryStream, cancellationToken).ConfigureAwait(false); + + this.AssertReadingSuccessful(result, ignoreNonCompliantErrors); + + return ExtractRestApiOperations(result.OpenApiDocument, operationsToExclude); + } + + #region private + + /// + /// Max depth to traverse down OpenAPI schema to discover payload properties. + /// + private const int PayloadPropertiesHierarchyMaxDepth = 10; + + /// + /// Name of property that contains OpenAPI document version. + /// + private const string OpenApiVersionPropertyName = "openapi"; + + /// + /// Latest supported version of OpenAPI document. + /// + private static readonly Version s_latestSupportedVersion = new(3, 0, 1); + + /// + /// List of supported Media Types. + /// + private static readonly List s_supportedMediaTypes = new() + { + "application/json", + "text/plain" + }; + + private readonly OpenApiStreamReader _openApiReader = new(); + private readonly ILogger _logger; + + /// + /// Downgrades the version of an OpenAPI document to the latest supported one - 3.0.1. + /// This class relies on Microsoft.OpenAPI.NET library to work with OpenAPI documents. + /// The library, at the moment, does not support 3.1 spec, and the latest supported version is 3.0.1. + /// There's an open issue tracking the support progress - https://github.com/microsoft/OpenAPI.NET/issues/795 + /// This method should be removed/revised as soon the support is added. + /// + /// The original OpenAPI document stream. + /// The cancellation token. + /// OpenAPI document with downgraded document version. + private async Task DowngradeDocumentVersionToSupportedOneAsync(Stream stream, CancellationToken cancellationToken) + { + var jsonObject = await ConvertContentToJsonAsync(stream, cancellationToken).ConfigureAwait(false) ?? throw new KernelException("Parsing of OpenAPI document failed."); + if (!jsonObject.TryGetPropertyValue(OpenApiVersionPropertyName, out var propertyNode)) + { + // The document is either malformed or has 2.x version that specifies document version in the 'swagger' property rather than in the 'openapi' one. + return jsonObject; + } + + if (propertyNode is not JsonValue value) + { + // The 'openapi' property has unexpected type. + return jsonObject; + } + + if (!Version.TryParse(value.ToString(), out var version)) + { + // The 'openapi' property is malformed. + return jsonObject; + } + + if (version > s_latestSupportedVersion) + { + jsonObject[OpenApiVersionPropertyName] = s_latestSupportedVersion.ToString(); + } + + return jsonObject; + } + + /// + /// Converts YAML content to JSON content. + /// The method uses SharpYaml library that comes as a not-direct dependency of Microsoft.OpenAPI.NET library. + /// Should be replaced later when there's more convenient way to convert YAML content to JSON one. + /// + /// The YAML/JSON content stream. + /// The to monitor for cancellation requests. The default is . + /// JSON content stream. 
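A standalone sketch of the downgrade logic, independent of the parser: if the 'openapi' property parses as a version newer than 3.0.1, it is rewritten so Microsoft.OpenApi.Readers can load the document.

```csharp
using System;
using System.Text.Json.Nodes;

var json = JsonNode.Parse(@"{ ""openapi"": ""3.1.0"", ""info"": { ""title"": ""demo"" } }")!.AsObject();
var latestSupported = new Version(3, 0, 1);

if (json.TryGetPropertyValue("openapi", out var node) &&
    node is JsonValue value &&
    Version.TryParse(value.ToString(), out var version) &&
    version > latestSupported)
{
    // Rewrite the declared version; the rest of the document is left untouched.
    json["openapi"] = latestSupported.ToString();
}

Console.WriteLine(json["openapi"]); // 3.0.1
```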
+ private static async Task ConvertContentToJsonAsync(Stream stream, CancellationToken cancellationToken = default) + { + var serializer = new SharpYaml.Serialization.Serializer(); + + var obj = serializer.Deserialize(stream); + + using var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(obj))); + + return await JsonSerializer.DeserializeAsync(memoryStream, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + /// + /// Parses an OpenAPI document and extracts REST API operations. + /// + /// The OpenAPI document. + /// Optional list of operations not to import, e.g. in case they are not supported + /// List of Rest operations. + private static List ExtractRestApiOperations(OpenApiDocument document, IList? operationsToExclude = null) + { + var result = new List(); + + var serverUrl = document.Servers.FirstOrDefault()?.Url; + + foreach (var pathPair in document.Paths) + { + var operations = CreateRestApiOperations(serverUrl, pathPair.Key, pathPair.Value, operationsToExclude); + + result.AddRange(operations); + } + + return result; + } + + /// + /// Creates REST API operation. + /// + /// The server url. + /// Rest resource path. + /// Rest resource metadata. + /// Optional list of operations not to import, e.g. in case they are not supported + /// Rest operation. + private static List CreateRestApiOperations(string? serverUrl, string path, OpenApiPathItem pathItem, IList? operationsToExclude = null) + { + var operations = new List(); + + foreach (var operationPair in pathItem.Operations) + { + var method = operationPair.Key.ToString(); + + var operationItem = operationPair.Value; + + if (operationsToExclude != null && operationsToExclude.Contains(operationItem.OperationId, StringComparer.OrdinalIgnoreCase)) + { + continue; + } + + var operation = new RestApiOperation( + operationItem.OperationId, + string.IsNullOrEmpty(serverUrl) ? null : new Uri(serverUrl), + path, + new HttpMethod(method), + string.IsNullOrEmpty(operationItem.Description) ? operationItem.Summary : operationItem.Description, + CreateRestApiOperationParameters(operationItem.OperationId, operationItem.Parameters), + CreateRestApiOperationPayload(operationItem.OperationId, operationItem.RequestBody), + CreateRestApiOperationExpectedResponses(operationItem.Responses).ToDictionary(item => item.Item1, item => item.Item2) + ); + + operations.Add(operation); + } + + return operations; + } + + /// + /// Creates REST API operation parameters. + /// + /// The operation id. + /// The OpenAPI parameters. + /// The parameters. 
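A sketch of the YAML-to-JSON round trip the converter relies on: SharpYaml deserializes the document into plain objects, and System.Text.Json re-serializes them; the YAML content below is a placeholder.

```csharp
using System;
using System.IO;
using System.Text;
using System.Text.Json;

var yaml = "openapi: 3.0.1\ninfo:\n  title: demo\n  version: 1.0.0\n";
using var stream = new MemoryStream(Encoding.UTF8.GetBytes(yaml));

// SharpYaml ships as an indirect dependency of Microsoft.OpenApi.Readers.
var serializer = new SharpYaml.Serialization.Serializer();
var document = serializer.Deserialize(stream);

// Prints the same document as JSON, ready to be parsed into a JsonObject.
Console.WriteLine(JsonSerializer.Serialize(document));
```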
+ private static List CreateRestApiOperationParameters(string operationId, IList parameters) + { + var result = new List(); + + foreach (var parameter in parameters) + { + if (parameter.In == null) + { + throw new KernelException($"Parameter location of {parameter.Name} parameter of {operationId} operation is undefined."); + } + + if (parameter.Style == null) + { + throw new KernelException($"Parameter style of {parameter.Name} parameter of {operationId} operation is undefined."); + } + + var restParameter = new RestApiOperationParameter( + parameter.Name, + parameter.Schema.Type, + parameter.Required, + parameter.Explode, + (RestApiOperationParameterLocation)Enum.Parse(typeof(RestApiOperationParameterLocation), parameter.In.ToString()!), + (RestApiOperationParameterStyle)Enum.Parse(typeof(RestApiOperationParameterStyle), parameter.Style.ToString()!), + parameter.Schema.Items?.Type, + GetParameterValue(parameter.Schema.Default), + parameter.Description, + parameter.Schema.ToJsonSchema() + ); + + result.Add(restParameter); + } + + return result; + } + + /// + /// Creates REST API operation payload. + /// + /// The operation id. + /// The OpenAPI request body. + /// The REST API operation payload. + private static RestApiOperationPayload? CreateRestApiOperationPayload(string operationId, OpenApiRequestBody requestBody) + { + if (requestBody?.Content == null) + { + return null; + } + + var mediaType = s_supportedMediaTypes.FirstOrDefault(smt => requestBody.Content.ContainsKey(smt)) ?? throw new KernelException($"Neither of the media types of {operationId} is supported."); + var mediaTypeMetadata = requestBody.Content[mediaType]; + + var payloadProperties = GetPayloadProperties(operationId, mediaTypeMetadata.Schema, mediaTypeMetadata.Schema?.Required ?? new HashSet()); + + return new RestApiOperationPayload(mediaType, payloadProperties, requestBody.Description, mediaTypeMetadata?.Schema?.ToJsonSchema()); + } + + private static IEnumerable<(string, RestApiOperationExpectedResponse)> CreateRestApiOperationExpectedResponses(OpenApiResponses responses) + { + foreach (var response in responses) + { + var mediaType = s_supportedMediaTypes.FirstOrDefault(smt => response.Value.Content.ContainsKey(smt)); + if (mediaType is not null) + { + var matchingSchema = response.Value.Content[mediaType].Schema; + var description = response.Value.Description ?? matchingSchema?.Description ?? string.Empty; + + yield return (response.Key, new RestApiOperationExpectedResponse(description, mediaType, matchingSchema?.ToJsonSchema())); + } + } + } + + /// + /// Returns REST API operation payload properties. + /// + /// The operation id. + /// An OpenAPI document schema representing request body properties. + /// List of required properties. + /// Current level in OpenAPI schema. + /// The REST API operation payload properties. + private static List GetPayloadProperties(string operationId, OpenApiSchema? 
schema, ISet requiredProperties, + int level = 0) + { + if (schema == null) + { + return new List(); + } + + if (level > PayloadPropertiesHierarchyMaxDepth) + { + throw new KernelException($"Max level {PayloadPropertiesHierarchyMaxDepth} of traversing payload properties of {operationId} operation is exceeded."); + } + + var result = new List(); + + foreach (var propertyPair in schema.Properties) + { + var propertyName = propertyPair.Key; + + var propertySchema = propertyPair.Value; + + var property = new RestApiOperationPayloadProperty( + propertyName, + propertySchema.Type, + requiredProperties.Contains(propertyName), + GetPayloadProperties(operationId, propertySchema, requiredProperties, level + 1), + propertySchema.Description, + propertySchema.ToJsonSchema(), + GetParameterValue(propertySchema.Default)); + + result.Add(property); + } + + return result; + } + + /// + /// Returns parameter value. + /// + /// The value metadata. + /// The parameter value. + private static object? GetParameterValue(IOpenApiAny valueMetadata) + { + if (valueMetadata is not IOpenApiPrimitive value) + { + return null; + } + + return value.PrimitiveType switch + { + PrimitiveType.Integer => ((OpenApiInteger)value).Value, + PrimitiveType.Long => ((OpenApiLong)value).Value, + PrimitiveType.Float => ((OpenApiFloat)value).Value, + PrimitiveType.Double => ((OpenApiDouble)value).Value, + PrimitiveType.String => ((OpenApiString)value).Value, + PrimitiveType.Byte => ((OpenApiByte)value).Value, + PrimitiveType.Binary => ((OpenApiBinary)value).Value, + PrimitiveType.Boolean => ((OpenApiBoolean)value).Value, + PrimitiveType.Date => ((OpenApiDate)value).Value, + PrimitiveType.DateTime => ((OpenApiDateTime)value).Value, + PrimitiveType.Password => ((OpenApiPassword)value).Value, + _ => throw new KernelException($"The value type - {value.PrimitiveType} is not supported."), + }; + } + + /// + /// Asserts the successful reading of OpenAPI document. + /// + /// The reading results to be checked. + /// Flag indicating whether to ignore non-compliant errors. + /// If set to true, the parser will not throw exceptions for non-compliant documents. + /// Please note that enabling this option may result in incomplete or inaccurate parsing results. + /// + private void AssertReadingSuccessful(ReadResult readResult, bool ignoreNonCompliantErrors) + { + if (readResult.OpenApiDiagnostic.Errors.Any()) + { + var message = $"Parsing of '{readResult.OpenApiDocument.Info?.Title}' OpenAPI document complete with the following errors: {string.Join(";", readResult.OpenApiDiagnostic.Errors)}"; + + this._logger.LogWarning("{Message}", message); + + if (!ignoreNonCompliantErrors) + { + throw new KernelException(message); + } + } + } + + #endregion +} diff --git a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs new file mode 100644 index 000000000000..ae85afbfec80 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs @@ -0,0 +1,397 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Runs REST API operation represented by RestApiOperation model class. 
+/// +internal sealed class RestApiOperationRunner +{ + private const string MediaTypeApplicationJson = "application/json"; + private const string MediaTypeTextPlain = "text/plain"; + + private const string DefaultResponseKey = "default"; + private const string WildcardResponseKeyFormat = "{0}XX"; + + /// + /// List of payload builders/factories. + /// + private readonly Dictionary _payloadFactoryByMediaType; + + /// + /// A dictionary containing the content type as the key and the corresponding content serializer as the value. + /// + private static readonly Dictionary s_serializerByContentType = new() + { + { "image", async (content) => await content.ReadAsByteArrayAndTranslateExceptionAsync().ConfigureAwait(false) }, + { "text", async (content) => await content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false) }, + { "application/json", async (content) => await content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false)}, + { "application/xml", async (content) => await content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false)} + }; + + /// + /// An instance of the HttpClient class. + /// + private readonly HttpClient _httpClient; + + /// + /// Delegate for authorizing the HTTP request. + /// + private readonly AuthenticateRequestAsyncCallback _authCallback; + + /// + /// Request-header field containing information about the user agent originating the request + /// + private readonly string? _userAgent; + + /// + /// Determines whether the operation payload is constructed dynamically based on operation payload metadata. + /// If false, the operation payload must be provided via the 'payload' property. + /// + private readonly bool _enableDynamicPayload; + + /// + /// Determines whether payload parameters are resolved from the arguments by + /// full name (parameter name prefixed with the parent property name). + /// + private readonly bool _enablePayloadNamespacing; + + /// + /// Creates an instance of the class. + /// + /// An instance of the HttpClient class. + /// Optional callback for adding auth data to the API requests. + /// Optional request-header field containing information about the user agent originating the request. + /// Determines whether the operation payload is constructed dynamically based on operation payload metadata. + /// If false, the operation payload must be provided via the 'payload' property. + /// + /// Determines whether payload parameters are resolved from the arguments by + /// full name (parameter name prefixed with the parent property name). + public RestApiOperationRunner( + HttpClient httpClient, + AuthenticateRequestAsyncCallback? authCallback = null, + string? userAgent = null, + bool enableDynamicPayload = false, + bool enablePayloadNamespacing = false) + { + this._httpClient = httpClient; + this._userAgent = userAgent ?? HttpHeaderValues.UserAgent; + this._enableDynamicPayload = enableDynamicPayload; + this._enablePayloadNamespacing = enablePayloadNamespacing; + + // If no auth callback provided, use empty function + if (authCallback is null) + { + this._authCallback = (_, __) => Task.CompletedTask; + } + else + { + this._authCallback = authCallback; + } + + this._payloadFactoryByMediaType = new() + { + { MediaTypeApplicationJson, this.BuildJsonPayload }, + { MediaTypeTextPlain, this.BuildPlainTextPayload } + }; + } + + /// + /// Executes the specified asynchronously, using the provided . + /// + /// The REST API operation to execute. + /// The dictionary of arguments to be passed to the operation. 
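Since the runner is internal, a consumer sketch would live in the same assembly or a unit test; the no-op auth callback and user agent below are placeholders:

```csharp
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Plugins.OpenApi;

using var httpClient = new HttpClient();

var runner = new RestApiOperationRunner(
    httpClient,
    authCallback: (request, cancellationToken) => Task.CompletedTask, // no-op auth
    userAgent: "my-app/1.0",
    enableDynamicPayload: true,
    enablePayloadNamespacing: false);
```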
+ /// Options for REST API operation run. + /// The cancellation token. + /// The task execution result. + public Task RunAsync( + RestApiOperation operation, + KernelArguments arguments, + RestApiOperationRunOptions? options = null, + CancellationToken cancellationToken = default) + { + var url = this.BuildsOperationUrl(operation, arguments, options?.ServerUrlOverride, options?.ApiHostUrl); + + var headers = operation.BuildHeaders(arguments); + + var payload = this.BuildOperationPayload(operation, arguments); + + return this.SendAsync(url, operation.Method, headers, payload, operation.Responses.ToDictionary(item => item.Key, item => item.Value.Schema), cancellationToken); + } + + #region private + + /// + /// Sends an HTTP request. + /// + /// The url to send request to. + /// The HTTP request method. + /// Headers to include into the HTTP request. + /// HTTP request payload. + /// The dictionary of expected response schemas. + /// The cancellation token. + /// Response content and content type + private async Task SendAsync( + Uri url, + HttpMethod method, + IDictionary? headers = null, + HttpContent? payload = null, + IDictionary? expectedSchemas = null, + CancellationToken cancellationToken = default) + { + using var requestMessage = new HttpRequestMessage(method, url); + + await this._authCallback(requestMessage, cancellationToken).ConfigureAwait(false); + + if (payload != null) + { + requestMessage.Content = payload; + } + + requestMessage.Headers.Add("User-Agent", !string.IsNullOrWhiteSpace(this._userAgent) + ? this._userAgent + : HttpHeaderValues.UserAgent); + + if (headers != null) + { + foreach (var header in headers) + { + requestMessage.Headers.Add(header.Key, header.Value); + } + } + + using var responseMessage = await this._httpClient.SendWithSuccessCheckAsync(requestMessage, cancellationToken).ConfigureAwait(false); + + var response = await SerializeResponseContentAsync(responseMessage.Content).ConfigureAwait(false); + + response.ExpectedSchema ??= GetExpectedSchema(expectedSchemas, responseMessage.StatusCode); + + return response; + } + + /// + /// Serializes the response content of an HTTP request. + /// + /// The HttpContent object containing the response content to be serialized. + /// The serialized content. + private static async Task SerializeResponseContentAsync(HttpContent content) + { + var contentType = content.Headers.ContentType; + + var mediaType = contentType?.MediaType ?? throw new KernelException("No media type available."); + + // Obtain the content serializer by media type (e.g., text/plain, application/json, image/jpg) + if (!s_serializerByContentType.TryGetValue(mediaType, out var serializer)) + { + // Split the media type into a primary-type and a sub-type + var mediaTypeParts = mediaType.Split('/'); + if (mediaTypeParts.Length != 2) + { + throw new KernelException($"The string `{mediaType}` is not a valid media type."); + } + + var primaryMediaType = mediaTypeParts.First(); + + // Try to obtain the content serializer by the primary type (e.g., text, application, image) + if (!s_serializerByContentType.TryGetValue(primaryMediaType, out serializer)) + { + throw new KernelException($"The content type `{mediaType}` is not supported."); + } + } + + // Serialize response content and return it + var serializedContent = await serializer.Invoke(content).ConfigureAwait(false); + + return new RestApiOperationResponse(serializedContent, contentType!.ToString()); + } + + /// + /// Builds operation payload. + /// + /// The operation. + /// The payload arguments. 
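The serializer lookup can be pictured with this standalone sketch: try the full media type first, then fall back to its primary type (for example, "image/jpeg" falls back to "image").

```csharp
using System;
using System.Collections.Generic;

var serializers = new Dictionary<string, string>
{
    { "image", "byte[]" },
    { "text", "string" },
    { "application/json", "string" }
};

string Resolve(string mediaType)
{
    // Exact media type wins.
    if (serializers.TryGetValue(mediaType, out var kind)) { return kind; }

    // Otherwise fall back to the primary type before the slash.
    var primary = mediaType.Split('/')[0];
    return serializers.TryGetValue(primary, out kind)
        ? kind
        : throw new NotSupportedException($"The content type `{mediaType}` is not supported.");
}

Console.WriteLine(Resolve("image/jpeg"));       // byte[]
Console.WriteLine(Resolve("application/json")); // string
```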
+ /// The HttpContent representing the payload. + private HttpContent? BuildOperationPayload(RestApiOperation operation, IDictionary arguments) + { + if (operation?.Method != HttpMethod.Put && operation?.Method != HttpMethod.Post) + { + return null; + } + + var mediaType = operation.Payload?.MediaType; + if (string.IsNullOrEmpty(mediaType)) + { + if (!arguments.TryGetValue(RestApiOperation.ContentTypeArgumentName, out object? fallback) || fallback is not string mediaTypeFallback) + { + throw new KernelException($"No media type is provided for the {operation.Id} operation."); + } + + mediaType = mediaTypeFallback; + } + + if (!this._payloadFactoryByMediaType.TryGetValue(mediaType!, out var payloadFactory)) + { + throw new KernelException($"The media type {mediaType} of the {operation.Id} operation is not supported by {nameof(RestApiOperationRunner)}."); + } + + return payloadFactory.Invoke(operation.Payload, arguments); + } + + /// + /// Builds "application/json" payload. + /// + /// The payload meta-data. + /// The payload arguments. + /// The HttpContent representing the payload. + private HttpContent BuildJsonPayload(RestApiOperationPayload? payloadMetadata, IDictionary arguments) + { + // Build operation payload dynamically + if (this._enableDynamicPayload) + { + if (payloadMetadata == null) + { + throw new KernelException("Payload can't be built dynamically due to the missing payload metadata."); + } + + var payload = this.BuildJsonObject(payloadMetadata.Properties, arguments); + + return new StringContent(payload.ToJsonString(), Encoding.UTF8, MediaTypeApplicationJson); + } + + // Get operation payload content from the 'payload' argument if dynamic payload building is not required. + if (!arguments.TryGetValue(RestApiOperation.PayloadArgumentName, out object? argument) || argument is not string content) + { + throw new KernelException($"No payload is provided by the argument '{RestApiOperation.PayloadArgumentName}'."); + } + + return new StringContent(content, Encoding.UTF8, MediaTypeApplicationJson); + } + + /// + /// Builds a JSON object from a list of RestAPI operation payload properties. + /// + /// The properties. + /// The arguments. + /// The namespace to add to the property name. + /// The JSON object. + private JsonObject BuildJsonObject(IList properties, IDictionary arguments, string? propertyNamespace = null) + { + var result = new JsonObject(); + + foreach (var propertyMetadata in properties) + { + var argumentName = this.GetArgumentNameForPayload(propertyMetadata.Name, propertyNamespace); + + if (propertyMetadata.Type == "object") + { + var node = this.BuildJsonObject(propertyMetadata.Properties, arguments, argumentName); + result.Add(propertyMetadata.Name, node); + continue; + } + + if (arguments.TryGetValue(argumentName, out object? propertyValue) && propertyValue is not null) + { + result.Add(propertyMetadata.Name, OpenApiTypeConverter.Convert(propertyMetadata.Name, propertyMetadata.Type, propertyValue)); + continue; + } + + if (propertyMetadata.IsRequired) + { + throw new KernelException($"No argument is found for the '{propertyMetadata.Name}' payload property."); + } + } + + return result; + } + + /// + /// Gets the expected schema for the specified status code. + /// + /// The dictionary of expected response schemas. + /// The status code. + /// The expected schema for the given status code. + private static KernelJsonSchema? GetExpectedSchema(IDictionary? expectedSchemas, HttpStatusCode statusCode) + { + KernelJsonSchema? 
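A conceptual, self-contained sketch of dynamic payload building: each leaf property is looked up in the arguments by name (prefixed with its parent when namespacing is enabled) and emitted into a JsonObject; the property names are hypothetical.

```csharp
using System;
using System.Collections.Generic;
using System.Text.Json.Nodes;

var arguments = new Dictionary<string, object>
{
    { "name", "John" },
    { "address.street", "Main St" } // namespaced key: parent "address", property "street"
};

var payload = new JsonObject
{
    ["name"] = JsonValue.Create((string)arguments["name"]),
    ["address"] = new JsonObject
    {
        ["street"] = JsonValue.Create((string)arguments["address.street"])
    }
};

Console.WriteLine(payload.ToJsonString()); // {"name":"John","address":{"street":"Main St"}}
```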
matchingResponse = null; + if (expectedSchemas is not null) + { + var statusCodeKey = $"{(int)statusCode}"; + + // Exact Match + matchingResponse = expectedSchemas.FirstOrDefault(r => r.Key == statusCodeKey).Value; + + // Wildcard match e.g. 2XX + matchingResponse ??= expectedSchemas.FirstOrDefault(r => r.Key == string.Format(CultureInfo.InvariantCulture, WildcardResponseKeyFormat, statusCodeKey.Substring(0, 1))).Value; + + // Default + matchingResponse ??= expectedSchemas.FirstOrDefault(r => r.Key == DefaultResponseKey).Value; + } + + return matchingResponse; + } + + /// + /// Builds "text/plain" payload. + /// + /// The payload meta-data. + /// The payload arguments. + /// The HttpContent representing the payload. + private HttpContent BuildPlainTextPayload(RestApiOperationPayload? payloadMetadata, IDictionary arguments) + { + if (!arguments.TryGetValue(RestApiOperation.PayloadArgumentName, out object? argument) || argument is not string payload) + { + throw new KernelException($"No argument is found for the '{RestApiOperation.PayloadArgumentName}' payload content."); + } + + return new StringContent(payload, Encoding.UTF8, MediaTypeTextPlain); + } + + /// + /// Retrieves the argument name for a payload property. + /// + /// The name of the property. + /// The namespace to add to the property name (optional). + /// The argument name for the payload property. + private string GetArgumentNameForPayload(string propertyName, string? propertyNamespace) + { + if (!this._enablePayloadNamespacing) + { + return propertyName; + } + + return string.IsNullOrEmpty(propertyNamespace) ? propertyName : $"{propertyNamespace}.{propertyName}"; + } + + /// + /// Builds operation Url. + /// + /// The REST API operation. + /// The operation arguments. + /// Override for REST API operation server url. + /// The URL of REST API host. + /// The operation Url. + private Uri BuildsOperationUrl(RestApiOperation operation, IDictionary arguments, Uri? serverUrlOverride = null, Uri? apiHostUrl = null) + { + var url = operation.BuildOperationUrl(arguments, serverUrlOverride, apiHostUrl); + + var urlBuilder = new UriBuilder(url); + + urlBuilder.Query = operation.BuildQueryString(arguments); + + return urlBuilder.Uri; + } + + #endregion +} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/ArrayParameterValueSerializer.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/ArrayParameterValueSerializer.cs similarity index 75% rename from dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/ArrayParameterValueSerializer.cs rename to dotnet/src/Functions/Functions.OpenApi/Serialization/ArrayParameterValueSerializer.cs index 943f2a3ac150..277f39cb4419 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Builders/Serialization/ArrayParameterValueSerializer.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/ArrayParameterValueSerializer.cs @@ -4,7 +4,7 @@ using System.Text.Json.Nodes; using System.Web; -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; +namespace Microsoft.SemanticKernel.Plugins.OpenApi; /// /// This class provides methods for serializing values of array parameters. @@ -15,7 +15,7 @@ internal static class ArrayParameterValueSerializer /// Serializes the items of an array as separate parameters with the same name. /// /// The name of the parameter. - /// The array containing the items to be serialized. + /// The array containing the items to serialize. /// The delimiter used to separate parameters. 
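The response-schema lookup order is easiest to see in isolation: exact status code first, then the one-digit wildcard ("2XX"), then "default".

```csharp
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;

var expectedSchemas = new Dictionary<string, string>
{
    { "2XX", "success schema" },
    { "default", "fallback schema" }
};

var statusCodeKey = "201";

// Exact match, then wildcard (2XX), then default.
var match = expectedSchemas.FirstOrDefault(r => r.Key == statusCodeKey).Value
    ?? expectedSchemas.FirstOrDefault(r => r.Key == string.Format(CultureInfo.InvariantCulture, "{0}XX", statusCodeKey.Substring(0, 1))).Value
    ?? expectedSchemas.FirstOrDefault(r => r.Key == "default").Value;

Console.WriteLine(match); // success schema
```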
/// A string containing the serialized parameters. public static string SerializeArrayAsSeparateParameters(string name, JsonArray array, string delimiter) @@ -33,16 +33,17 @@ public static string SerializeArrayAsSeparateParameters(string name, JsonArray a /// /// Serializes the items of an array as one parameter with delimited values. /// - /// The array containing the items to be serialized. + /// The array containing the items to serialize. /// The delimiter used to separate items. + /// Flag specifying whether to encode items or not. /// A string containing the serialized parameter. - public static string SerializeArrayAsDelimitedValues(JsonArray array, string delimiter) + public static string SerializeArrayAsDelimitedValues(JsonArray array, string delimiter, bool encode = true) { - var values = new List(); + var values = new List(); foreach (var item in array) { - values.Add(HttpUtility.UrlEncode(item?.ToString())); + values.Add(encode ? HttpUtility.UrlEncode(item?.ToString()) : item?.ToString()); } return string.Join(delimiter, values); diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/FormStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/FormStyleParameterSerializer.cs new file mode 100644 index 000000000000..0f985f3d8197 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/FormStyleParameterSerializer.cs @@ -0,0 +1,62 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json.Nodes; +using System.Web; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Serializes REST API operation parameter of the 'Form' style. +/// +internal static class FormStyleParameterSerializer +{ + /// + /// Serializes a REST API operation `Form` style parameter. + /// + /// The REST API operation parameter to serialize. + /// The parameter argument. + /// The serialized parameter. + public static string Serialize(RestApiOperationParameter parameter, JsonNode argument) + { + const string ArrayType = "array"; + + Verify.NotNull(parameter); + Verify.NotNull(argument); + + if (parameter.Style != RestApiOperationParameterStyle.Form) + { + throw new NotSupportedException($"Unsupported Rest API operation parameter style '{parameter.Style}' for parameter '{parameter.Name}'"); + } + + // Handling parameters of array type. + if (parameter.Type == ArrayType) + { + return SerializeArrayParameter(parameter, argument); + } + + // Handling parameters of primitive and removing extra quotes added by the JsonValue for string values. + return $"{parameter.Name}={HttpUtility.UrlEncode(argument.ToString().Trim('"'))}"; + } + + /// + /// Serializes an array-type parameter. + /// + /// The REST API operation parameter to serialize. + /// The argument value. + /// The serialized parameter string. 
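A short sketch of the new encode flag (an internal helper, so this assumes code in the same assembly or a unit test): URL encoding turns the space into '+', while encode: false leaves the items as-is, which the Simple-style serializer relies on.

```csharp
using System.Text.Json.Nodes;
using Microsoft.SemanticKernel.Plugins.OpenApi;

var array = (JsonArray)JsonNode.Parse(@"[""a b"", ""c""]")!;

var encoded   = ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, ",");                // "a+b,c"
var unencoded = ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, ",", encode: false); // "a b,c"
```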
+ private static string SerializeArrayParameter(RestApiOperationParameter parameter, JsonNode argument) + { + if (argument is not JsonArray array) + { + throw new ArgumentException(parameter.Name, $"Unexpected argument type '{argument.GetType()} with value '{argument}' for parameter type '{parameter.Type}'."); + } + + if (parameter.Expand) + { + return ArrayParameterValueSerializer.SerializeArrayAsSeparateParameters(parameter.Name, array, delimiter: "&"); // id=1&id=2&id=3 + } + + return $"{parameter.Name}={ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: ",")}"; // id=1,2,3 + } +} diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs new file mode 100644 index 000000000000..5b7422950d15 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/OpenApiTypeConverter.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Globalization; +using System.Text.Json; +using System.Text.Json.Nodes; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Provides functionality for converting OpenApi types - https://swagger.io/docs/specification/data-models/data-types/ +/// +internal static class OpenApiTypeConverter +{ + /// + /// Converts the given parameter argument to a JsonNode based on the specified type. + /// + /// The parameter name. + /// The parameter type. + /// The argument to be converted. + /// A JsonNode representing the converted value. + public static JsonNode Convert(string name, string type, object argument) + { + Verify.NotNull(argument); + + try + { + JsonNode? converter = type switch + { + "string" => JsonValue.Create(argument), + "array" => argument switch + { + string s => JsonArray.Parse(s) as JsonArray, + _ => JsonSerializer.SerializeToNode(argument) as JsonArray + }, + "integer" => argument switch + { + string stringArgument => JsonValue.Create(long.Parse(stringArgument, CultureInfo.InvariantCulture)), + byte or sbyte or short or ushort or int or uint or long or ulong => JsonValue.Create(argument), + _ => null + }, + "boolean" => argument switch + { + bool b => JsonValue.Create(b), + string s => JsonValue.Create(bool.Parse(s)), + _ => null + }, + "number" => argument switch + { + string stringArgument when long.TryParse(stringArgument, out var intValue) => JsonValue.Create(intValue), + string stringArgument when double.TryParse(stringArgument, out var doubleValue) => JsonValue.Create(doubleValue), + byte or sbyte or short or ushort or int or uint or long or ulong or float or double or decimal => JsonValue.Create(argument), + _ => null + }, + _ => throw new NotSupportedException($"Unexpected type '{type}' of parameter '{name}' with argument '{argument}'."), + }; + + return converter ?? 
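Illustrative calls into the converter (an internal class, so callable from the same assembly or its tests); string arguments are parsed according to the declared OpenAPI type, and incompatible values surface as ArgumentOutOfRangeException.

```csharp
using Microsoft.SemanticKernel.Plugins.OpenApi;

var count   = OpenApiTypeConverter.Convert("count", "integer", "42");     // JsonValue: 42
var enabled = OpenApiTypeConverter.Convert("enabled", "boolean", "true"); // JsonValue: true
var ids     = OpenApiTypeConverter.Convert("ids", "array", "[1,2,3]");    // JsonArray: [1,2,3]
```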
throw new ArgumentOutOfRangeException(name, argument, $"Argument type '{argument.GetType()}' is not convertible to parameter type '{type}'."); + } + catch (ArgumentException ex) + { + throw new ArgumentOutOfRangeException(name, argument, ex.Message); + } + catch (FormatException ex) + { + throw new ArgumentOutOfRangeException(name, argument, ex.Message); + } + } +} diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/PipeDelimitedStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/PipeDelimitedStyleParameterSerializer.cs new file mode 100644 index 000000000000..3ee0d9c7e79a --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/PipeDelimitedStyleParameterSerializer.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json.Nodes; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Serializes REST API operation parameter of the 'PipeDelimited' style. +/// +internal static class PipeDelimitedStyleParameterSerializer +{ + /// + /// Serializes a REST API operation `PipeDelimited` style parameter. + /// + /// The REST API operation parameter to serialize. + /// The parameter argument. + /// The serialized parameter. + public static string Serialize(RestApiOperationParameter parameter, JsonNode argument) + { + const string ArrayType = "array"; + + Verify.NotNull(parameter); + Verify.NotNull(argument); + + if (parameter.Style != RestApiOperationParameterStyle.PipeDelimited) + { + throw new NotSupportedException($"Unsupported Rest API operation parameter style '{parameter.Style}' for parameter '{parameter.Name}'"); + } + + if (parameter.Type != ArrayType) + { + throw new NotSupportedException($"Unsupported Rest API operation parameter type '{parameter.Type}' for parameter '{parameter.Name}'"); + } + + return SerializeArrayParameter(parameter, argument); + } + + /// + /// Serializes an array-type parameter. + /// + /// The REST API operation parameter to serialize. + /// The argument value. + /// The serialized parameter string. + private static string SerializeArrayParameter(RestApiOperationParameter parameter, JsonNode argument) + { + if (argument is not JsonArray array) + { + throw new ArgumentException(parameter.Name, $"Unexpected argument type '{argument.GetType()} with value '{argument}' for parameter type '{parameter.Type}'."); + } + + if (parameter.Expand) + { + return ArrayParameterValueSerializer.SerializeArrayAsSeparateParameters(parameter.Name, array, delimiter: "&"); //id=1&id=2&id=3 + } + + return $"{parameter.Name}={ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: "|")}"; //id=1|2|3 + } +} diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/SimpleStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/SimpleStyleParameterSerializer.cs new file mode 100644 index 000000000000..98da72fbdb76 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/SimpleStyleParameterSerializer.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json.Nodes; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Serializes REST API operation parameter of the 'Simple' style. +/// +internal static class SimpleStyleParameterSerializer +{ + /// + /// Serializes a REST API operation `Simple` style parameter. + /// + /// The REST API operation parameter to serialize. + /// The parameter argument. + /// The serialized parameter. 
+ public static string Serialize(RestApiOperationParameter parameter, JsonNode argument) + { + const string ArrayType = "array"; + + Verify.NotNull(parameter); + Verify.NotNull(argument); + + if (parameter.Style != RestApiOperationParameterStyle.Simple) + { + throw new NotSupportedException($"Unsupported Rest API operation parameter style '{parameter.Style}' for parameter '{parameter.Name}'"); + } + + // Serializing parameters of array type. + if (parameter.Type == ArrayType) + { + return SerializeArrayParameter(parameter, argument); + } + + // Handling parameters of primitive and removing extra quotes added by the JsonValue for string values. + return argument.ToString().Trim('"'); + } + + /// + /// Serializes an array-type parameter. + /// + /// The REST API operation parameter to serialize. + /// The argument value. + /// The serialized parameter string. + private static string SerializeArrayParameter(RestApiOperationParameter parameter, object argument) + { + if (argument is not JsonArray array) + { + throw new ArgumentException(parameter.Name, $"Unexpected argument type '{argument.GetType()} with value '{argument}' for parameter type '{parameter.Type}'."); + } + + return ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: ",", encode: false); //1,2,3 + } +} diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/SpaceDelimitedStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/SpaceDelimitedStyleParameterSerializer.cs new file mode 100644 index 000000000000..f42911ab0536 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/SpaceDelimitedStyleParameterSerializer.cs @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json.Nodes; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// Serializes REST API operation parameter of the 'SpaceDelimited' style. +/// +internal static class SpaceDelimitedStyleParameterSerializer +{ + /// + /// Serializes a REST API operation `SpaceDelimited` style parameter. + /// + /// The REST API operation parameter to serialize. + /// The parameter argument. + /// The serialized parameter. + public static string Serialize(RestApiOperationParameter parameter, JsonNode argument) + { + const string ArrayType = "array"; + + Verify.NotNull(parameter); + + if (parameter.Style != RestApiOperationParameterStyle.SpaceDelimited) + { + throw new NotSupportedException($"Unsupported Rest API operation parameter style '{parameter.Style}' for parameter '{parameter.Name}'"); + } + + if (parameter.Type != ArrayType) + { + throw new NotSupportedException($"Unsupported Rest API operation parameter type '{parameter.Type}' for parameter '{parameter.Name}'"); + } + + return SerializeArrayParameter(parameter, argument); + } + + /// + /// Serializes an array-type parameter. + /// + /// The REST API operation parameter to serialize. + /// The argument value. + /// The serialized parameter string. 
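For a side-by-side view, a test-style sketch of how the same array argument serializes under each style; these are internal types, and the constructor's named parameters follow the existing unit tests, so they are an assumption.

```csharp
using System.Text.Json.Nodes;
using Microsoft.SemanticKernel.Plugins.OpenApi;

var ids = JsonNode.Parse("[1,2,3]")!;

// Helper producing a query parameter of the requested style; expand: false keeps the
// items in a single delimited value.
RestApiOperationParameter Parameter(RestApiOperationParameterStyle style) => new(
    name: "id", type: "array", isRequired: false, expand: false,
    location: RestApiOperationParameterLocation.Query, style: style, arrayItemType: "integer");

var form  = FormStyleParameterSerializer.Serialize(Parameter(RestApiOperationParameterStyle.Form), ids);                     // id=1,2,3
var pipe  = PipeDelimitedStyleParameterSerializer.Serialize(Parameter(RestApiOperationParameterStyle.PipeDelimited), ids);   // id=1|2|3
var space = SpaceDelimitedStyleParameterSerializer.Serialize(Parameter(RestApiOperationParameterStyle.SpaceDelimited), ids); // id=1%202%203
var plain = SimpleStyleParameterSerializer.Serialize(Parameter(RestApiOperationParameterStyle.Simple), ids);                 // 1,2,3
```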
+ private static string SerializeArrayParameter(RestApiOperationParameter parameter, JsonNode argument) + { + if (argument is not JsonArray array) + { + throw new ArgumentException(parameter.Name, $"Unexpected argument type '{argument.GetType()} with value '{argument}' for parameter type '{parameter.Type}'."); + } + + if (parameter.Expand) + { + return ArrayParameterValueSerializer.SerializeArrayAsSeparateParameters(parameter.Name, array, delimiter: "&"); //id=1&id=2&id=3 + } + + return $"{parameter.Name}={ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: "%20")}"; //id=1%202%203 + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj b/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj index cc3d8351e03c..9cdac0229e72 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj +++ b/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj @@ -1,5 +1,4 @@  - SemanticKernel.Functions.UnitTests SemanticKernel.Functions.UnitTests @@ -9,26 +8,35 @@ enable disable false - CA2007,VSTHRD111 + CA2007,CA1861,CA1869,VSTHRD111,SKEXP0040,SKEXP0041,SKEXP0042 - - - - - + + + + + - - - - - + + + + + + + + + + + + + + + - @@ -37,14 +45,18 @@ runtime; build; native; contentfiles; analyzers; buildtransitive all + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + - - + - + + + - - + \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/Grpc/Extensions/GrpcOperationExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/Grpc/Extensions/GrpcOperationExtensionsTests.cs index 02cc8ae63edd..d0111b548153 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Grpc/Extensions/GrpcOperationExtensionsTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/Grpc/Extensions/GrpcOperationExtensionsTests.cs @@ -2,11 +2,11 @@ using System.Collections.Generic; using System.Linq; -using Microsoft.SemanticKernel.Functions.Grpc.Extensions; -using Microsoft.SemanticKernel.Functions.Grpc.Model; +using Microsoft.SemanticKernel.Plugins.Grpc; +using Microsoft.SemanticKernel.Plugins.Grpc.Model; using Xunit; -namespace SemanticKernel.Functions.UnitTests.Grpc.Extensions; +namespace SemanticKernel.Functions.UnitTests.Grpc; public class GrpcOperationExtensionsTests { diff --git a/dotnet/src/Functions/Functions.UnitTests/Grpc/GrpcRunnerTests.cs b/dotnet/src/Functions/Functions.UnitTests/Grpc/GrpcRunnerTests.cs index 9d8f3d80c6b7..3c5cddb36922 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Grpc/GrpcRunnerTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/Grpc/GrpcRunnerTests.cs @@ -10,8 +10,9 @@ using System.Text.Json.Nodes; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Functions.Grpc; -using Microsoft.SemanticKernel.Functions.Grpc.Model; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.Grpc; +using Microsoft.SemanticKernel.Plugins.Grpc.Model; using Xunit; namespace SemanticKernel.Functions.UnitTests.Grpc; @@ -57,7 +58,7 @@ public async Task ShouldUseAddressProvidedInGrpcOperationAsync() operation.Package = "greet"; operation.Address = "https://fake-random-test-host"; - var arguments = new Dictionary(); + var arguments = new KernelArguments(); arguments.Add("payload", JsonSerializer.Serialize(new { name = "author" })); // Act @@ -87,7 +88,7 @@ public async Task ShouldUseAddressOverrideFromArgumentsAsync() operation.Package = "greet"; operation.Address = "https://fake-random-test-host"; - var 
arguments = new Dictionary(); + var arguments = new KernelArguments(); arguments.Add("payload", JsonSerializer.Serialize(new { name = "author" })); arguments.Add("address", "https://fake-random-test-host-from-args"); @@ -121,7 +122,7 @@ public async Task ShouldRunOperationsWithSimpleDataContractAsync() operation.Package = "greet"; operation.Address = "https://fake-random-test-host"; - var arguments = new Dictionary(); + var arguments = new KernelArguments(); arguments.Add("payload", JsonSerializer.Serialize(new { name = "author" })); // Act diff --git a/dotnet/src/Functions/Functions.UnitTests/Grpc/Protobuf/ProtoDocumentParserV30Tests.cs b/dotnet/src/Functions/Functions.UnitTests/Grpc/Protobuf/ProtoDocumentParserV30Tests.cs index 838fe275ed53..e7d14e3156dd 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Grpc/Protobuf/ProtoDocumentParserV30Tests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/Grpc/Protobuf/ProtoDocumentParserV30Tests.cs @@ -2,7 +2,7 @@ using System.IO; using System.Linq; -using Microsoft.SemanticKernel.Functions.Grpc.Protobuf; +using Microsoft.SemanticKernel.Plugins.Grpc.Protobuf; using SemanticKernel.Functions.UnitTests.Grpc.Protobuf.TestPlugins; using Xunit; diff --git a/dotnet/src/Functions/Functions.UnitTests/Grpc/Protobuf/TestPlugins/ResourcePluginsProvider.cs b/dotnet/src/Functions/Functions.UnitTests/Grpc/Protobuf/TestPlugins/ResourcePluginsProvider.cs index b2405df9f7e5..015b67eace1e 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Grpc/Protobuf/TestPlugins/ResourcePluginsProvider.cs +++ b/dotnet/src/Functions/Functions.UnitTests/Grpc/Protobuf/TestPlugins/ResourcePluginsProvider.cs @@ -11,7 +11,7 @@ internal static class ResourcePluginsProvider /// Loads .proto file from assembly resource. /// /// The resource name. - /// The OpenApi document resource stream. + /// The OpenAPI document resource stream. public static Stream LoadFromResource(string resourceName) { var type = typeof(ResourcePluginsProvider); diff --git a/dotnet/src/Functions/Functions.UnitTests/Markdown/Functions/KernelFunctionMarkdownTests.cs b/dotnet/src/Functions/Functions.UnitTests/Markdown/Functions/KernelFunctionMarkdownTests.cs new file mode 100644 index 000000000000..599f5b6f92a8 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/Markdown/Functions/KernelFunctionMarkdownTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.Markdown.Functions; + +public class KernelFunctionMarkdownTests +{ + [Fact] + public void ItShouldCreatePromptFunctionConfigFromMarkdown() + { + // Arrange + // Act + var model = KernelFunctionMarkdown.CreateFromPromptMarkdown(this._markdown, "TellMeAbout"); + + // Assert + Assert.NotNull(model); + Assert.Equal("TellMeAbout", model.Name); + Assert.Equal("Hello AI, tell me about {{$input}}", model.Template); + Assert.Equal(2, model.ExecutionSettings.Count); + Assert.Equal("gpt4", model.ExecutionSettings["service1"].ModelId); + Assert.Equal("gpt3.5", model.ExecutionSettings["service2"].ModelId); + } + + [Fact] + public void ItShouldCreatePromptFunctionFromMarkdown() + { + // Arrange + var kernel = new Kernel(); + + // Act + var function = KernelFunctionMarkdown.CreateFromPromptMarkdown(this._markdown, "TellMeAbout"); + + // Assert + Assert.NotNull(function); + Assert.Equal("TellMeAbout", function.Name); + } + + private readonly string _markdown = @" +This is a semantic kernel prompt template +```sk.prompt +Hello AI, tell me about {{$input}} +``` +These are AI execution settings +```sk.execution_settings +{ + ""service1"" : { + ""model_id"": ""gpt4"", + ""temperature"": 0.7 + } +} +``` +These are more AI execution settings +```sk.execution_settings +{ + ""service2"" : { + ""model_id"": ""gpt3.5"", + ""temperature"": 0.8 + } +} +``` +"; +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/BasicAuthenticationProviderTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/BasicAuthenticationProviderTests.cs deleted file mode 100644 index a06830633a77..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/BasicAuthenticationProviderTests.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Authentication; - -public class BasicAuthenticationProviderTests -{ - [Fact] - public async Task AuthenticateRequestAsyncSucceedsAsync() - { - // Arrange - var credentials = Guid.NewGuid().ToString(); - using var request = new HttpRequestMessage(); - - var target = new BasicAuthenticationProvider(() => Task.FromResult(credentials)); - - // Act - await target.AuthenticateRequestAsync(request); - - // Assert - Assert.Equal("Basic", request.Headers.Authorization?.Scheme); - Assert.Equal(Convert.ToBase64String(Encoding.UTF8.GetBytes(credentials)), request.Headers.Authorization?.Parameter); - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/BearerAuthenticationProviderTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/BearerAuthenticationProviderTests.cs deleted file mode 100644 index 8e9fd71a0552..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/BearerAuthenticationProviderTests.cs +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net.Http; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Authentication; - -public class BearerAuthenticationProviderTests -{ - [Fact] - public async Task AuthenticateRequestAsyncSucceedsAsync() - { - // Arrange - var token = Guid.NewGuid().ToString(); - using var request = new HttpRequestMessage(); - - var target = new BearerAuthenticationProvider(() => Task.FromResult(token)); - - // Act - await target.AuthenticateRequestAsync(request); - - // Assert - Assert.Equal("Bearer", request.Headers.Authorization?.Scheme); - Assert.Equal(token, request.Headers.Authorization?.Parameter); - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/CustomAuthenticationProviderTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/CustomAuthenticationProviderTests.cs deleted file mode 100644 index 2911f9471e57..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Authentication/CustomAuthenticationProviderTests.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using System.Net.Http; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Authentication; - -public class CustomAuthenticationProviderTests -{ - [Fact] - public async Task AuthenticateRequestAsyncSucceedsAsync() - { - // Arrange - var header = "X-MyHeader"; - var value = Guid.NewGuid().ToString(); - - using var request = new HttpRequestMessage(); - - var target = new CustomAuthenticationProvider(() => Task.FromResult(header), () => Task.FromResult(value)); - - // Act - await target.AuthenticateRequestAsync(request); - - // Assert - Assert.True(request.Headers.Contains(header)); - Assert.Equal(request.Headers.GetValues(header).FirstOrDefault(), value); - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/QueryStringBuilderTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/QueryStringBuilderTests.cs deleted file mode 100644 index 366f6f8eee54..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/QueryStringBuilderTests.cs +++ /dev/null @@ -1,505 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Builders; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Builders; -public class QueryStringBuilderTests -{ - [Fact] - public void ShouldAddQueryStringParametersAndUseValuesFromArguments() - { - // Arrange - var firstParameterMetadata = new RestApiOperationParameter( - name: "p1", - type: "fake_type", - isRequired: true, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form); - - var secondParameterMetadata = new RestApiOperationParameter( - name: "p2", - type: "fake_type", - isRequired: true, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form); - - var operation = new RestApiOperation( - "fake_id", - new Uri("https://fake-random-test-host"), - "/", - HttpMethod.Get, - "fake_description", - new List { firstParameterMetadata, secondParameterMetadata }, - new Dictionary()); - - var arguments = new Dictionary - { - { "p1", "v1" }, - { "p2", "v2" } - }; - - // Act - var queryString = operation.BuildQueryString(arguments); - - // Assert - Assert.Equal("p1=v1&p2=v2", queryString); - } - - [Fact] - public void ShouldSkipNotRequiredQueryStringParametersIfTheirArgumentsMissing() - { - // Arrange - var firstParameterMetadata = new RestApiOperationParameter( - name: "p1", - type: "fake_type", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form); - - var secondParameterMetadata = new RestApiOperationParameter( - name: "p2", - type: "fake_type", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form); - - var operation = new RestApiOperation( - "fake_id", - new Uri("https://fake-random-test-host"), - "/", - HttpMethod.Get, - "fake_description", - new List { firstParameterMetadata, secondParameterMetadata }, - new Dictionary()); - - var arguments = new Dictionary - { - { "p2", "v2" } - }; - - // Act - var queryString = operation.BuildQueryString(arguments); - - // Assert - Assert.Equal("p2=v2", queryString); - } - - [Fact] - public void ShouldThrowExceptionIfNoValueIsProvideForRequiredQueryStringParameter() - { - // Arrange - var firstParameterMetadata = new RestApiOperationParameter( - name: "p1", - type: "fake_type", - isRequired: true, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form); - - var secondParameterMetadata = new RestApiOperationParameter( - name: "p2", - type: "fake_type", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form); - - var operation = new RestApiOperation( - "fake_id", - new Uri("https://fake-random-test-host"), - "/", - HttpMethod.Get, - "fake_description", - new List { firstParameterMetadata, secondParameterMetadata }, - new Dictionary()); - - var arguments = new Dictionary - { - { "p2", "v2" } - }; - - //Act and assert - Assert.Throws(() => operation.BuildQueryString(arguments)); - } - - [Theory] - [InlineData(":", "%3a")] - [InlineData("/", "%2f")] - [InlineData("?", "%3f")] - [InlineData("#", "%23")] - public void ItShouldEncodeSpecialSymbolsInQueryStringValues(string specialSymbol, string 
encodedEquivalent) - { - // Arrange - var metadata = new List - { - new RestApiOperationParameter( - name: "p1", - type: "string", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form) - }; - - var arguments = new Dictionary - { - { "p1", $"p1_value{specialSymbol}" } - }; - - var operation = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var queryString = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(queryString); - - Assert.EndsWith(encodedEquivalent, queryString, StringComparison.Ordinal); - } - - [Fact] - public void ItShouldCreateAmpersandSeparatedParameterPerArrayItemForFormStyleParameters() - { - // Arrange - var metadata = new List - { - new RestApiOperationParameter( - name: "p1", - type: "array", - isRequired: false, - expand: true, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form, - arrayItemType: "string"), - new RestApiOperationParameter( - name: "p2", - type: "array", - isRequired: false, - expand: true, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form, - arrayItemType: "integer") - }; - - var arguments = new Dictionary - { - { "p1", "[\"a\",\"b\",\"c\"]" }, - { "p2", "[1,2,3]" } - }; - - var operation = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var result = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal("p1=a&p1=b&p1=c&p2=1&p2=2&p2=3", result); - } - - [Fact] - public void ItShouldCreateParameterWithCommaSeparatedValuePerArrayItemForFormStyleParameters() - { - // Arrange - var metadata = new List - { - new RestApiOperationParameter( - name: "p1", - type: "array", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form, - arrayItemType: "string"), - new RestApiOperationParameter( - name: "p2", - type: "array", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form, - arrayItemType: "integer") - }; - - var arguments = new Dictionary - { - { "p1", "[\"a\",\"b\",\"c\"]" }, - { "p2", "[1,2,3]" } - }; - - var operation = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var result = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal("p1=a,b,c&p2=1,2,3", result); - } - - [Fact] - public void ItShouldCreateParameterForPrimitiveValuesForFormStyleParameters() - { - // Arrange - var metadata = new List - { - new RestApiOperationParameter( - name: "p1", - type: "string", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form, - arrayItemType: "string"), - new RestApiOperationParameter( - name: "p2", - type: "boolean", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.Form) - }; - - var arguments = new Dictionary - { - { "p1", "v1" }, - { "p2", "true" } - }; - - var operation = new RestApiOperation("fake_id", new 
Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var result = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal("p1=v1&p2=true", result); - } - - [Fact] - public void ItShouldCreateAmpersandSeparatedParameterPerArrayItemForSpaceDelimitedStyleParameters() - { - // Arrange - var metadata = new List - { - new RestApiOperationParameter( - name: "p1", - type: "array", - isRequired: false, - expand: true, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.SpaceDelimited, - arrayItemType: "string"), - new RestApiOperationParameter( - name: "p2", - type: "array", - isRequired: false, - expand: true, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.SpaceDelimited, - arrayItemType: "integer") - }; - - var arguments = new Dictionary - { - { "p1", "[\"a\",\"b\",\"c\"]" }, - { "p2", "[1,2,3]" } - }; - - var operation = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var result = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal("p1=a&p1=b&p1=c&p2=1&p2=2&p2=3", result); - } - - [Fact] - public void ItShouldCreateParameterWithSpaceSeparatedValuePerArrayItemForSpaceDelimitedStyleParameters() - { - // Arrange - var metadata = new List - { - new RestApiOperationParameter( - name: "p1", - type: "array", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.SpaceDelimited, - arrayItemType: "string"), - new RestApiOperationParameter( - name: "p2", - type: "array", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.SpaceDelimited, - arrayItemType: "integer") - }; - - var arguments = new Dictionary - { - { "p1", "[\"a\",\"b\",\"c\"]" }, - { "p2", "[1,2,3]" } - }; - - var operation = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var result = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal("p1=a%20b%20c&p2=1%202%203", result); - } - - [Fact] - public void ItShouldCreateAmpersandSeparatedParameterPerArrayItemForPipeDelimitedStyleParameters() - { - // Arrange - var metadata = new List - { - new RestApiOperationParameter( - name: "p1", - type: "array", - isRequired: false, - expand: true, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.PipeDelimited, - arrayItemType: "string"), - new RestApiOperationParameter( - name: "p2", - type: "array", - isRequired: false, - expand: true, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.PipeDelimited, - arrayItemType: "integer") - }; - - var arguments = new Dictionary - { - { "p1", "[\"a\",\"b\",\"c\"]" }, - { "p2", "[1,2,3]" } - }; - - var operation = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var result = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal("p1=a&p1=b&p1=c&p2=1&p2=2&p2=3", result); - } - - [Fact] - public void 
ItShouldCreateParameterWithPipeSeparatedValuePerArrayItemForPipeDelimitedStyleParameters() - { - // Arrange - var metadata = new List - { - new RestApiOperationParameter( - name: "p1", - type: "array", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.PipeDelimited, - arrayItemType: "string"), - new RestApiOperationParameter( - name: "p2", - type: "array", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - style: RestApiOperationParameterStyle.PipeDelimited, - arrayItemType: "integer") - }; - - var arguments = new Dictionary - { - { "p1", "[\"a\",\"b\",\"c\"]" }, - { "p2", "[1,2,3]" } - }; - - var operation = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var result = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal("p1=a|b|c&p2=1|2|3", result); - } - - [Fact] - public void ItShouldMixAndMatchParametersOfDifferentTypesAndStyles() - { - // Arrange - var metadata = new List - { - //'Form' style array parameter with comma separated values - new RestApiOperationParameter(name: "p1", type: "array", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.Form, arrayItemType: "string"), - - //'Form' style primitive boolean parameter - new RestApiOperationParameter(name: "p2", type: "boolean", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.Form), - - //'Form' style array parameter with parameter per array item - new RestApiOperationParameter(name : "p3", type : "array", isRequired : true, expand : true, location : RestApiOperationParameterLocation.Query, style : RestApiOperationParameterStyle.Form), - - //'SpaceDelimited' style array parameter with space separated values - new RestApiOperationParameter(name : "p4", type : "array", isRequired : true, expand : false, location : RestApiOperationParameterLocation.Query, style : RestApiOperationParameterStyle.SpaceDelimited), - - //'SpaceDelimited' style array parameter with parameter per array item - new RestApiOperationParameter(name : "p5", type : "array", isRequired : true, expand : true, location : RestApiOperationParameterLocation.Query, style : RestApiOperationParameterStyle.SpaceDelimited), - - //'PipeDelimited' style array parameter with pipe separated values - new RestApiOperationParameter(name : "p6", type : "array", isRequired : true, expand : false, location : RestApiOperationParameterLocation.Query, style : RestApiOperationParameterStyle.PipeDelimited), - - //'PipeDelimited' style array parameter with parameter per array item - new RestApiOperationParameter(name : "p7", type : "array", isRequired : true, expand : true, location : RestApiOperationParameterLocation.Query, style : RestApiOperationParameterStyle.PipeDelimited), - }; - - var arguments = new Dictionary - { - { "p1", "[\"a\",\"b\"]" }, - { "p2", "false" }, - { "p3", "[1,2]" }, - { "p4", "[3,4]" }, - { "p5", "[\"c\",\"d\"]" }, - { "p6", "[5,6]" }, - { "p7", "[\"e\",\"f\"]" } - }; - - var operation = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata, new Dictionary()); - - // Act - var result = operation.BuildQueryString(arguments); - - // Assert - Assert.NotNull(result); - - 
Assert.Equal("p1=a,b&p2=false&p3=1&p3=2&p4=3%204&p5=c&p5=d&p6=5|6&p7=e&p7=f", result); - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Extensions/KernelAIPluginExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Extensions/KernelAIPluginExtensionsTests.cs deleted file mode 100644 index d8d34f1f1f51..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Extensions/KernelAIPluginExtensionsTests.cs +++ /dev/null @@ -1,275 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Net.Mime; -using System.Text; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Functions.OpenAPI.OpenApi; -using Microsoft.SemanticKernel.Orchestration; -using SemanticKernel.Functions.UnitTests.OpenAPI.TestPlugins; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Extensions; - -public sealed class KernelAIPluginExtensionsTests : IDisposable -{ - /// - /// System under test - an instance of OpenApiDocumentParser class. - /// - private readonly OpenApiDocumentParser _sut; - - /// - /// OpenAPI document stream. - /// - private readonly Stream _openApiDocument; - - /// - /// IKernel instance. - /// - private readonly IKernel _kernel; - - /// - /// Creates an instance of a class. - /// - public KernelAIPluginExtensionsTests() - { - this._kernel = KernelBuilder.Create(); - - this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV2_0.json"); - - this._sut = new OpenApiDocumentParser(); - } - - [Fact] - public async Task ItCanIncludeOpenApiOperationParameterTypesIntoFunctionParametersViewAsync() - { - //Act - var plugin = await this._kernel.ImportPluginFunctionsAsync("fakePlugin", this._openApiDocument); - - //Assert - var setSecretFunction = plugin["SetSecret"]; - Assert.NotNull(setSecretFunction); - - var functionView = setSecretFunction.Describe(); - Assert.NotNull(functionView); - - var secretNameParameter = functionView.Parameters.First(p => p.Name == "secret_name"); - Assert.Equal(ParameterViewType.String, secretNameParameter.Type); - - var apiVersionParameter = functionView.Parameters.First(p => p.Name == "api_version"); - Assert.Equal("string", apiVersionParameter?.Type?.ToString()); - - var payloadParameter = functionView.Parameters.First(p => p.Name == "payload"); - Assert.Equal(ParameterViewType.Object, payloadParameter.Type); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public async Task ItUsesServerUrlOverrideIfProvidedAsync(bool removeServersProperty) - { - // Arrange - const string DocumentUri = "http://localhost:3001/openapi.json"; - const string ServerUrlOverride = "https://server-override.com/"; - - var openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV3_0.json"); - - if (removeServersProperty) - { - openApiDocument = OpenApiTestHelper.ModifyOpenApiDocument(openApiDocument, (doc) => - { - doc.Remove("servers"); - }); - } - - using var messageHandlerStub = new HttpMessageHandlerStub(openApiDocument); - using var httpClient = new HttpClient(messageHandlerStub, false); - - var executionParameters = new OpenApiFunctionExecutionParameters { HttpClient = httpClient, ServerUrlOverride = new Uri(ServerUrlOverride) }; - var variables = this.GetFakeContextVariables(); - - // Act - var plugin = 
await this._kernel.ImportPluginFunctionsAsync("fakePlugin", new Uri(DocumentUri), executionParameters); - var setSecretFunction = plugin["SetSecret"]; - - messageHandlerStub.ResetResponse(); - - var result = await this._kernel.RunAsync(setSecretFunction, variables); - - // Assert - Assert.NotNull(setSecretFunction); - - var functionView = setSecretFunction.Describe(); - Assert.NotNull(functionView); - - var serverUrlParameter = functionView.Parameters.First(p => p.Name == "server_url"); - Assert.Equal(ServerUrlOverride, serverUrlParameter.DefaultValue); - - Assert.NotNull(messageHandlerStub.RequestUri); - Assert.StartsWith(ServerUrlOverride, messageHandlerStub.RequestUri.AbsoluteUri, StringComparison.Ordinal); - } - - [Theory] - [InlineData("documentV2_0.json")] - [InlineData("documentV3_0.json")] - public async Task ItUsesServerUrlFromOpenApiDocumentAsync(string documentFileName) - { - // Arrange - const string DocumentUri = "http://localhost:3001/openapi.json"; - const string ServerUrlFromDocument = "https://my-key-vault.vault.azure.net/"; - - var openApiDocument = ResourcePluginsProvider.LoadFromResource(documentFileName); - - using var messageHandlerStub = new HttpMessageHandlerStub(openApiDocument); - using var httpClient = new HttpClient(messageHandlerStub, false); - - var executionParameters = new OpenApiFunctionExecutionParameters { HttpClient = httpClient }; - var variables = this.GetFakeContextVariables(); - - // Act - var plugin = await this._kernel.ImportPluginFunctionsAsync("fakePlugin", new Uri(DocumentUri), executionParameters); - var setSecretFunction = plugin["SetSecret"]; - - messageHandlerStub.ResetResponse(); - - var result = await this._kernel.RunAsync(setSecretFunction, variables); - - // Assert - Assert.NotNull(setSecretFunction); - - var functionView = setSecretFunction.Describe(); - Assert.NotNull(functionView); - - var serverUrlParameter = functionView.Parameters.First(p => p.Name == "server_url"); - Assert.Equal(ServerUrlFromDocument, serverUrlParameter.DefaultValue); - - Assert.NotNull(messageHandlerStub.RequestUri); - Assert.StartsWith(ServerUrlFromDocument, messageHandlerStub.RequestUri.AbsoluteUri, StringComparison.Ordinal); - } - - [Theory] - [InlineData("http://localhost:3001/openapi.json", "http://localhost:3001/", "documentV2_0.json")] - [InlineData("http://localhost:3001/openapi.json", "http://localhost:3001/", "documentV3_0.json")] - [InlineData("https://api.example.com/openapi.json", "https://api.example.com/", "documentV2_0.json")] - [InlineData("https://api.example.com/openapi.json", "https://api.example.com/", "documentV3_0.json")] - [SuppressMessage("Design", "CA1054:URI-like parameters should not be strings", Justification = "Required for test data.")] - public async Task ItUsesOpenApiDocumentHostUrlWhenServerUrlIsNotProvidedAsync(string documentUri, string expectedServerUrl, string documentFileName) - { - // Arrange - var openApiDocument = ResourcePluginsProvider.LoadFromResource(documentFileName); - - using var content = OpenApiTestHelper.ModifyOpenApiDocument(openApiDocument, (doc) => - { - doc.Remove("servers"); - doc.Remove("host"); - doc.Remove("schemes"); - }); - - using var messageHandlerStub = new HttpMessageHandlerStub(content); - using var httpClient = new HttpClient(messageHandlerStub, false); - - var executionParameters = new OpenApiFunctionExecutionParameters { HttpClient = httpClient }; - var variables = this.GetFakeContextVariables(); - - // Act - var plugin = await this._kernel.ImportPluginFunctionsAsync("fakePlugin", new 
Uri(documentUri), executionParameters); - var setSecretFunction = plugin["SetSecret"]; - - messageHandlerStub.ResetResponse(); - - var result = await this._kernel.RunAsync(setSecretFunction, variables); - - // Assert - Assert.NotNull(setSecretFunction); - - var functionView = setSecretFunction.Describe(); - Assert.NotNull(functionView); - - var serverUrlParameter = functionView.Parameters.First(p => p.Name == "server_url"); - Assert.Equal(expectedServerUrl, serverUrlParameter.DefaultValue); - - Assert.NotNull(messageHandlerStub.RequestUri); - Assert.StartsWith(expectedServerUrl, messageHandlerStub.RequestUri.AbsoluteUri, StringComparison.Ordinal); - } - - [Fact] - public async Task ItShouldConvertPluginComplexResponseToStringToSaveItInContextAsync() - { - //Arrange - using var messageHandlerStub = new HttpMessageHandlerStub(); - messageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - using var httpClient = new HttpClient(messageHandlerStub, false); - - var executionParameters = new OpenApiFunctionExecutionParameters(); - executionParameters.HttpClient = httpClient; - - var fakePlugin = new FakePlugin(); - - var openApiPlugins = await this._kernel.ImportPluginFunctionsAsync("fakePlugin", this._openApiDocument, executionParameters); - var fakePlugins = this._kernel.ImportFunctions(fakePlugin); - - var kernel = KernelBuilder.Create(); - - var arguments = new ContextVariables(); - arguments.Add("secret-name", "fake-secret-name"); - arguments.Add("api-version", "fake-api-version"); - - //Act - var res = await kernel.RunAsync(arguments, openApiPlugins["GetSecret"], fakePlugins["DoFakeAction"]); - - //Assert - Assert.NotNull(res); - - var openApiPluginResult = res.FunctionResults.FirstOrDefault(); - Assert.NotNull(openApiPluginResult); - - var result = openApiPluginResult.GetValue(); - - //Check original response - Assert.NotNull(result); - Assert.Equal("fake-content", result.Content); - - //Check the response, converted to a string indirectly through an argument passed to a fake plugin that follows the OpenApi plugin in the pipeline since there's no direct access to the context. - Assert.Equal("fake-content", fakePlugin.ParameterValueFakeMethodCalledWith); - } - - public void Dispose() - { - this._openApiDocument.Dispose(); - } - - #region private ================================================================================ - - private ContextVariables GetFakeContextVariables() - { - var variables = new ContextVariables(); - - variables["secret-name"] = "fake-secret-name"; - variables["api-version"] = "fake-api-version"; - variables["X-API-Version"] = "fake-api-version"; - variables["payload"] = "fake-payload"; - - return variables; - } - - private sealed class FakePlugin - { - public string? ParameterValueFakeMethodCalledWith { get; private set; } - - [SKFunction] - public void DoFakeAction(string parameter) - { - this.ParameterValueFakeMethodCalledWith = parameter; - } - } - - #endregion -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Extensions/RestApiOperationExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Extensions/RestApiOperationExtensionsTests.cs deleted file mode 100644 index f17b2c4ccfbb..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Extensions/RestApiOperationExtensionsTests.cs +++ /dev/null @@ -1,372 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Extensions; -public class RestApiOperationExtensionsTests -{ - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - [InlineData("GET")] - public void ItShouldAddServerUrlParameterWithDefaultValueFromOperation(string method) - { - //Arrange - var payload = CreateTestJsonPayload(); - - var operation = CreateTestOperation(method, payload, new Uri("https://fake-random-test-host")); - - //Act - var parameters = operation.GetParameters(); - - //Assert - Assert.NotNull(parameters); - - var serverUrl = parameters.FirstOrDefault(p => p.Name == "server-url"); - Assert.NotNull(serverUrl); - Assert.Equal("string", serverUrl.Type); - Assert.False(serverUrl.IsRequired); - Assert.Equal("https://fake-random-test-host/", serverUrl.DefaultValue); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - [InlineData("GET")] - public void ItShouldAddServerUrlParameterWithDefaultValueFromOverrideParameter(string method) - { - //Arrange - var payload = CreateTestJsonPayload(); - - var operation = CreateTestOperation(method, payload); - - //Act - var parameters = operation.GetParameters(serverUrlOverride: new Uri("https://fake-random-test-host")); - - //Assert - Assert.NotNull(parameters); - - var serverUrl = parameters.FirstOrDefault(p => p.Name == "server-url"); - Assert.NotNull(serverUrl); - Assert.Equal("string", serverUrl.Type); - Assert.False(serverUrl.IsRequired); - Assert.Equal("https://fake-random-test-host/", serverUrl.DefaultValue); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - public void ItShouldAddPayloadAndContentTypeParametersByDefault(string method) - { - //Arrange - var payload = CreateTestJsonPayload(); - - var operation = CreateTestOperation(method, payload); - - //Act - var parameters = operation.GetParameters(); - - //Assert - Assert.NotNull(parameters); - - var payloadParam = parameters.FirstOrDefault(p => p.Name == "payload"); - Assert.NotNull(payloadParam); - Assert.Equal("object", payloadParam.Type); - Assert.True(payloadParam.IsRequired); - Assert.Equal("REST API request body.", payloadParam.Description); - - var contentTypeParam = parameters.FirstOrDefault(p => p.Name == "content-type"); - Assert.NotNull(contentTypeParam); - Assert.Equal("string", contentTypeParam.Type); - Assert.False(contentTypeParam.IsRequired); - Assert.Equal("Content type of REST API request body.", contentTypeParam.Description); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - public void ItShouldAddPayloadAndContentTypeParametersWhenSpecified(string method) - { - //Arrange - var payload = CreateTestJsonPayload(); - - var operation = CreateTestOperation(method, payload); - - //Act - var parameters = operation.GetParameters(addPayloadParamsFromMetadata: false); - - //Assert - Assert.NotNull(parameters); - - var payloadProp = parameters.FirstOrDefault(p => p.Name == "payload"); - Assert.NotNull(payloadProp); - Assert.Equal("object", payloadProp.Type); - Assert.True(payloadProp.IsRequired); - Assert.Equal("REST API request body.", payloadProp.Description); - - var contentTypeProp = parameters.FirstOrDefault(p => p.Name == "content-type"); - Assert.NotNull(contentTypeProp); - Assert.Equal("string", contentTypeProp.Type); - Assert.False(contentTypeProp.IsRequired); - Assert.Equal("Content type of REST API request 
body.", contentTypeProp.Description); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - public void ItShouldAddPayloadAndContentTypePropertiesForPlainTextContentType(string method) - { - //Arrange - var payload = CreateTestTextPayload(); - - var operation = CreateTestOperation(method, payload); - - //Act - var parameters = operation.GetParameters(); - - //Assert - Assert.NotNull(parameters); - - var payloadParam = parameters.FirstOrDefault(p => p.Name == "payload"); - Assert.NotNull(payloadParam); - Assert.Equal("string", payloadParam.Type); - Assert.True(payloadParam.IsRequired); - Assert.Equal("REST API request body.", payloadParam.Description); - - var contentTypeParam = parameters.FirstOrDefault(p => p.Name == "content-type"); - Assert.NotNull(contentTypeParam); - Assert.Equal("string", contentTypeParam.Type); - Assert.False(contentTypeParam.IsRequired); - Assert.Equal("Content type of REST API request body.", contentTypeParam.Description); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - public void ItShouldAddPayloadAndContentTypePropertiesIfParametersFromPayloadMetadataAreNotRequired(string method) - { - //Arrange - var payload = CreateTestJsonPayload(); - - var operation = CreateTestOperation(method, payload); - - //Act - var parameters = operation.GetParameters(addPayloadParamsFromMetadata: false); - - //Assert - Assert.NotNull(parameters); - - var payloadParam = parameters.FirstOrDefault(p => p.Name == "payload"); - Assert.NotNull(payloadParam); - Assert.Equal("object", payloadParam.Type); - Assert.True(payloadParam.IsRequired); - Assert.Equal("REST API request body.", payloadParam.Description); - - var contentTypeParam = parameters.FirstOrDefault(p => p.Name == "content-type"); - Assert.NotNull(contentTypeParam); - Assert.Equal("string", contentTypeParam.Type); - Assert.False(contentTypeParam.IsRequired); - Assert.Equal("Content type of REST API request body.", contentTypeParam.Description); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - public void ItShouldAddParametersDeclaredInPayloadMetadata(string method) - { - //Arrange - var payload = CreateTestJsonPayload(); - - var operation = CreateTestOperation(method, payload); - - //Act - var parameters = operation.GetParameters(addPayloadParamsFromMetadata: true); - - //Assert - Assert.NotNull(parameters); - - Assert.Equal(6, parameters.Count); //5(props from payload) + 1('server-url' property) - - var name = parameters.FirstOrDefault(p => p.Name == "name"); - Assert.NotNull(name); - Assert.Equal("string", name.Type); - Assert.True(name.IsRequired); - Assert.Equal("The name.", name.Description); - - var landmarks = parameters.FirstOrDefault(p => p.Name == "landmarks"); - Assert.NotNull(landmarks); - Assert.Equal("array", landmarks.Type); - Assert.False(landmarks.IsRequired); - Assert.Equal("The landmarks.", landmarks.Description); - - var leader = parameters.FirstOrDefault(p => p.Name == "leader"); - Assert.NotNull(leader); - Assert.Equal("string", leader.Type); - Assert.True(leader.IsRequired); - Assert.Equal("The leader.", leader.Description); - - var population = parameters.FirstOrDefault(p => p.Name == "population"); - Assert.NotNull(population); - Assert.Equal("integer", population.Type); - Assert.True(population.IsRequired); - Assert.Equal("The population.", population.Description); - - var hasMagicWards = parameters.FirstOrDefault(p => p.Name == "hasMagicWards"); - Assert.NotNull(hasMagicWards); - Assert.Equal("boolean", hasMagicWards.Type); - 
Assert.False(hasMagicWards.IsRequired); - Assert.Null(hasMagicWards.Description); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - public void ItShouldAddNamespaceToParametersDeclaredInPayloadMetadata(string method) - { - //Arrange - var payload = CreateTestJsonPayload(); - - var operation = CreateTestOperation(method, payload); - - //Act - var parameters = operation.GetParameters(addPayloadParamsFromMetadata: true, enablePayloadNamespacing: true); - - //Assert - Assert.NotNull(parameters); - - Assert.Equal(6, parameters.Count); //5(props from payload) + 1('server-url' property) - - var name = parameters.FirstOrDefault(p => p.Name == "name"); - Assert.NotNull(name); - Assert.Equal("string", name.Type); - Assert.True(name.IsRequired); - Assert.Equal("The name.", name.Description); - - var landmarks = parameters.FirstOrDefault(p => p.Name == "location.landmarks"); - Assert.NotNull(landmarks); - Assert.Equal("array", landmarks.Type); - Assert.False(landmarks.IsRequired); - Assert.Equal("The landmarks.", landmarks.Description); - - var leader = parameters.FirstOrDefault(p => p.Name == "rulingCouncil.leader"); - Assert.NotNull(leader); - Assert.Equal("string", leader.Type); - Assert.True(leader.IsRequired); - Assert.Equal("The leader.", leader.Description); - - var population = parameters.FirstOrDefault(p => p.Name == "population"); - Assert.NotNull(population); - Assert.Equal("integer", population.Type); - Assert.True(population.IsRequired); - Assert.Equal("The population.", population.Description); - - var hasMagicWards = parameters.FirstOrDefault(p => p.Name == "hasMagicWards"); - Assert.NotNull(hasMagicWards); - Assert.Equal("boolean", hasMagicWards.Type); - Assert.False(hasMagicWards.IsRequired); - Assert.Null(hasMagicWards.Description); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - public void ItShouldThrowExceptionIfPayloadMetadataDescribingParametersIsMissing(string method) - { - //Arrange - var operation = CreateTestOperation(method, null); - - //Act - Assert.Throws(() => operation.GetParameters(addPayloadParamsFromMetadata: true, enablePayloadNamespacing: true)); - } - - [Fact] - public void ItShouldSetAlternativeNameToParametersForGetOperation() - { - //Arrange - var operation = CreateTestOperation("GET"); - - //Act - var parameters = operation.GetParameters(addPayloadParamsFromMetadata: true); - - //Assert - Assert.NotNull(parameters); - - var serverUrlProp = parameters.FirstOrDefault(p => p.Name == "server-url"); - Assert.NotNull(serverUrlProp); - Assert.Equal("server_url", serverUrlProp.AlternativeName); - } - - [Theory] - [InlineData("PUT")] - [InlineData("POST")] - public void ItShouldSetAlternativeNameToParametersForPutAndPostOperation(string method) - { - //Arrange - var latitude = new RestApiOperationPayloadProperty("location.latitude", "number", false, new List()); - var place = new RestApiOperationPayloadProperty("place", "string", true, new List()); - - var payload = new RestApiOperationPayload("application/json", new[] { place, latitude }); - - var operation = CreateTestOperation(method, payload); - - //Act - var parameters = operation.GetParameters(addPayloadParamsFromMetadata: true); - - //Assert - Assert.NotNull(parameters); - - var serverUrlProp = parameters.FirstOrDefault(p => p.Name == "server-url"); - Assert.NotNull(serverUrlProp); - Assert.Equal("server_url", serverUrlProp.AlternativeName); - - var placeProp = parameters.FirstOrDefault(p => p.Name == "place"); - Assert.NotNull(placeProp); - Assert.Equal("place", 
placeProp.AlternativeName); - - var personNameProp = parameters.FirstOrDefault(p => p.Name == "location.latitude"); - Assert.NotNull(personNameProp); - Assert.Equal("location_latitude", personNameProp.AlternativeName); - } - - private static RestApiOperation CreateTestOperation(string method, RestApiOperationPayload? payload = null, Uri? url = null) - { - return new RestApiOperation( - id: "fake-id", - serverUrl: url, - path: "fake-path", - method: new HttpMethod(method), - description: "fake-description", - parameters: new List(), - headers: new Dictionary(), - payload: payload); - } - - private static RestApiOperationPayload CreateTestJsonPayload() - { - var name = new RestApiOperationPayloadProperty("name", "string", true, new List(), "The name."); - - var leader = new RestApiOperationPayloadProperty("leader", "string", true, new List(), "The leader."); - - var landmarks = new RestApiOperationPayloadProperty("landmarks", "array", false, new List(), "The landmarks."); - var location = new RestApiOperationPayloadProperty("location", "object", true, new[] { landmarks }, "The location."); - - var rulingCouncil = new RestApiOperationPayloadProperty("rulingCouncil", "object", true, new[] { leader }, "The ruling council."); - - var population = new RestApiOperationPayloadProperty("population", "integer", true, new List(), "The population."); - - var hasMagicWards = new RestApiOperationPayloadProperty("hasMagicWards", "boolean", false, new List()); - - return new RestApiOperationPayload("application/json", new[] { name, location, rulingCouncil, population, hasMagicWards }); - } - - private static RestApiOperationPayload CreateTestTextPayload() - { - return new RestApiOperationPayload("text/plain", new List()); - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/JsonPathPluginTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/JsonPathPluginTests.cs deleted file mode 100644 index 6e8993c6fb20..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/JsonPathPluginTests.cs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.SemanticKernel.Functions.OpenAPI; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI; - -public class JsonPathPluginTests -{ - private const string Json = @"{ - 'Stores': [ - 'Lambton Quay', - 'Willis Street' - ], - 'Manufacturers': [ - { - 'Name': 'Acme Co', - 'Products': [ - { - 'Name': 'Anvil', - 'Price': 50 - } - ] - }, - { - 'Name': 'Contoso', - 'Products': [ - { - 'Name': 'Elbow Grease', - 'Price': 99.95 - }, - { - 'Name': 'Headlight Fluid', - 'Price': 4 - } - ] - } - ] -}"; - - [Theory] - [InlineData("$.Manufacturers[0].Products[0].Name", "Anvil")] // single value - [InlineData("$.Manufacturers[0].Products[0].Foo", "")] // no value - public void GetJsonElementValueSucceeds(string jsonPath, string expected) - { - var target = new JsonPathPlugin(); - - string actual = target.GetJsonElementValue(Json, jsonPath); - - Assert.Equal(expected, actual, StringComparer.OrdinalIgnoreCase); - } - - [Theory] - [InlineData("$..Products[?(@.Price >= 50)].Name", "[\"Anvil\",\"Elbow Grease\"]")] // multiple values - [InlineData("$.Manufacturers", - "[[{\"Name\":\"Acme Co\",\"Products\":[{\"Name\":\"Anvil\",\"Price\":50}]},{\"Name\":\"Contoso\",\"Products\":[{\"Name\":\"Elbow Grease\",\"Price\":99.95},{\"Name\":\"Headlight Fluid\",\"Price\":4}]}]]")] // complex value - public void GetJsonPropertyValueSucceeds(string jsonPath, string expected) - { - var target = new JsonPathPlugin(); - - string actual = target.GetJsonElements(Json, jsonPath); - - Assert.Equal(expected, actual, StringComparer.OrdinalIgnoreCase); - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV20Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV20Tests.cs deleted file mode 100644 index 47864d773a18..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV20Tests.cs +++ /dev/null @@ -1,276 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Functions.OpenAPI.OpenApi; -using SemanticKernel.Functions.UnitTests.OpenAPI.TestPlugins; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI; - -public sealed class OpenApiDocumentParserV20Tests : IDisposable -{ - /// - /// System under test - an instance of OpenApiDocumentParser class. - /// - private readonly OpenApiDocumentParser _sut; - - /// - /// OpenAPI document stream. - /// - private readonly Stream _openApiDocument; - - /// - /// Creates an instance of a class. 
- /// - public OpenApiDocumentParserV20Tests() - { - this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV2_0.json"); - - this._sut = new OpenApiDocumentParser(); - } - - [Fact] - public async Task ItCanParsePutOperationBodySuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var putOperation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(putOperation); - - var payload = putOperation.Payload; - Assert.NotNull(payload); - Assert.Equal("application/json", payload.MediaType); - - var properties = payload.Properties; - Assert.NotNull(properties); - Assert.Equal(2, properties.Count); - - var valueProperty = properties.FirstOrDefault(p => p.Name == "value"); - Assert.NotNull(valueProperty); - Assert.True(valueProperty.IsRequired); - Assert.Equal("The value of the secret.", valueProperty.Description); - Assert.Equal("string", valueProperty.Type); - Assert.NotNull(valueProperty.Properties); - Assert.False(valueProperty.Properties.Any()); - - var attributesProperty = properties.FirstOrDefault(p => p.Name == "attributes"); - Assert.NotNull(attributesProperty); - Assert.False(attributesProperty.IsRequired); - Assert.Equal("attributes", attributesProperty.Description); - Assert.Equal("object", attributesProperty.Type); - Assert.NotNull(attributesProperty.Properties); - Assert.True(attributesProperty.Properties.Any()); - - var enabledProperty = attributesProperty.Properties.FirstOrDefault(p => p.Name == "enabled"); - Assert.NotNull(enabledProperty); - Assert.False(enabledProperty.IsRequired); - Assert.Equal("Determines whether the object is enabled.", enabledProperty.Description); - Assert.Equal("boolean", enabledProperty.Type); - Assert.False(enabledProperty.Properties?.Any()); - } - - [Fact] - public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var putOperation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(putOperation); - Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); - Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); - Assert.Equal(HttpMethod.Put, putOperation.Method); - Assert.Equal("/secrets/{secret-name}", putOperation.Path); - - var parameters = putOperation.GetParameters(); - Assert.NotNull(parameters); - Assert.True(parameters.Count >= 5); - - var pathParameter = parameters.Single(p => p.Name == "secret-name"); //'secret-name' path parameter. - Assert.True(pathParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Path, pathParameter.Location); - Assert.Null(pathParameter.DefaultValue); - - var apiVersionParameter = parameters.Single(p => p.Name == "api-version"); //'api-version' query string parameter. - Assert.True(apiVersionParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Query, apiVersionParameter.Location); - Assert.Equal("7.0", apiVersionParameter.DefaultValue); - - var serverUrlParameter = parameters.Single(p => p.Name == "server-url"); //'server-url' artificial parameter. 
- Assert.False(serverUrlParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Path, serverUrlParameter.Location); - Assert.Equal("https://my-key-vault.vault.azure.net/", serverUrlParameter.DefaultValue); - - var payloadParameter = parameters.Single(p => p.Name == "payload"); //'payload' artificial parameter. - Assert.True(payloadParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Body, payloadParameter.Location); - Assert.Null(payloadParameter.DefaultValue); - Assert.Equal("REST API request body.", payloadParameter.Description); - - var contentTypeParameter = parameters.Single(p => p.Name == "content-type"); //'content-type' artificial parameter. - Assert.False(contentTypeParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Body, contentTypeParameter.Location); - Assert.Null(contentTypeParameter.DefaultValue); - Assert.Equal("Content type of REST API request body.", contentTypeParameter.Description); - } - - [Fact] - public async Task ItCanUseOperationSummaryAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "Excuses"); - Assert.NotNull(operation); - Assert.Equal("Turn a scenario into a creative or humorous excuse to send your boss", operation.Description); - } - - [Fact] - public async Task ItCanExtractSimpleTypeHeaderParameterMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert string header parameter metadata - var accept = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "Accept"); - - Assert.Equal("string", accept.Type); - Assert.Equal("application/json", accept.DefaultValue); - Assert.Equal("Indicates which content types, expressed as MIME types, the client is able to understand.", accept.Description); - Assert.False(accept.IsRequired); - - //Assert integer header parameter metadata - var apiVersion = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-API-Version"); - - Assert.Equal("integer", apiVersion.Type); - Assert.Equal("10", apiVersion.DefaultValue); - Assert.Equal("Requested API version.", apiVersion.Description); - Assert.True(apiVersion.IsRequired); - } - - [Fact] - public async Task ItCanExtractCsvStyleHeaderParameterMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert header parameters metadata - var acceptParameter = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-Operation-Csv-Ids"); - - Assert.Null(acceptParameter.DefaultValue); - Assert.False(acceptParameter.IsRequired); - Assert.Equal("array", acceptParameter.Type); - Assert.Equal(RestApiOperationParameterStyle.Simple, acceptParameter.Style); - Assert.Equal("The comma separated list of operation ids.", acceptParameter.Description); - Assert.Equal("string", acceptParameter.ArrayItemType); - } - - [Fact] - public async Task ItCanExtractHeadersSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(operation.Headers); - Assert.Equal(3, operation.Headers.Count); - - Assert.True(operation.Headers.ContainsKey("Accept")); - 
Assert.True(operation.Headers.ContainsKey("X-API-Version")); - Assert.True(operation.Headers.ContainsKey("X-Operation-Csv-Ids")); - } - - [Fact] - public async Task ItCanExtractAllPathsAsOperationsAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.Equal(3, operations.Count); - } - - [Fact] - public async Task ItCanParseOperationHavingTextPlainBodySuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "Excuses"); - Assert.NotNull(operation); - - var payload = operation.Payload; - Assert.NotNull(payload); - Assert.Equal("text/plain", payload.MediaType); - Assert.Equal("excuse event", payload.Description); - - var properties = payload.Properties; - Assert.NotNull(properties); - Assert.Empty(properties); - } - - [Fact] - public async Task ItCanWorkWithDocumentsWithoutHostAndSchemaAttributesAsync() - { - //Arrange - using var stream = OpenApiTestHelper.ModifyOpenApiDocument(this._openApiDocument, (doc) => - { - doc.Remove("host"); - doc.Remove("schemes"); - }); - - //Act - var operations = await this._sut.ParseAsync(stream); - - //Assert - Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); - } - - private static RestApiOperationParameter GetParameterMetadata(IList operations, string operationId, - RestApiOperationParameterLocation location, string name) - { - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == operationId); - Assert.NotNull(operation.Parameters); - Assert.True(operation.Parameters.Any()); - - var parameters = operation.Parameters.Where(p => p.Location == location); - - var parameter = parameters.Single(p => p.Name == name); - Assert.NotNull(parameter); - - return parameter; - } - - public void Dispose() - { - this._openApiDocument.Dispose(); - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV30Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV30Tests.cs deleted file mode 100644 index cfeda81fc71f..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV30Tests.cs +++ /dev/null @@ -1,366 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Microsoft.SemanticKernel.Functions.OpenAPI.OpenApi; -using SemanticKernel.Functions.UnitTests.OpenAPI.TestPlugins; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI; - -public sealed class OpenApiDocumentParserV30Tests : IDisposable -{ - /// - /// System under test - an instance of OpenApiDocumentParser class. - /// - private readonly OpenApiDocumentParser _sut; - - /// - /// OpenAPI document stream. - /// - private readonly Stream _openApiDocument; - - /// - /// Creates an instance of a class. 
- /// - public OpenApiDocumentParserV30Tests() - { - this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV3_0.json"); - - this._sut = new OpenApiDocumentParser(); - } - - [Fact] - public async Task ItCanParsePutOperationBodySuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var putOperation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(putOperation); - - var payload = putOperation.Payload; - Assert.NotNull(payload); - Assert.Equal("application/json", payload.MediaType); - - var properties = payload.Properties; - Assert.NotNull(properties); - Assert.Equal(2, properties.Count); - - var valueProperty = properties.FirstOrDefault(p => p.Name == "value"); - Assert.NotNull(valueProperty); - Assert.True(valueProperty.IsRequired); - Assert.Equal("The value of the secret.", valueProperty.Description); - Assert.Equal("string", valueProperty.Type); - Assert.NotNull(valueProperty.Properties); - Assert.False(valueProperty.Properties.Any()); - - var attributesProperty = properties.FirstOrDefault(p => p.Name == "attributes"); - Assert.NotNull(attributesProperty); - Assert.False(attributesProperty.IsRequired); - Assert.Equal("attributes", attributesProperty.Description); - Assert.Equal("object", attributesProperty.Type); - Assert.NotNull(attributesProperty.Properties); - Assert.True(attributesProperty.Properties.Any()); - - var enabledProperty = attributesProperty.Properties.FirstOrDefault(p => p.Name == "enabled"); - Assert.NotNull(enabledProperty); - Assert.False(enabledProperty.IsRequired); - Assert.Equal("Determines whether the object is enabled.", enabledProperty.Description); - Assert.Equal("boolean", enabledProperty.Type); - Assert.False(enabledProperty.Properties?.Any()); - } - - [Fact] - public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var putOperation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(putOperation); - Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); - Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); - Assert.Equal(HttpMethod.Put, putOperation.Method); - Assert.Equal("/secrets/{secret-name}", putOperation.Path); - - var parameters = putOperation.GetParameters(); - Assert.NotNull(parameters); - Assert.True(parameters.Count >= 5); - - var pathParameter = parameters.Single(p => p.Name == "secret-name"); //'secret-name' path parameter. - Assert.True(pathParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Path, pathParameter.Location); - Assert.Null(pathParameter.DefaultValue); - - var apiVersionParameter = parameters.Single(p => p.Name == "api-version"); //'api-version' query string parameter. - Assert.True(apiVersionParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Query, apiVersionParameter.Location); - Assert.Equal("7.0", apiVersionParameter.DefaultValue); - - var serverUrlParameter = parameters.Single(p => p.Name == "server-url"); //'server-url' artificial parameter. 
- Assert.False(serverUrlParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Path, serverUrlParameter.Location); - Assert.Equal("https://my-key-vault.vault.azure.net/", serverUrlParameter.DefaultValue); - - var payloadParameter = parameters.Single(p => p.Name == "payload"); //'payload' artificial parameter. - Assert.True(payloadParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Body, payloadParameter.Location); - Assert.Null(payloadParameter.DefaultValue); - Assert.Equal("REST API request body.", payloadParameter.Description); - - var contentTypeParameter = parameters.Single(p => p.Name == "content-type"); //'content-type' artificial parameter. - Assert.False(contentTypeParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Body, contentTypeParameter.Location); - Assert.Null(contentTypeParameter.DefaultValue); - Assert.Equal("Content type of REST API request body.", contentTypeParameter.Description); - } - - [Fact] - public async Task ItCanExtractSimpleTypeHeaderParameterMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert string header parameter metadata - var accept = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "Accept"); - - Assert.Equal("string", accept.Type); - Assert.Equal("application/json", accept.DefaultValue); - Assert.Equal("Indicates which content types, expressed as MIME types, the client is able to understand.", accept.Description); - Assert.False(accept.IsRequired); - - //Assert integer header parameter metadata - var apiVersion = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-API-Version"); - - Assert.Equal("integer", apiVersion.Type); - Assert.Equal("10", apiVersion.DefaultValue); - Assert.Equal("Requested API version.", apiVersion.Description); - Assert.True(apiVersion.IsRequired); - } - - [Fact] - public async Task ItCanUseOperationSummaryAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "Excuses"); - Assert.NotNull(operation); - Assert.Equal("Turn a scenario into a creative or humorous excuse to send your boss", operation.Description); - } - - [Fact] - public async Task ItCanExtractCsvStyleHeaderParameterMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert header parameters metadata - var acceptParameter = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-Operation-Csv-Ids"); - - Assert.Null(acceptParameter.DefaultValue); - Assert.False(acceptParameter.IsRequired); - Assert.Equal("array", acceptParameter.Type); - Assert.Equal(RestApiOperationParameterStyle.Simple, acceptParameter.Style); - Assert.Equal("The comma separated list of operation ids.", acceptParameter.Description); - Assert.Equal("string", acceptParameter.ArrayItemType); - } - - [Fact] - public async Task ItCanExtractHeadersSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(operation.Headers); - Assert.Equal(3, operation.Headers.Count); - - Assert.True(operation.Headers.ContainsKey("Accept")); - 
Assert.True(operation.Headers.ContainsKey("X-API-Version")); - Assert.True(operation.Headers.ContainsKey("X-Operation-Csv-Ids")); - } - - [Fact] - public async Task ItCanExtractAllPathsAsOperationsAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.Equal(3, operations.Count); - } - - [Fact] - public async Task ItCanParseOperationHavingTextPlainBodySuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "Excuses"); - Assert.NotNull(operation); - - var payload = operation.Payload; - Assert.NotNull(payload); - Assert.Equal("text/plain", payload.MediaType); - Assert.Equal("excuse event", payload.Description); - - var properties = payload.Properties; - Assert.NotNull(properties); - Assert.Empty(properties); - } - - [Fact] - public async Task ItShouldThrowExceptionForNonCompliantDocumentAsync() - { - // Arrange - var nonComplaintOpenApiDocument = ResourcePluginsProvider.LoadFromResource("nonCompliant_documentV3_0.json"); - - // Act and Assert - await Assert.ThrowsAsync(async () => await this._sut.ParseAsync(nonComplaintOpenApiDocument)); - } - - [Fact] - public async Task ItShouldWorkWithNonCompliantDocumentIfAllowedAsync() - { - // Arrange - var nonComplaintOpenApiDocument = ResourcePluginsProvider.LoadFromResource("nonCompliant_documentV3_0.json"); - - // Act - await this._sut.ParseAsync(nonComplaintOpenApiDocument, ignoreNonCompliantErrors: true); - - // Assert - // The absence of any thrown exceptions serves as evidence of the functionality's success. - } - - [Fact] - public async Task ItCanWorkWithDocumentsWithoutServersAttributeAsync() - { - //Arrange - using var stream = ModifyOpenApiDocument(this._openApiDocument, (doc) => - { - doc.Remove("servers"); - }); - - //Act - var operations = await this._sut.ParseAsync(stream); - - //Assert - Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); - } - - [Fact] - public async Task ItCanWorkWithDocumentsWithEmptyServersAttributeAsync() - { - //Arrange - using var stream = ModifyOpenApiDocument(this._openApiDocument, (doc) => - { - doc["servers"] = new JsonArray(); - }); - - //Act - var operations = await this._sut.ParseAsync(stream); - - //Assert - Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); - } - - [Theory] - [InlineData("explodeFormParam")] - [InlineData("anotherExplodeFormParam")] - public async Task ItShouldSupportsAmpersandSeparatedParametersForFormStyleArrayQueryStringParametersAsync(string parameterName) - { - //Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "GetSecret"); - - var explodeFormParam = operation.Parameters.Single(p => p.Name == parameterName); - - Assert.True(explodeFormParam.Expand); - } - - [Fact] - public async Task ItShouldSupportsCommaSeparatedValuesForFormStyleArrayQueryStringParametersAsync() - { - //Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "GetSecret"); - - var explodeFormParam = operation.Parameters.Single(p => p.Name == "nonExplodeFormParam"); - - Assert.False(explodeFormParam.Expand); - } - - private static MemoryStream ModifyOpenApiDocument(Stream openApiDocument, Action transformer) - { - var json = 
JsonSerializer.Deserialize<JsonObject>(openApiDocument);
-
-        transformer(json!);
-
-        var stream = new MemoryStream();
-
-        JsonSerializer.Serialize(stream, json);
-
-        stream.Seek(0, SeekOrigin.Begin);
-
-        return stream;
-    }
-
-    private static RestApiOperationParameter GetParameterMetadata(IList<RestApiOperation> operations, string operationId,
-        RestApiOperationParameterLocation location, string name)
-    {
-        Assert.True(operations.Any());
-
-        var operation = operations.Single(o => o.Id == operationId);
-        Assert.NotNull(operation.Parameters);
-        Assert.True(operation.Parameters.Any());
-
-        var parameters = operation.Parameters.Where(p => p.Location == location);
-
-        var parameter = parameters.Single(p => p.Name == name);
-        Assert.NotNull(parameter);
-
-        return parameter;
-    }
-
-    public void Dispose()
-    {
-        this._openApiDocument.Dispose();
-    }
-}
diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV31Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV31Tests.cs
deleted file mode 100644
index 4bb447a46e80..000000000000
--- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiDocumentParserV31Tests.cs
+++ /dev/null
@@ -1,345 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Collections.Generic;
-using System.Dynamic;
-using System.IO;
-using System.Linq;
-using System.Net.Http;
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Functions.OpenAPI.Model;
-using Microsoft.SemanticKernel.Functions.OpenAPI.OpenApi;
-using SemanticKernel.Functions.UnitTests.OpenAPI.TestPlugins;
-using Xunit;
-
-namespace SemanticKernel.Functions.UnitTests.OpenAPI;
-
-public sealed class OpenApiDocumentParserV31Tests : IDisposable
-{
-    /// <summary>
-    /// System under test - an instance of OpenApiDocumentParser class.
-    /// </summary>
-    private readonly OpenApiDocumentParser _sut;
-
-    /// <summary>
-    /// OpenAPI document stream.
-    /// </summary>
-    private readonly Stream _openApiDocument;
-
-    ///
-    /// Creates an instance of a class.
- /// - public OpenApiDocumentParserV31Tests() - { - this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV3_1.yaml"); - - this._sut = new OpenApiDocumentParser(); - } - - [Fact] - public async Task ItCanParsePutOperationBodySuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var putOperation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(putOperation); - - var payload = putOperation.Payload; - Assert.NotNull(payload); - Assert.Equal("application/json", payload.MediaType); - - var properties = payload.Properties; - Assert.NotNull(properties); - Assert.Equal(2, properties.Count); - - var valueProperty = properties.FirstOrDefault(p => p.Name == "value"); - Assert.NotNull(valueProperty); - Assert.True(valueProperty.IsRequired); - Assert.Equal("The value of the secret.", valueProperty.Description); - Assert.Equal("string", valueProperty.Type); - Assert.NotNull(valueProperty.Properties); - Assert.False(valueProperty.Properties.Any()); - - var attributesProperty = properties.FirstOrDefault(p => p.Name == "attributes"); - Assert.NotNull(attributesProperty); - Assert.False(attributesProperty.IsRequired); - Assert.Equal("attributes", attributesProperty.Description); - Assert.Equal("object", attributesProperty.Type); - Assert.NotNull(attributesProperty.Properties); - Assert.True(attributesProperty.Properties.Any()); - - var enabledProperty = attributesProperty.Properties.FirstOrDefault(p => p.Name == "enabled"); - Assert.NotNull(enabledProperty); - Assert.False(enabledProperty.IsRequired); - Assert.Equal("Determines whether the object is enabled.", enabledProperty.Description); - Assert.Equal("boolean", enabledProperty.Type); - Assert.False(enabledProperty.Properties?.Any()); - } - - [Fact] - public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var putOperation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(putOperation); - Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); - Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); - Assert.Equal(HttpMethod.Put, putOperation.Method); - Assert.Equal("/secrets/{secret-name}", putOperation.Path); - - var parameters = putOperation.GetParameters(); - Assert.NotNull(parameters); - Assert.True(parameters.Count >= 5); - - var pathParameter = parameters.Single(p => p.Name == "secret-name"); //'secret-name' path parameter. - Assert.True(pathParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Path, pathParameter.Location); - Assert.Null(pathParameter.DefaultValue); - - var apiVersionParameter = parameters.Single(p => p.Name == "api-version"); //'api-version' query string parameter. - Assert.True(apiVersionParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Query, apiVersionParameter.Location); - Assert.Equal("7.0", apiVersionParameter.DefaultValue); - - var serverUrlParameter = parameters.Single(p => p.Name == "server-url"); //'server-url' artificial parameter. 
- Assert.False(serverUrlParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Path, serverUrlParameter.Location); - Assert.Equal("https://my-key-vault.vault.azure.net/", serverUrlParameter.DefaultValue); - - var payloadParameter = parameters.Single(p => p.Name == "payload"); //'payload' artificial parameter. - Assert.True(payloadParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Body, payloadParameter.Location); - Assert.Null(payloadParameter.DefaultValue); - Assert.Equal("REST API request body.", payloadParameter.Description); - - var contentTypeParameter = parameters.Single(p => p.Name == "content-type"); //'content-type' artificial parameter. - Assert.False(contentTypeParameter.IsRequired); - Assert.Equal(RestApiOperationParameterLocation.Body, contentTypeParameter.Location); - Assert.Null(contentTypeParameter.DefaultValue); - Assert.Equal("Content type of REST API request body.", contentTypeParameter.Description); - } - - [Fact] - public async Task ItCanUseOperationSummaryAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "Excuses"); - Assert.NotNull(operation); - Assert.Equal("Turn a scenario into a creative or humorous excuse to send your boss", operation.Description); - } - - [Fact] - public async Task ItCanExtractSimpleTypeHeaderParameterMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert string header parameter metadata - var accept = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "Accept"); - - Assert.Equal("string", accept.Type); - Assert.Equal("application/json", accept.DefaultValue); - Assert.Equal("Indicates which content types, expressed as MIME types, the client is able to understand.", accept.Description); - Assert.False(accept.IsRequired); - - //Assert integer header parameter metadata - var apiVersion = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-API-Version"); - - Assert.Equal("integer", apiVersion.Type); - Assert.Equal("10", apiVersion.DefaultValue); - Assert.Equal("Requested API version.", apiVersion.Description); - Assert.True(apiVersion.IsRequired); - } - - [Fact] - public async Task ItCanExtractCsvStyleHeaderParameterMetadataSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert header parameters metadata - var acceptParameter = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-Operation-Csv-Ids"); - - Assert.Null(acceptParameter.DefaultValue); - Assert.False(acceptParameter.IsRequired); - Assert.Equal("array", acceptParameter.Type); - Assert.Equal(RestApiOperationParameterStyle.Simple, acceptParameter.Style); - Assert.Equal("The comma separated list of operation ids.", acceptParameter.Description); - Assert.Equal("string", acceptParameter.ArrayItemType); - } - - [Fact] - public async Task ItCanExtractHeadersSuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "SetSecret"); - Assert.NotNull(operation.Headers); - Assert.Equal(3, operation.Headers.Count); - - Assert.True(operation.Headers.ContainsKey("Accept")); - 
Assert.True(operation.Headers.ContainsKey("X-API-Version")); - Assert.True(operation.Headers.ContainsKey("X-Operation-Csv-Ids")); - } - - [Fact] - public async Task ItCanExtractAllPathsAsOperationsAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.Equal(3, operations.Count); - } - - [Fact] - public async Task ItCanParseOperationHavingTextPlainBodySuccessfullyAsync() - { - // Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - // Assert - Assert.NotNull(operations); - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "Excuses"); - Assert.NotNull(operation); - - var payload = operation.Payload; - Assert.NotNull(payload); - Assert.Equal("text/plain", payload.MediaType); - Assert.Equal("excuse event", payload.Description); - - var properties = payload.Properties; - Assert.NotNull(properties); - Assert.Empty(properties); - } - - [Fact] - public async Task ItCanWorkWithDocumentsWithoutServersAttributeAsync() - { - //Arrange - using var stream = ModifyOpenApiDocument(this._openApiDocument, (yaml) => - { - yaml.Remove("servers"); - }); - - //Act - var operations = await this._sut.ParseAsync(stream); - - //Assert - Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); - } - - [Fact] - public async Task ItCanWorkWithDocumentsWithEmptyServersAttributeAsync() - { - //Arrange - using var stream = ModifyOpenApiDocument(this._openApiDocument, (yaml) => - { - yaml["servers"] = Array.Empty(); - }); - - //Act - var operations = await this._sut.ParseAsync(stream); - - //Assert - Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); - } - - [Theory] - [InlineData("explodeFormParam")] - [InlineData("anotherExplodeFormParam")] - public async Task ItShouldSupportsAmpersandSeparatedParametersForFormStyleArrayQueryStringParametersAsync(string parameterName) - { - //Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "GetSecret"); - - var explodeFormParam = operation.Parameters.Single(p => p.Name == parameterName); - - Assert.True(explodeFormParam.Expand); - } - - [Fact] - public async Task ItShouldSupportsCommaSeparatedValuesForFormStyleArrayQueryStringParametersAsync() - { - //Act - var operations = await this._sut.ParseAsync(this._openApiDocument); - - //Assert - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == "GetSecret"); - - var explodeFormParam = operation.Parameters.Single(p => p.Name == "nonExplodeFormParam"); - - Assert.False(explodeFormParam.Expand); - } - - private static MemoryStream ModifyOpenApiDocument(Stream openApiDocument, Action> transformer) - { - var serializer = new SharpYaml.Serialization.Serializer(); - - //Deserialize yaml - var yaml = serializer.Deserialize(openApiDocument); - - //Modify yaml - transformer(yaml!); - - //Serialize yaml - var stream = new MemoryStream(); - - serializer.Serialize(stream, yaml); - - stream.Seek(0, SeekOrigin.Begin); - - return stream; - } - - private static RestApiOperationParameter GetParameterMetadata(IList operations, string operationId, RestApiOperationParameterLocation location, string name) - { - Assert.True(operations.Any()); - - var operation = operations.Single(o => o.Id == operationId); - Assert.NotNull(operation.Parameters); - Assert.True(operation.Parameters.Any()); - - var parameters = operation.Parameters.Where(p => p.Location == location); - - var parameter = 
parameters.Single(p => p.Name == name);
-        Assert.NotNull(parameter);
-
-        return parameter;
-    }
-
-    public void Dispose()
-    {
-        this._openApiDocument.Dispose();
-    }
-}
diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationRunnerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationRunnerTests.cs
deleted file mode 100644
index 4c42db734667..000000000000
--- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationRunnerTests.cs
+++ /dev/null
@@ -1,1055 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using System.Net.Http;
-using System.Net.Http.Headers;
-using System.Net.Mime;
-using System.Text;
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.SemanticKernel.Diagnostics;
-using Microsoft.SemanticKernel.Functions.OpenAPI;
-using Microsoft.SemanticKernel.Functions.OpenAPI.Authentication;
-using Microsoft.SemanticKernel.Functions.OpenAPI.Model;
-using Moq;
-using Xunit;
-
-namespace SemanticKernel.Functions.UnitTests.OpenAPI;
-
-public sealed class RestApiOperationRunnerTests : IDisposable
-{
-    /// <summary>
-    /// A mock instance of the authentication callback.
-    /// </summary>
-    private readonly Mock<AuthenticateRequestAsyncCallback> _authenticationHandlerMock;
-
-    /// <summary>
-    /// An instance of HttpMessageHandlerStub class used to get access to various properties of HttpRequestMessage sent by HTTP client.
-    /// </summary>
-    private readonly HttpMessageHandlerStub _httpMessageHandlerStub;
-
-    /// <summary>
-    /// An instance of HttpClient class used by the tests.
-    /// </summary>
-    private readonly HttpClient _httpClient;
-
-    /// <summary>
-    /// Creates an instance of a class.
-    /// </summary>
-    public RestApiOperationRunnerTests()
-    {
-        this._authenticationHandlerMock = new Mock<AuthenticateRequestAsyncCallback>();
-
-        this._httpMessageHandlerStub = new HttpMessageHandlerStub();
-
-        this._httpClient = new HttpClient(this._httpMessageHandlerStub);
-    }
-
-    [Fact]
-    public async Task ItCanRunCreateAndUpdateOperationsWithJsonPayloadSuccessfullyAsync()
-    {
-        // Arrange
-        this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json);
-
-        var operation = new RestApiOperation(
-            "fake-id",
-            new Uri("https://fake-random-test-host"),
-            "fake-path",
-            HttpMethod.Post,
-            "fake-description",
-            new List<RestApiOperationParameter>(),
-            new Dictionary<string, string>(),
-            payload: null
-        );
-
-        var payload = new
-        {
-            value = "fake-value",
-            attributes = new
-            {
-                enabled = true
-            }
-        };
-
-        var arguments = new Dictionary<string, string>
-        {
-            { "payload", JsonSerializer.Serialize(payload) },
-            { "content-type", "application/json" }
-        };
-
-        var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object);
-
-        // Act
-        var result = await sut.RunAsync(operation, arguments);
-
-        // Assert
-        Assert.NotNull(this._httpMessageHandlerStub.RequestUri);
-        Assert.Equal("https://fake-random-test-host/fake-path", this._httpMessageHandlerStub.RequestUri.AbsoluteUri);
-
-        Assert.Equal(HttpMethod.Post, this._httpMessageHandlerStub.Method);
-
-        Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders);
-        Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("application/json; charset=utf-8"));
-
-        var messageContent = this._httpMessageHandlerStub.RequestContent;
-        Assert.NotNull(messageContent);
-        Assert.True(messageContent.Length != 0);
-
-        var deserializedPayload = JsonNode.Parse(new MemoryStream(messageContent));
-
Assert.NotNull(deserializedPayload); - - var valueProperty = deserializedPayload["value"]?.ToString(); - Assert.Equal("fake-value", valueProperty); - - var attributesProperty = deserializedPayload["attributes"]; - Assert.NotNull(attributesProperty); - - var enabledProperty = attributesProperty["enabled"]?.AsValue(); - Assert.NotNull(enabledProperty); - Assert.Equal("true", enabledProperty.ToString()); - - Assert.NotNull(result); - - Assert.Equal("fake-content", result.Content); - - Assert.Equal("application/json; charset=utf-8", result.ContentType); - - this._authenticationHandlerMock.Verify(x => x(It.IsAny()), Times.Once); - } - - [Fact] - public async Task ItCanRunCreateAndUpdateOperationsWithPlainTextPayloadSuccessfullyAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Text.Plain); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary - { - { "payload", "fake-input-value" }, - { "content-type", "text/plain"} - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(this._httpMessageHandlerStub.RequestUri); - Assert.Equal("https://fake-random-test-host/fake-path", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); - - Assert.Equal(HttpMethod.Post, this._httpMessageHandlerStub.Method); - - Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); - Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("text/plain; charset=utf-8")); - - var messageContent = this._httpMessageHandlerStub.RequestContent; - Assert.NotNull(messageContent); - Assert.True(messageContent.Length != 0); - - var payloadText = Encoding.UTF8.GetString(messageContent, 0, messageContent.Length); - Assert.Equal("fake-input-value", payloadText); - - Assert.NotNull(result); - - Assert.Equal("fake-content", result.Content); - - Assert.Equal("text/plain; charset=utf-8", result.ContentType); - - this._authenticationHandlerMock.Verify(x => x(It.IsAny()), Times.Once); - } - - [Fact] - public async Task ItShouldAddHeadersToHttpRequestAsync() - { - // Arrange - var headers = new Dictionary - { - { "fake-header", string.Empty } - }; - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Get, - "fake-description", - new List(), - headers - ); - - var arguments = new Dictionary - { - { "fake-header", "fake-header-value" } - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); - - // Act - await sut.RunAsync(operation, arguments); - - // Assert - 2 headers: 1 from the test and the useragent added internally - Assert.NotNull(this._httpMessageHandlerStub.RequestHeaders); - Assert.Equal(2, this._httpMessageHandlerStub.RequestHeaders.Count()); - - Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "fake-header" && h.Value.Contains("fake-header-value")); - } - - [Fact] - public async Task ItShouldAddUserAgentHeaderToHttpRequestIfConfiguredAsync() - { - // Arrange - var headers = new Dictionary - { - { "fake-header", string.Empty } - }; - - var operation = new RestApiOperation( - "fake-id", - new 
Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Get, - "fake-description", - new List(), - headers - ); - - var arguments = new Dictionary - { - { "fake-header", "fake-header-value" } - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, "fake-user-agent"); - - // Act - await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(this._httpMessageHandlerStub.RequestHeaders); - Assert.Equal(2, this._httpMessageHandlerStub.RequestHeaders.Count()); - - Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "fake-header" && h.Value.Contains("fake-header-value")); - Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "User-Agent" && h.Value.Contains("fake-user-agent")); - } - - [Fact] - public async Task ItShouldBuildJsonPayloadDynamicallyAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - List payloadProperties = new() - { - new("name", "string", true, new List()), - new("attributes", "object", false, new List() - { - new("enabled", "boolean", false, new List()), - }) - }; - - var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload - ); - - var arguments = new Dictionary(); - arguments.Add("name", "fake-name-value"); - arguments.Add("enabled", "true"); - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: true); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); - Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("application/json; charset=utf-8")); - - var messageContent = this._httpMessageHandlerStub.RequestContent; - Assert.NotNull(messageContent); - Assert.True(messageContent.Length != 0); - - var deserializedPayload = JsonNode.Parse(new MemoryStream(messageContent)); - Assert.NotNull(deserializedPayload); - - var name = deserializedPayload["name"]?.ToString(); - Assert.Equal("fake-name-value", name); - - var attributes = deserializedPayload["attributes"]; - Assert.NotNull(attributes); - - var enabled = attributes["enabled"]?.ToString(); - Assert.NotNull(enabled); - Assert.Equal("true", enabled); - } - - [Fact] - public async Task ItShouldBuildJsonPayloadDynamicallyUsingPayloadMetadataDataTypesAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - List payloadProperties = new() - { - new("name", "string", true, new List()), - new("attributes", "object", false, new List() - { - new("enabled", "boolean", false, new List()), - new("cardinality", "number", false, new List()), - new("coefficient", "number", false, new List()), - new("count", "integer", false, new List()), - new("params", "array", false, new List()), - }) - }; - - var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new 
Dictionary(), - payload - ); - - var arguments = new Dictionary(); - arguments.Add("name", "fake-string-value"); - arguments.Add("enabled", "true"); - arguments.Add("cardinality", "8"); - arguments.Add("coefficient", "0.8"); - arguments.Add("count", "1"); - arguments.Add("params", "[1,2,3]"); - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: true); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - var messageContent = this._httpMessageHandlerStub.RequestContent; - Assert.NotNull(messageContent); - - var deserializedPayload = JsonNode.Parse(new MemoryStream(messageContent)); - Assert.NotNull(deserializedPayload); - - var name = deserializedPayload["name"]?.GetValue(); - Assert.NotNull(name); - Assert.Equal(JsonValueKind.String, name.Value.ValueKind); - Assert.Equal("fake-string-value", name.ToString()); - - var attributes = deserializedPayload["attributes"]; - Assert.True(attributes is JsonObject); - - var enabled = attributes["enabled"]?.GetValue(); - Assert.NotNull(enabled); - Assert.Equal(JsonValueKind.True, enabled.Value.ValueKind); - - var cardinality = attributes["cardinality"]?.GetValue(); - Assert.NotNull(cardinality); - Assert.Equal(JsonValueKind.Number, cardinality.Value.ValueKind); - Assert.Equal("8", cardinality.Value.ToString()); - - var coefficient = attributes["coefficient"]?.GetValue(); - Assert.NotNull(coefficient); - Assert.Equal(JsonValueKind.Number, coefficient.Value.ValueKind); - Assert.Equal("0.8", coefficient.Value.ToString()); - - var count = attributes["count"]?.GetValue(); - Assert.NotNull(count); - Assert.Equal(JsonValueKind.Number, coefficient.Value.ValueKind); - Assert.Equal("1", count.Value.ToString()); - - var parameters = attributes["params"]; - Assert.NotNull(parameters); - Assert.True(parameters is JsonArray); - } - - [Fact] - public async Task ItShouldBuildJsonPayloadDynamicallyResolvingArgumentsByFullNamesAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - List payloadProperties = new() - { - new("upn", "string", true, new List()), - new("receiver", "object", false, new List() - { - new("upn", "string", false, new List()), - new("alternative", "object", false, new List() - { - new("upn", "string", false, new List()), - }), - }), - new("cc", "object", false, new List() - { - new("upn", "string", false, new List()), - }) - }; - - var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload - ); - - var arguments = new Dictionary(); - arguments.Add("upn", "fake-sender-upn"); - arguments.Add("receiver.upn", "fake-receiver-upn"); - arguments.Add("receiver.alternative.upn", "fake-receiver-alternative-upn"); - arguments.Add("cc.upn", "fake-cc-upn"); - - var sut = new RestApiOperationRunner( - this._httpClient, - this._authenticationHandlerMock.Object, - enableDynamicPayload: true, - enablePayloadNamespacing: true); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); - Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("application/json; charset=utf-8")); - - var 
messageContent = this._httpMessageHandlerStub.RequestContent; - Assert.NotNull(messageContent); - Assert.True(messageContent.Length != 0); - - var deserializedPayload = JsonNode.Parse(new MemoryStream(messageContent)); - Assert.NotNull(deserializedPayload); - - //Sender props - var senderUpn = deserializedPayload["upn"]?.ToString(); - Assert.Equal("fake-sender-upn", senderUpn); - - //Receiver props - var receiver = deserializedPayload["receiver"]; - Assert.NotNull(receiver); - - var receiverUpn = receiver["upn"]?.AsValue(); - Assert.NotNull(receiverUpn); - Assert.Equal("fake-receiver-upn", receiverUpn.ToString()); - - var alternative = receiver["alternative"]; - Assert.NotNull(alternative); - - var alternativeUpn = alternative["upn"]?.AsValue(); - Assert.NotNull(alternativeUpn); - Assert.Equal("fake-receiver-alternative-upn", alternativeUpn.ToString()); - - //CC props - var carbonCopy = deserializedPayload["cc"]; - Assert.NotNull(carbonCopy); - - var ccUpn = carbonCopy["upn"]?.AsValue(); - Assert.NotNull(ccUpn); - Assert.Equal("fake-cc-upn", ccUpn.ToString()); - } - - [Fact] - public async Task ItShouldThrowExceptionIfPayloadMetadataDoesNotHaveContentTypeAsync() - { - // Arrange - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary(); - - var sut = new RestApiOperationRunner( - this._httpClient, - this._authenticationHandlerMock.Object, - enableDynamicPayload: true); - - // Act - var exception = await Assert.ThrowsAsync(async () => await sut.RunAsync(operation, arguments)); - - Assert.Contains("No content type is provided", exception.Message, StringComparison.InvariantCulture); - } - - [Fact] - public async Task ItShouldThrowExceptionIfContentTypeArgumentIsNotProvidedAsync() - { - // Arrange - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary(); - - var sut = new RestApiOperationRunner( - this._httpClient, - this._authenticationHandlerMock.Object, - enableDynamicPayload: false); - - // Act - var exception = await Assert.ThrowsAsync(async () => await sut.RunAsync(operation, arguments)); - - Assert.Contains("No content type is provided", exception.Message, StringComparison.InvariantCulture); - } - - [Fact] - public async Task ItShouldUsePayloadArgumentForPlainTextContentTypeWhenBuildingPayloadDynamicallyAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Text.Plain); - - var payload = new RestApiOperationPayload(MediaTypeNames.Text.Plain, new List()); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload - ); - - var arguments = new Dictionary - { - { "payload", "fake-input-value" }, - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: true); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); - Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("text/plain; 
charset=utf-8")); - - var messageContent = this._httpMessageHandlerStub.RequestContent; - Assert.NotNull(messageContent); - Assert.True(messageContent.Length != 0); - - var payloadText = Encoding.UTF8.GetString(messageContent, 0, messageContent.Length); - Assert.Equal("fake-input-value", payloadText); - } - - [Theory] - [InlineData(MediaTypeNames.Text.Plain)] - [InlineData(MediaTypeNames.Application.Json)] - public async Task ItShouldUsePayloadAndContentTypeArgumentsIfDynamicPayloadBuildingIsNotRequiredAsync(string contentType) - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Text.Plain); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary - { - { "payload", "fake-input-value" }, - { "content-type", $"{contentType}" }, - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: false); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); - Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains($"{contentType}; charset=utf-8")); - - var messageContent = this._httpMessageHandlerStub.RequestContent; - Assert.NotNull(messageContent); - Assert.True(messageContent.Length != 0); - - var payloadText = Encoding.UTF8.GetString(messageContent, 0, messageContent.Length); - Assert.Equal("fake-input-value", payloadText); - } - - [Fact] - public async Task ItShouldBuildJsonPayloadDynamicallyExcludingOptionalParametersIfTheirArgumentsNotProvidedAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - List payloadProperties = new() - { - new("upn", "string", false, new List()), - }; - - var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload - ); - - var arguments = new Dictionary(); - - var sut = new RestApiOperationRunner( - this._httpClient, - this._authenticationHandlerMock.Object, - enableDynamicPayload: true, - enablePayloadNamespacing: true); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - var messageContent = this._httpMessageHandlerStub.RequestContent; - Assert.NotNull(messageContent); - Assert.True(messageContent.Length != 0); - - var deserializedPayload = JsonNode.Parse(new MemoryStream(messageContent)); - Assert.NotNull(deserializedPayload); - - var senderUpn = deserializedPayload["upn"]?.ToString(); - Assert.Null(senderUpn); - } - - [Fact] - public async Task ItShouldBuildJsonPayloadDynamicallyIncludingOptionalParametersIfTheirArgumentsProvidedAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - List payloadProperties = new() - { - new("upn", "string", false, new List()), - }; - - var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); - - var operation = new 
RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload - ); - - var arguments = new Dictionary(); - arguments.Add("upn", "fake-sender-upn"); - - var sut = new RestApiOperationRunner( - this._httpClient, - this._authenticationHandlerMock.Object, - enableDynamicPayload: true, - enablePayloadNamespacing: true); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - var messageContent = this._httpMessageHandlerStub.RequestContent; - Assert.NotNull(messageContent); - Assert.True(messageContent.Length != 0); - - var deserializedPayload = JsonNode.Parse(new MemoryStream(messageContent)); - Assert.NotNull(deserializedPayload); - - var senderUpn = deserializedPayload["upn"]?.ToString(); - Assert.Equal("fake-sender-upn", senderUpn); - } - - [Fact] - public async Task ItShouldAddRequiredQueryStringParametersIfTheirArgumentsProvidedAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - var firstParameter = new RestApiOperationParameter( - "p1", - "string", - isRequired: true, //Marking the parameter as required - false, - RestApiOperationParameterLocation.Query, - RestApiOperationParameterStyle.Form); - - var secondParameter = new RestApiOperationParameter( - "p2", - "string", - isRequired: true, //Marking the parameter as required - false, - RestApiOperationParameterLocation.Query, - RestApiOperationParameterStyle.Form); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Get, - "fake-description", - new List() { firstParameter, secondParameter }, - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary - { - { "p1", "v1" }, - { "p2", "v2" }, - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(this._httpMessageHandlerStub.RequestUri); - Assert.Equal("https://fake-random-test-host/fake-path?p1=v1&p2=v2", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); - } - - [Fact] - public async Task ItShouldAddNotRequiredQueryStringParametersIfTheirArgumentsProvidedAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - var firstParameter = new RestApiOperationParameter( - "p1", - "string", - isRequired: false, //Marking the parameter as not required - false, - RestApiOperationParameterLocation.Query, - RestApiOperationParameterStyle.Form); - - var secondParameter = new RestApiOperationParameter( - "p2", - "string", - isRequired: false, //Marking the parameter as not required - false, - RestApiOperationParameterLocation.Query, - RestApiOperationParameterStyle.Form); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Get, - "fake-description", - new List() { firstParameter, secondParameter }, - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary - { - { "p1", "v1" }, - { "p2", "v2" }, - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - 
Assert.NotNull(this._httpMessageHandlerStub.RequestUri); - Assert.Equal("https://fake-random-test-host/fake-path?p1=v1&p2=v2", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); - } - - [Fact] - public async Task ItShouldSkipNotRequiredQueryStringParametersIfNoArgumentsProvidedAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - var firstParameter = new RestApiOperationParameter( - "p1", - "string", - isRequired: false, //Marking the parameter as not required - false, - RestApiOperationParameterLocation.Query, - RestApiOperationParameterStyle.Form); - - var secondParameter = new RestApiOperationParameter( - "p2", - "string", - isRequired: true, //Marking the parameter as required - false, - RestApiOperationParameterLocation.Query, - RestApiOperationParameterStyle.Form); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Get, - "fake-description", - new List() { firstParameter, secondParameter }, - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary - { - { "p2", "v2" }, //Providing argument for the required parameter only - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(this._httpMessageHandlerStub.RequestUri); - Assert.Equal("https://fake-random-test-host/fake-path?p2=v2", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); - } - - [Fact] - public async Task ItShouldThrowExceptionIfNoArgumentProvidedForRequiredQueryStringParameterAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); - - var parameter = new RestApiOperationParameter( - "p1", - "string", - isRequired: true, //Marking the parameter as required - false, - RestApiOperationParameterLocation.Query, - RestApiOperationParameterStyle.Form); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Get, - "fake-description", - new List() { parameter }, - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary(); //Providing no arguments - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); - - // Act and Assert - await Assert.ThrowsAsync(() => sut.RunAsync(operation, arguments)); - } - - [Theory] - [InlineData(MediaTypeNames.Application.Json)] - [InlineData(MediaTypeNames.Application.Xml)] - [InlineData(MediaTypeNames.Text.Plain)] - [InlineData(MediaTypeNames.Text.Html)] - [InlineData(MediaTypeNames.Text.Xml)] - [InlineData("text/csv")] - [InlineData("text/markdown")] - public async Task ItShouldReadContentAsStringSuccessfullyAsync(string contentType) - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, contentType); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary - { - { "payload", JsonSerializer.Serialize(new { value = "fake-value" }) }, - { "content-type", "application/json" } - }; - - var sut = new RestApiOperationRunner(this._httpClient, 
this._authenticationHandlerMock.Object); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal("fake-content", result.Content); - - Assert.Equal($"{contentType}; charset=utf-8", result.ContentType); - } - - [Theory] - [InlineData("image/jpeg")] - [InlineData("image/png")] - [InlineData("image/gif")] - [InlineData("image/svg+xml")] - [InlineData("image/bmp")] - [InlineData("image/x-icon")] - public async Task ItShouldReadContentAsBytesSuccessfullyAsync(string contentType) - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new ByteArrayContent(new byte[] { 00, 01, 02 }); - this._httpMessageHandlerStub.ResponseToReturn.Content.Headers.ContentType = new MediaTypeHeaderValue(contentType); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary - { - { "payload", JsonSerializer.Serialize(new { value = "fake-value" }) }, - { "content-type", "application/json" } - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); - - // Act - var result = await sut.RunAsync(operation, arguments); - - // Assert - Assert.NotNull(result); - - Assert.Equal(new byte[] { 00, 01, 02 }, result.Content); - - Assert.Equal($"{contentType}", result.ContentType); - } - - [Fact] - public async Task ItShouldThrowExceptionForUnsupportedContentTypeAsync() - { - // Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, "fake/type"); - - var operation = new RestApiOperation( - "fake-id", - new Uri("https://fake-random-test-host"), - "fake-path", - HttpMethod.Post, - "fake-description", - new List(), - new Dictionary(), - payload: null - ); - - var arguments = new Dictionary - { - { "payload", JsonSerializer.Serialize(new { value = "fake-value" }) }, - { "content-type", "application/json" } - }; - - var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); - - // Act & Assert - await Assert.ThrowsAsync(() => sut.RunAsync(operation, arguments)); - } - - /// - /// Disposes resources used by this class. - /// - public void Dispose() - { - this._httpMessageHandlerStub.Dispose(); - - this._httpClient.Dispose(); - } - - private sealed class HttpMessageHandlerStub : DelegatingHandler - { - public HttpRequestHeaders? RequestHeaders { get; private set; } - - public HttpContentHeaders? ContentHeaders { get; private set; } - - public byte[]? RequestContent { get; private set; } - - public Uri? RequestUri { get; private set; } - - public HttpMethod? Method { get; private set; } - - public HttpResponseMessage ResponseToReturn { get; set; } - - public HttpMessageHandlerStub() - { - this.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent("{}", Encoding.UTF8, MediaTypeNames.Application.Json) - }; - } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - this.Method = request.Method; - this.RequestUri = request.RequestUri; - this.RequestHeaders = request.Headers; - this.RequestContent = request.Content == null ? 
null : await request.Content.ReadAsByteArrayAsync(cancellationToken); - this.ContentHeaders = request.Content?.Headers; - - return await Task.FromResult(this.ResponseToReturn); - } - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationTests.cs deleted file mode 100644 index 1a3f15c2312c..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationTests.cs +++ /dev/null @@ -1,331 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Net.Http; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenAPI; - -public class RestApiOperationTests -{ - [Fact] - public void ItShouldUseHostUrlIfNoOverrideProvided() - { - // Arrange - var sut = new RestApiOperation( - "fake_id", - new Uri("https://fake-random-test-host"), - "/", - HttpMethod.Get, - "fake_description", - new List(), - new Dictionary() - ); - - var arguments = new Dictionary(); - - // Act - var url = sut.BuildOperationUrl(arguments); - - // Assert - Assert.Equal("https://fake-random-test-host/", url.OriginalString); - } - - [Fact] - public void ItShouldUseHostUrlOverrideIfProvided() - { - // Arrange - var sut = new RestApiOperation( - "fake_id", - new Uri("https://fake-random-test-host"), - "/", - HttpMethod.Get, - "fake_description", - new List(), - new Dictionary() - ); - - var arguments = new Dictionary - { - { "server-url", "https://fake-random-test-host-override" } - }; - - // Act - var url = sut.BuildOperationUrl(arguments); - - // Assert - Assert.Equal("https://fake-random-test-host-override/", url.OriginalString); - } - - [Fact] - public void ItShouldReplacePathParametersByValuesFromArguments() - { - // Arrange - var sut = new RestApiOperation( - "fake_id", - new Uri("https://fake-random-test-host"), - "/{fake-path-parameter}/other_fake_path_section", - HttpMethod.Get, - "fake_description", - new List(), - new Dictionary() - ); - - var arguments = new Dictionary - { - { "fake-path-parameter", "fake-path-value" } - }; - - // Act - var url = sut.BuildOperationUrl(arguments); - - // Assert - Assert.Equal("https://fake-random-test-host/fake-path-value/other_fake_path_section", url.OriginalString); - } - - [Fact] - public void ItShouldReplacePathParametersByDefaultValues() - { - // Arrange - var parameterMetadata = new RestApiOperationParameter( - name: "fake-path-parameter", - type: "fake_type", - isRequired: true, - expand: false, - location: RestApiOperationParameterLocation.Path, - defaultValue: "fake-default-path"); - - var sut = new RestApiOperation( - "fake_id", - new Uri("https://fake-random-test-host"), - "/{fake-path-parameter}/other_fake_path_section", - HttpMethod.Get, - "fake_description", - new List { parameterMetadata }, - new Dictionary()); - - var arguments = new Dictionary(); - - // Act - var url = sut.BuildOperationUrl(arguments); - - // Assert - Assert.Equal("https://fake-random-test-host/fake-default-path/other_fake_path_section", url.OriginalString); - } - - [Fact] - public void ShouldBuildResourceUrlWithoutQueryString() - { - // Arrange - var firstParameterMetadata = new RestApiOperationParameter( - name: "p1", - type: "fake_type", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query, - defaultValue: "dv1"); - - var secondParameterMetadata = new 
RestApiOperationParameter( - name: "p2", - type: "fake_type", - isRequired: false, - expand: false, - location: RestApiOperationParameterLocation.Query); - - var sut = new RestApiOperation( - "fake_id", - new Uri("https://fake-random-test-host"), - "{fake-path}/", - HttpMethod.Get, - "fake_description", - new List { firstParameterMetadata, secondParameterMetadata }, - new Dictionary()); - - var arguments = new Dictionary - { - { "server-url", "https://fake-random-test-host-override" }, - { "fake-path", "fake-path-value" }, - }; - - // Act - var url = sut.BuildOperationUrl(arguments); - - // Assert - Assert.Equal("https://fake-random-test-host-override/fake-path-value/", url.OriginalString); - } - - [Fact] - public void ItShouldRenderHeaderValuesFromArguments() - { - // Arrange - var rawHeaders = new Dictionary - { - { "fake_header_one", string.Empty }, - { "fake_header_two", string.Empty } - }; - - var arguments = new Dictionary - { - { "fake_header_one", "fake_header_one_value" }, - { "fake_header_two", "fake_header_two_value" } - }; - - var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", new List(), rawHeaders); - - // Act - var headers = sut.RenderHeaders(arguments); - - // Assert - Assert.Equal(2, headers.Count); - - var headerOne = headers["fake_header_one"]; - Assert.Equal("fake_header_one_value", headerOne); - - var headerTwo = headers["fake_header_two"]; - Assert.Equal("fake_header_two_value", headerTwo); - } - - [Fact] - public void ItShouldUseHeaderValuesIfTheyAreAlreadyProvided() - { - // Arrange - var rawHeaders = new Dictionary - { - { "fake_header_one", "fake_header_one_value" }, - { "fake_header_two", "fake_header_two_value" } - }; - - var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", new List(), - rawHeaders); - - // Act - var headers = sut.RenderHeaders(new Dictionary()); - - // Assert - Assert.Equal(2, headers.Count); - - var headerOne = headers["fake_header_one"]; - Assert.Equal("fake_header_one_value", headerOne); - - var headerTwo = headers["fake_header_two"]; - Assert.Equal("fake_header_two_value", headerTwo); - } - - [Fact] - public void ItShouldThrowExceptionIfHeadersHaveNoValuesAndHeadersMetadataNotSupplied() - { - // Arrange - var rawHeaders = new Dictionary - { - { "fake_header_one", string.Empty }, - { "fake_header_two", string.Empty } - }; - - var metadata = new List(); - - var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata, rawHeaders); - - // Act - void Act() => sut.RenderHeaders(new Dictionary()); - - // Assert - Assert.Throws(Act); - } - - [Fact] - public void ShouldThrowExceptionIfNoValueProvidedForRequiredHeader() - { - // Arrange - var rawHeaders = new Dictionary - { - { "fake_header_one", string.Empty }, - { "fake_header_two", string.Empty } - }; - - var metadata = new List - { - new(name: "fake_header_one", type: "string", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), - new(name: "fake_header_two", type : "string", isRequired : false, expand : false, location : RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple) - }; - - var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata, rawHeaders); - - // Act - void Act() => sut.RenderHeaders(new Dictionary()); 
-
-        // Assert
-        Assert.Throws(Act);
-    }
-
-    [Fact]
-    public void ItShouldSkipOptionalHeaderHavingNeitherValueNorDefaultValue()
-    {
-        // Arrange
-        var rawHeaders = new Dictionary<string, string>
-        {
-            { "fake_header_one", string.Empty },
-            { "fake_header_two", string.Empty }
-        };
-
-        var metadata = new List<RestApiOperationParameter>
-        {
-            new RestApiOperationParameter(name: "fake_header_one", type : "string", isRequired : true, expand : false, location : RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple),
-            new RestApiOperationParameter(name : "fake_header_two", type : "string", isRequired : false, expand : false, location : RestApiOperationParameterLocation.Header, style : RestApiOperationParameterStyle.Simple)
-        };
-
-        var arguments = new Dictionary<string, string>
-        {
-            { "fake_header_one", "fake_header_one_value" }
-        };
-
-        var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata, rawHeaders);
-
-        // Act
-        var headers = sut.RenderHeaders(arguments);
-
-        // Assert
-        Assert.Single(headers);
-
-        var headerOne = headers["fake_header_one"];
-        Assert.Equal("fake_header_one_value", headerOne);
-    }
-
-    [Fact]
-    public void ShouldUseDefaultValueForOptionalHeaderIfNoValueProvided()
-    {
-        // Arrange
-        var rawHeaders = new Dictionary<string, string>
-        {
-            { "fake_header_one", string.Empty },
-            { "fake_header_two", string.Empty }
-        };
-
-        var metadata = new List<RestApiOperationParameter>
-        {
-            new(name : "fake_header_one", type : "string", isRequired : true, expand : false, location : RestApiOperationParameterLocation.Header, style : RestApiOperationParameterStyle.Simple),
-            new(name: "fake_header_two", type : "string", isRequired : false, expand : false, location : RestApiOperationParameterLocation.Header, style : RestApiOperationParameterStyle.Simple, defaultValue: "fake_header_two_default_value")
-        };
-
-        var arguments = new Dictionary<string, string>
-        {
-            { "fake_header_one", "fake_header_one_value" } //Argument is only provided for the first parameter and not for the second one
-        };
-
-        var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata, rawHeaders);
-
-        // Act
-        var headers = sut.RenderHeaders(arguments);
-
-        // Assert
-        Assert.Equal(2, headers.Count);
-
-        var headerOne = headers["fake_header_one"];
-        Assert.Equal("fake_header_one_value", headerOne);
-
-        var headerTwo = headers["fake_header_two"];
-        Assert.Equal("fake_header_two_default_value", headerTwo);
-    }
-}
diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/ResourcePluginsProvider.cs b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/ResourcePluginsProvider.cs
deleted file mode 100644
index 2df0a9421971..000000000000
--- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/ResourcePluginsProvider.cs
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.IO;
-using System.Resources;
-
-namespace SemanticKernel.Functions.UnitTests.OpenAPI.TestPlugins;
-
-internal static class ResourcePluginsProvider
-{
-    /// <summary>
-    /// Loads OpenApi document from assembly resource.
-    /// </summary>
-    /// <param name="resourceName">The resource name.</param>
-    /// <returns>The OpenApi document resource stream.</returns>
- public static Stream LoadFromResource(string resourceName) - { - var type = typeof(ResourcePluginsProvider); - - var stream = type.Assembly.GetManifestResourceStream(type, resourceName); - if (stream == null) - { - throw new MissingManifestResourceException($"Unable to load OpenApi plugin from assembly resource '{resourceName}'."); - } - - return stream; - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV2_0.json b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV2_0.json deleted file mode 100644 index 55709c56583c..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV2_0.json +++ /dev/null @@ -1,253 +0,0 @@ -{ - "basePath": "/", - "consumes": [], - "definitions": {}, - "host": "my-key-vault.vault.azure.net", - "info": { - "description": "A sample connector for the Azure Key Vault service. This connector is built for the Azure Key Vault REST API. You can see the details of the API here: https://docs.microsoft.com/rest/api/keyvault/.", - "title": "Azure Key Vault [Sample]", - "version": "1.0" - }, - "parameters": {}, - "paths": { - "/secrets/{secret-name}": { - "get": { - "description": "Get a specified secret from a given key vault. For details, see: https://docs.microsoft.com/rest/api/keyvault/getsecret/getsecret.", - "operationId": "GetSecret", - "parameters": [ - { - "in": "path", - "name": "secret-name", - "required": true, - "type": "string" - }, - { - "default": "7.0", - "in": "query", - "name": "api-version", - "required": true, - "type": "string", - "x-ms-visibility": "internal" - } - ], - "responses": { - "200": { - "description": "default", - "schema": { - "properties": { - "attributes": { - "description": "attributes", - "properties": { - "created": { - "description": "created", - "format": "int32", - "type": "integer" - }, - "enabled": { - "description": "enabled", - "type": "boolean" - }, - "recoverylevel": { - "description": "recoverylevel", - "type": "string" - }, - "updated": { - "description": "updated", - "format": "int32", - "type": "integer" - } - }, - "type": "object" - }, - "id": { - "description": "id", - "type": "string" - }, - "value": { - "description": "value", - "format": "byte", - "type": "string" - } - }, - "type": "object" - } - } - }, - "summary": "Get secret" - }, - "put": { - "description": "Sets a secret in a specified key vault.", - "operationId": "SetSecret", - "parameters": [ - { - "in": "path", - "name": "secret-name", - "required": true, - "type": "string" - }, - { - "default": "7.0", - "in": "query", - "name": "api-version", - "required": true, - "type": "string", - "x-ms-visibility": "internal" - }, - { - "in": "body", - "name": "body", - "required": true, - "schema": { - "properties": { - "attributes": { - "description": "attributes", - "properties": { - "enabled": { - "description": "Determines whether the object is enabled.", - "type": "boolean" - } - }, - "type": "object" - }, - "value": { - "description": "The value of the secret.", - "type": "string" - } - }, - "required": [ - "value" - ], - "type": "object" - } - }, - { - "name": "Accept", - "in": "header", - "required": false, - "description": "Indicates which content types, expressed as MIME types, the client is able to understand.", - "type": "string", - "default": "application/json", - "x-ms-visibility": "internal" - }, - { - "name": "X-API-Version", - "in": "header", - "description": "Requested API version.", - "required": true, - "type": "integer", - "default": 10, - "x-ms-visibility": 
"internal", - "x-ms-summary": "X-API-Version" - }, - { - "collectionFormat": "csv", - "description": "The comma separated list of operation ids.", - "in": "header", - "items": { - "type": "string" - }, - "name": "X-Operation-Csv-Ids", - "required": false, - "type": "array", - "x-ms-summary": "Ids", - "x-ms-visibility": "advanced" - } - ], - "responses": { - "200": { - "description": "default", - "schema": { - "properties": { - "attributes": { - "description": "attributes", - "properties": { - "created": { - "description": "created", - "format": "int32", - "type": "integer" - }, - "enabled": { - "description": "enabled", - "type": "boolean" - }, - "recoverylevel": { - "description": "recoverylevel", - "type": "string" - }, - "updated": { - "description": "updated", - "format": "int32", - "type": "integer" - } - }, - "type": "object" - }, - "id": { - "description": "id", - "type": "string" - }, - "value": { - "description": "value", - "type": "string" - } - }, - "type": "object" - } - } - }, - "summary": "Create or update secret value" - } - }, - "/FunPlugin/Excuses": { - "post": { - "summary": "Turn a scenario into a creative or humorous excuse to send your boss", - "operationId": "Excuses", - "consumes": [ - "text/plain" - ], - "produces": [ - "text/plain" - ], - "parameters": [ - { - "in": "body", - "name": "body", - "description": "excuse event", - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "The OK response", - "schema": { - "type": "string" - } - } - } - } - } - }, - "produces": [], - "responses": {}, - "schemes": [ - "https" - ], - "security": [ - { - "oauth2_auth": [] - } - ], - "securityDefinitions": { - "oauth2_auth": { - "authorizationUrl": "https://login.windows.net/common/oauth2/authorize", - "flow": "accessCode", - "scopes": {}, - "tokenUrl": "https://login.windows.net/common/oauth2/authorize", - "type": "oauth2" - } - }, - "swagger": "2.0", - "tags": [] -} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV3_0.json b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV3_0.json deleted file mode 100644 index eac3eace5ee2..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV3_0.json +++ /dev/null @@ -1,225 +0,0 @@ -{ - "openapi": "3.0.1", - "info": { - "title": "Azure Key Vault [Sample]", - "description": "A sample connector for the Azure Key Vault service. This connector is built for the Azure Key Vault REST API. You can see the details of the API here: https://docs.microsoft.com/rest/api/keyvault/.", - "version": "1.0" - }, - "servers": [ - { - "url": "https://my-key-vault.vault.azure.net" - } - ], - "paths": { - "/secrets/{secret-name}": { - "get": { - "summary": "Get secret", - "description": "Get a specified secret from a given key vault. 
For details, see: https://docs.microsoft.com/rest/api/keyvault/getsecret/getsecret.", - "operationId": "GetSecret", - "parameters": [ - { - "name": "secret-name", - "in": "path", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "api-version", - "in": "query", - "required": true, - "schema": { - "type": "string", - "default": "7.0" - }, - "x-ms-visibility": "internal" - }, - { - "name": "nonExplodeFormParam", - "in": "query", - "style": "form", - "explode": false, - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - }, - { - "name": "explodeFormParam", - "in": "query", - "style": "form", - "explode": true, - "schema": { - "type": "array", - "items": { - "type": "string" - } - } - }, - { - "name": "anotherExplodeFormParam", - "in": "query", - "schema": { - "type": "array", - "items": { - "type": "integer" - } - } - } - ], - "responses": { - "200": { - "description": "default" - } - } - }, - "put": { - "summary": "Create or update secret value", - "description": "Sets a secret in a specified key vault.", - "operationId": "SetSecret", - "parameters": [ - { - "name": "secret-name", - "in": "path", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "api-version", - "in": "query", - "required": true, - "schema": { - "type": "string", - "default": "7.0" - }, - "x-ms-visibility": "internal" - }, - { - "name": "Accept", - "in": "header", - "description": "Indicates which content types, expressed as MIME types, the client is able to understand.", - "schema": { - "type": "string", - "default": "application/json" - }, - "x-ms-visibility": "internal" - }, - { - "name": "X-API-Version", - "in": "header", - "description": "Requested API version.", - "required": true, - "schema": { - "type": "integer", - "default": 10 - }, - "x-ms-visibility": "internal", - "x-ms-summary": "X-API-Version" - }, - { - "name": "X-Operation-Csv-Ids", - "in": "header", - "description": "The comma separated list of operation ids.", - "style": "simple", - "schema": { - "type": "array", - "items": { - "type": "string" - } - }, - "x-ms-summary": "Ids", - "x-ms-visibility": "advanced" - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "required": [ - "value" - ], - "type": "object", - "properties": { - "attributes": { - "type": "object", - "properties": { - "enabled": { - "type": "boolean", - "description": "Determines whether the object is enabled." - } - }, - "description": "attributes" - }, - "value": { - "type": "string", - "description": "The value of the secret." 
- } - } - } - } - }, - "required": true, - "x-bodyName": "body" - }, - "responses": { - "200": { - "description": "default" - } - } - } - }, - "/FunPlugin/Excuses": { - "post": { - "summary": "Turn a scenario into a creative or humorous excuse to send your boss", - "operationId": "Excuses", - "requestBody": { - "description": "excuse event", - "content": { - "text/plain": { - "schema": { - "type": "string" - } - } - }, - "x-bodyName": "body" - }, - "responses": { - "200": { - "description": "The OK response", - "content": { - "text/plain": { - "schema": { - "type": "string" - } - } - } - } - } - } - } - }, - "components": { - "securitySchemes": { - "oauth2_auth": { - "type": "oauth2", - "flows": { - "authorizationCode": { - "authorizationUrl": "https://login.windows.net/common/oauth2/authorize", - "tokenUrl": "https://login.windows.net/common/oauth2/authorize", - "scopes": { } - } - } - } - } - }, - "security": [ - { - "oauth2_auth": [ ] - } - ] -} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV3_1.yaml b/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV3_1.yaml deleted file mode 100644 index 2552d4d348e2..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/documentV3_1.yaml +++ /dev/null @@ -1,146 +0,0 @@ -openapi: 3.1.0 -info: - title: 'Azure Key Vault [Sample]' - description: 'A sample connector for the Azure Key Vault service. This connector is built for the Azure Key Vault REST API. You can see the details of the API here: https://docs.microsoft.com/rest/api/keyvault/.' - version: '1.0' -servers: - - url: https://my-key-vault.vault.azure.net -paths: - '/secrets/{secret-name}': - get: - summary: Get secret - description: 'Get a specified secret from a given key vault. For details, see: https://docs.microsoft.com/rest/api/keyvault/getsecret/getsecret.' - operationId: GetSecret - parameters: - - name: secret-name - in: path - required: true - schema: - type: string - - name: api-version - in: query - required: true - schema: - type: string - default: '7.0' - x-ms-visibility: internal - - name: nonExplodeFormParam - in: query - style: form - explode: false - schema: - type: array - items: - type: string - - name: explodeFormParam - in: query - style: form - explode: true - schema: - type: array - items: - type: string - - name: anotherExplodeFormParam - in: query - schema: - type: array - items: - type: integer - responses: - '200': - description: default - put: - summary: Create or update secret value - description: Sets a secret in a specified key vault. - operationId: SetSecret - parameters: - - name: secret-name - in: path - required: true - schema: - type: string - - name: api-version - in: query - required: true - schema: - type: string - default: '7.0' - x-ms-visibility: internal - - name: Accept - in: header - description: 'Indicates which content types, expressed as MIME types, the client is able to understand.' - schema: - type: string - default: application/json - x-ms-visibility: internal - - name: X-API-Version - in: header - description: Requested API version. - required: true - schema: - type: integer - default: 10 - x-ms-visibility: internal - x-ms-summary: X-API-Version - - name: X-Operation-Csv-Ids - in: header - description: The comma separated list of operation ids. 
- style: simple - schema: - type: array - items: - type: string - x-ms-summary: Ids - x-ms-visibility: advanced - requestBody: - content: - application/json: - schema: - required: - - value - type: object - properties: - attributes: - type: object - properties: - enabled: - type: boolean - description: Determines whether the object is enabled. - description: attributes - value: - type: string - description: The value of the secret. - required: true - x-bodyName: body - responses: - '200': - description: default - /FunPlugin/Excuses: - post: - summary: Turn a scenario into a creative or humorous excuse to send your boss - operationId: Excuses - requestBody: - description: excuse event - content: - text/plain: - schema: - type: string - x-bodyName: body - responses: - '200': - description: The OK response - content: - text/plain: - schema: - type: string -components: - securitySchemes: - oauth2_auth: - type: oauth2 - flows: - authorizationCode: - authorizationUrl: https://login.windows.net/common/oauth2/authorize - tokenUrl: https://login.windows.net/common/oauth2/authorize - scopes: { } -security: - - oauth2_auth: [ ] diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/KernelOpenApiPluginExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/KernelOpenApiPluginExtensionsTests.cs new file mode 100644 index 000000000000..c7c23abb55ab --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/KernelOpenApiPluginExtensionsTests.cs @@ -0,0 +1,276 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Net.Mime; +using System.Text; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using SemanticKernel.Functions.UnitTests.OpenApi.TestPlugins; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi; + +public sealed class KernelOpenApiPluginExtensionsTests : IDisposable +{ + /// + /// System under test - an instance of OpenApiDocumentParser class. + /// + private readonly OpenApiDocumentParser _sut; + + /// + /// OpenAPI function execution parameters. + /// + private readonly OpenApiFunctionExecutionParameters _executionParameters; + + /// + /// OpenAPI document stream. + /// + private readonly Stream _openApiDocument; + + /// + /// Kernel instance. + /// + private readonly Kernel _kernel; + + /// + /// Creates an instance of a class. 
+ /// + public KernelOpenApiPluginExtensionsTests() + { + this._kernel = new Kernel(); + + this._executionParameters = new OpenApiFunctionExecutionParameters() { EnableDynamicPayload = false }; + + this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV2_0.json"); + + this._sut = new OpenApiDocumentParser(); + } + + [Fact] + public async Task ItCanIncludeOpenApiOperationParameterTypesIntoFunctionParametersViewAsync() + { + // Act + var plugin = await this._kernel.ImportPluginFromOpenApiAsync("fakePlugin", this._openApiDocument, this._executionParameters); + + // Assert + var setSecretFunction = plugin["SetSecret"]; + Assert.NotNull(setSecretFunction); + + var functionView = setSecretFunction.Metadata; + Assert.NotNull(functionView); + + var secretNameParameter = functionView.Parameters.First(p => p.Name == "secret_name"); + Assert.NotNull(secretNameParameter.Schema); + Assert.Equal("string", secretNameParameter.Schema!.RootElement.GetProperty("type").GetString()); + + var apiVersionParameter = functionView.Parameters.First(p => p.Name == "api_version"); + Assert.Equal("string", apiVersionParameter.Schema!.RootElement.GetProperty("type").GetString()); + + var payloadParameter = functionView.Parameters.First(p => p.Name == "payload"); + Assert.NotNull(payloadParameter.Schema); + Assert.Equal("object", payloadParameter.Schema!.RootElement.GetProperty("type").GetString()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task ItUsesServerUrlOverrideIfProvidedAsync(bool removeServersProperty) + { + // Arrange + const string DocumentUri = "http://localhost:3001/openapi.json"; + const string ServerUrlOverride = "https://server-override.com/"; + + var openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV3_0.json"); + + if (removeServersProperty) + { + openApiDocument = OpenApiTestHelper.ModifyOpenApiDocument(openApiDocument, (doc) => + { + doc.Remove("servers"); + }); + } + + using var messageHandlerStub = new HttpMessageHandlerStub(openApiDocument); + using var httpClient = new HttpClient(messageHandlerStub, false); + + this._executionParameters.HttpClient = httpClient; + this._executionParameters.ServerUrlOverride = new Uri(ServerUrlOverride); + + var arguments = this.GetFakeFunctionArguments(); + + // Act + var plugin = await this._kernel.ImportPluginFromOpenApiAsync("fakePlugin", new Uri(DocumentUri), this._executionParameters); + var setSecretFunction = plugin["SetSecret"]; + + messageHandlerStub.ResetResponse(); + + var result = await this._kernel.InvokeAsync(setSecretFunction, arguments); + + // Assert + Assert.NotNull(messageHandlerStub.RequestUri); + Assert.StartsWith(ServerUrlOverride, messageHandlerStub.RequestUri.AbsoluteUri, StringComparison.Ordinal); + } + + [Theory] + [InlineData("documentV2_0.json")] + [InlineData("documentV3_0.json")] + public async Task ItUsesServerUrlFromOpenApiDocumentAsync(string documentFileName) + { + // Arrange + const string DocumentUri = "http://localhost:3001/openapi.json"; + const string ServerUrlFromDocument = "https://my-key-vault.vault.azure.net/"; + + var openApiDocument = ResourcePluginsProvider.LoadFromResource(documentFileName); + + using var messageHandlerStub = new HttpMessageHandlerStub(openApiDocument); + using var httpClient = new HttpClient(messageHandlerStub, false); + + this._executionParameters.HttpClient = httpClient; + + var arguments = this.GetFakeFunctionArguments(); + + // Act + var plugin = await this._kernel.ImportPluginFromOpenApiAsync("fakePlugin", new 
Uri(DocumentUri), this._executionParameters); + var setSecretFunction = plugin["SetSecret"]; + + messageHandlerStub.ResetResponse(); + + var result = await this._kernel.InvokeAsync(setSecretFunction, arguments); + + // Assert + Assert.NotNull(messageHandlerStub.RequestUri); + Assert.StartsWith(ServerUrlFromDocument, messageHandlerStub.RequestUri.AbsoluteUri, StringComparison.Ordinal); + } + + [Theory] + [InlineData("http://localhost:3001/openapi.json", "http://localhost:3001/", "documentV2_0.json")] + [InlineData("http://localhost:3001/openapi.json", "http://localhost:3001/", "documentV3_0.json")] + [InlineData("https://api.example.com/openapi.json", "https://api.example.com/", "documentV2_0.json")] + [InlineData("https://api.example.com/openapi.json", "https://api.example.com/", "documentV3_0.json")] + [SuppressMessage("Design", "CA1054:URI-like parameters should not be strings", Justification = "Required for test data.")] + public async Task ItUsesOpenApiDocumentHostUrlWhenServerUrlIsNotProvidedAsync(string documentUri, string expectedServerUrl, string documentFileName) + { + // Arrange + var openApiDocument = ResourcePluginsProvider.LoadFromResource(documentFileName); + + using var content = OpenApiTestHelper.ModifyOpenApiDocument(openApiDocument, (doc) => + { + doc.Remove("servers"); + doc.Remove("host"); + doc.Remove("schemes"); + }); + + using var messageHandlerStub = new HttpMessageHandlerStub(content); + using var httpClient = new HttpClient(messageHandlerStub, false); + + this._executionParameters.HttpClient = httpClient; + + var arguments = this.GetFakeFunctionArguments(); + + // Act + var plugin = await this._kernel.ImportPluginFromOpenApiAsync("fakePlugin", new Uri(documentUri), this._executionParameters); + var setSecretFunction = plugin["SetSecret"]; + + messageHandlerStub.ResetResponse(); + + var result = await this._kernel.InvokeAsync(setSecretFunction, arguments); + + // Assert + Assert.NotNull(messageHandlerStub.RequestUri); + Assert.StartsWith(expectedServerUrl, messageHandlerStub.RequestUri.AbsoluteUri, StringComparison.Ordinal); + } + + [Fact] + public async Task ItShouldRespectRunAsyncCancellationTokenOnExecutionAsync() + { + // Arrange + using var messageHandlerStub = new HttpMessageHandlerStub(); + messageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + using var httpClient = new HttpClient(messageHandlerStub, false); + + this._executionParameters.HttpClient = httpClient; + + var fakePlugin = new FakePlugin(); + + using var registerCancellationToken = new System.Threading.CancellationTokenSource(); + using var executeCancellationToken = new System.Threading.CancellationTokenSource(); + + var openApiPlugins = await this._kernel.ImportPluginFromOpenApiAsync("fakePlugin", this._openApiDocument, this._executionParameters, registerCancellationToken.Token); + + var kernel = new Kernel(); + + var arguments = new KernelArguments + { + { "secret-name", "fake-secret-name" }, + { "api-version", "fake-api-version" } + }; + + // Act + registerCancellationToken.Cancel(); + var result = await kernel.InvokeAsync(openApiPlugins["GetSecret"], arguments, executeCancellationToken.Token); + + // Assert + Assert.NotNull(result); + + var response = result.GetValue(); + + //Check original response + Assert.NotNull(response); + Assert.Equal("fake-content", response.Content); + } + + [Fact] + public async Task ItShouldSanitizeOperationNameAsync() + { + // Arrange + var openApiDocument = 
ResourcePluginsProvider.LoadFromResource("documentV3_0.json"); + + using var content = OpenApiTestHelper.ModifyOpenApiDocument(openApiDocument, (doc) => + { + doc["paths"]!["/secrets/{secret-name}"]!["get"]!["operationId"] = "issues/create-mile.stone"; + }); + + // Act + var plugin = await this._kernel.ImportPluginFromOpenApiAsync("fakePlugin", content, this._executionParameters); + + // Assert + Assert.True(plugin.TryGetFunction("IssuesCreatemilestone", out var _)); + } + + public void Dispose() + { + this._openApiDocument.Dispose(); + } + + #region private ================================================================================ + + private KernelArguments GetFakeFunctionArguments() + { + return new KernelArguments + { + ["secret-name"] = "fake-secret-name", + ["api-version"] = "7.0", + ["X-API-Version"] = 6, + ["payload"] = "fake-payload" + }; + } + + private sealed class FakePlugin + { + public string? ParameterValueFakeMethodCalledWith { get; private set; } + + [KernelFunction] + public void DoFakeAction(string parameter) + { + this.ParameterValueFakeMethodCalledWith = parameter; + } + } + + #endregion +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/OpenApiSchemaExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/OpenApiSchemaExtensionsTests.cs new file mode 100644 index 000000000000..95bfef3271cc --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/OpenApiSchemaExtensionsTests.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Globalization; +using Microsoft.OpenApi.Any; +using Microsoft.OpenApi.Models; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi.Extensions; +public class OpenApiSchemaExtensionsTests +{ + [Fact] + public void ItShouldConvertOpenApiSchemaUsingInvariantCulture() + { + // Arrange + var schema = new OpenApiSchema + { + Type = "object", + Properties = new Dictionary + { + ["property1"] = new OpenApiSchema + { + Type = "number", + Format = "double", + Default = new OpenApiDouble(12.01) + } + } + }; + + var currentCulture = CultureInfo.CurrentCulture; // Backup current culture + + // Act & Assert + try + { + CultureInfo.CurrentCulture = new CultureInfo("fr-FR"); // French culture uses comma as decimal separator + + var result = OpenApiSchemaExtensions.ToJsonSchema(schema); // Should use invariant culture + + Assert.True(result.RootElement.TryGetProperty("properties", out var properties)); + Assert.True(properties.TryGetProperty("property1", out var property2)); + Assert.Equal(12.01, property2.GetProperty("default").GetDouble()); + } + finally + { + CultureInfo.CurrentCulture = currentCulture; // Restore current culture + } + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/RestApiOperationExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/RestApiOperationExtensionsTests.cs new file mode 100644 index 000000000000..e20836b38309 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/RestApiOperationExtensionsTests.cs @@ -0,0 +1,338 @@ +// Copyright (c) Microsoft. All rights reserved. 
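[Note: the following is a standalone sketch, not part of this change set. It mirrors the import-and-invoke flow exercised by KernelOpenApiPluginExtensionsTests above; the document URL, plugin name, and argument values are placeholders, and only APIs already used in these tests (ImportPluginFromOpenApiAsync, OpenApiFunctionExecutionParameters, KernelArguments, Kernel.InvokeAsync) are assumed.]

// Minimal consumer-side sketch of the flow the tests above cover.
using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.OpenApi;

var kernel = new Kernel();

// Placeholder document location and plugin name.
var plugin = await kernel.ImportPluginFromOpenApiAsync(
    "fakePlugin",
    new Uri("http://localhost:3001/openapi.json"),
    new OpenApiFunctionExecutionParameters { EnableDynamicPayload = false });

// Arguments matching the sample Key Vault document's SetSecret operation.
var arguments = new KernelArguments
{
    ["secret-name"] = "fake-secret-name",
    ["api-version"] = "7.0",
    ["payload"] = "fake-payload"
};

var result = await kernel.InvokeAsync(plugin["SetSecret"], arguments);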
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi; + +public class RestApiOperationExtensionsTests +{ + [Theory] + [InlineData("PUT")] + [InlineData("POST")] + public void ItShouldAddPayloadAndContentTypeParametersByDefault(string method) + { + //Arrange + var payload = CreateTestJsonPayload(); + + var operation = CreateTestOperation(method, payload); + + //Act + var parameters = operation.GetParameters(addPayloadParamsFromMetadata: false); + + //Assert + Assert.NotNull(parameters); + + var payloadParam = parameters.FirstOrDefault(p => p.Name == "payload"); + Assert.NotNull(payloadParam); + Assert.Equal("object", payloadParam.Type); + Assert.True(payloadParam.IsRequired); + Assert.Equal("REST API request body.", payloadParam.Description); + + var contentTypeParam = parameters.FirstOrDefault(p => p.Name == "content-type"); + Assert.NotNull(contentTypeParam); + Assert.Equal("string", contentTypeParam.Type); + Assert.False(contentTypeParam.IsRequired); + Assert.Equal("Content type of REST API request body.", contentTypeParam.Description); + } + + [Theory] + [InlineData("PUT")] + [InlineData("POST")] + public void ItShouldAddPayloadAndContentTypeParametersWhenSpecified(string method) + { + //Arrange + var payload = CreateTestJsonPayload(); + + var operation = CreateTestOperation(method, payload); + + //Act + var parameters = operation.GetParameters(addPayloadParamsFromMetadata: false); + + //Assert + Assert.NotNull(parameters); + + var payloadProp = parameters.FirstOrDefault(p => p.Name == "payload"); + Assert.NotNull(payloadProp); + Assert.Equal("object", payloadProp.Type); + Assert.True(payloadProp.IsRequired); + Assert.Equal("REST API request body.", payloadProp.Description); + + var contentTypeProp = parameters.FirstOrDefault(p => p.Name == "content-type"); + Assert.NotNull(contentTypeProp); + Assert.Equal("string", contentTypeProp.Type); + Assert.False(contentTypeProp.IsRequired); + Assert.Equal("Content type of REST API request body.", contentTypeProp.Description); + } + + [Theory] + [InlineData("PUT")] + [InlineData("POST")] + public void ItShouldAddPayloadAndContentTypePropertiesForPlainTextContentType(string method) + { + //Arrange + var payload = CreateTestTextPayload(); + + var operation = CreateTestOperation(method, payload); + + //Act + var parameters = operation.GetParameters(addPayloadParamsFromMetadata: false); + + //Assert + Assert.NotNull(parameters); + + var payloadParam = parameters.FirstOrDefault(p => p.Name == "payload"); + Assert.NotNull(payloadParam); + Assert.Equal("string", payloadParam.Type); + Assert.True(payloadParam.IsRequired); + Assert.Equal("REST API request body.", payloadParam.Description); + + var contentTypeParam = parameters.FirstOrDefault(p => p.Name == "content-type"); + Assert.NotNull(contentTypeParam); + Assert.Equal("string", contentTypeParam.Type); + Assert.False(contentTypeParam.IsRequired); + Assert.Equal("Content type of REST API request body.", contentTypeParam.Description); + } + + [Theory] + [InlineData("PUT")] + [InlineData("POST")] + public void ItShouldAddPayloadAndContentTypePropertiesIfParametersFromPayloadMetadataAreNotRequired(string method) + { + //Arrange + var payload = CreateTestJsonPayload(); + + var operation = CreateTestOperation(method, payload); + + //Act + var parameters = operation.GetParameters(addPayloadParamsFromMetadata: false); 
+ + //Assert + Assert.NotNull(parameters); + + var payloadParam = parameters.FirstOrDefault(p => p.Name == "payload"); + Assert.NotNull(payloadParam); + Assert.Equal("object", payloadParam.Type); + Assert.True(payloadParam.IsRequired); + Assert.Equal("REST API request body.", payloadParam.Description); + + var contentTypeParam = parameters.FirstOrDefault(p => p.Name == "content-type"); + Assert.NotNull(contentTypeParam); + Assert.Equal("string", contentTypeParam.Type); + Assert.False(contentTypeParam.IsRequired); + Assert.Equal("Content type of REST API request body.", contentTypeParam.Description); + } + + [Theory] + [InlineData("PUT")] + [InlineData("POST")] + public void ItShouldAddParametersDeclaredInPayloadMetadata(string method) + { + //Arrange + var payload = CreateTestJsonPayload(); + + var operation = CreateTestOperation(method, payload); + + //Act + var parameters = operation.GetParameters(addPayloadParamsFromMetadata: true); + + //Assert + Assert.NotNull(parameters); + + Assert.Equal(5, parameters.Count); //5 props from payload + + var name = parameters.FirstOrDefault(p => p.Name == "name"); + Assert.NotNull(name); + Assert.Equal("string", name.Type); + Assert.True(name.IsRequired); + Assert.Equal("The name.", name.Description); + + var landmarks = parameters.FirstOrDefault(p => p.Name == "landmarks"); + Assert.NotNull(landmarks); + Assert.Equal("array", landmarks.Type); + Assert.False(landmarks.IsRequired); + Assert.Equal("The landmarks.", landmarks.Description); + + var leader = parameters.FirstOrDefault(p => p.Name == "leader"); + Assert.NotNull(leader); + Assert.Equal("string", leader.Type); + Assert.True(leader.IsRequired); + Assert.Equal("The leader.", leader.Description); + + var population = parameters.FirstOrDefault(p => p.Name == "population"); + Assert.NotNull(population); + Assert.Equal("integer", population.Type); + Assert.True(population.IsRequired); + Assert.Equal("The population.", population.Description); + + var hasMagicWards = parameters.FirstOrDefault(p => p.Name == "hasMagicWards"); + Assert.NotNull(hasMagicWards); + Assert.Equal("boolean", hasMagicWards.Type); + Assert.False(hasMagicWards.IsRequired); + Assert.Null(hasMagicWards.Description); + } + + [Theory] + [InlineData("PUT")] + [InlineData("POST")] + public void ItShouldAddNamespaceToParametersDeclaredInPayloadMetadata(string method) + { + //Arrange + var payload = CreateTestJsonPayload(); + + var operation = CreateTestOperation(method, payload); + + //Act + var parameters = operation.GetParameters(addPayloadParamsFromMetadata: true, enablePayloadNamespacing: true); + + //Assert + Assert.NotNull(parameters); + + Assert.Equal(5, parameters.Count); //5 props from payload + + var name = parameters.FirstOrDefault(p => p.Name == "name"); + Assert.NotNull(name); + Assert.Equal("string", name.Type); + Assert.True(name.IsRequired); + Assert.Equal("The name.", name.Description); + + var landmarks = parameters.FirstOrDefault(p => p.Name == "location.landmarks"); + Assert.NotNull(landmarks); + Assert.Equal("array", landmarks.Type); + Assert.False(landmarks.IsRequired); + Assert.Equal("The landmarks.", landmarks.Description); + + var leader = parameters.FirstOrDefault(p => p.Name == "rulingCouncil.leader"); + Assert.NotNull(leader); + Assert.Equal("string", leader.Type); + Assert.True(leader.IsRequired); + Assert.Equal("The leader.", leader.Description); + + var population = parameters.FirstOrDefault(p => p.Name == "population"); + Assert.NotNull(population); + Assert.Equal("integer", population.Type); + 
Assert.True(population.IsRequired); + Assert.Equal("The population.", population.Description); + + var hasMagicWards = parameters.FirstOrDefault(p => p.Name == "hasMagicWards"); + Assert.NotNull(hasMagicWards); + Assert.Equal("boolean", hasMagicWards.Type); + Assert.False(hasMagicWards.IsRequired); + Assert.Null(hasMagicWards.Description); + } + + [Theory] + [InlineData("PUT")] + [InlineData("POST")] + public void ItShouldThrowExceptionIfPayloadMetadataDescribingParametersIsMissing(string method) + { + //Arrange + var operation = CreateTestOperation(method, null); + + //Act + Assert.Throws(() => operation.GetParameters(addPayloadParamsFromMetadata: true, enablePayloadNamespacing: true)); + } + + [Theory] + [InlineData("PUT")] + [InlineData("POST")] + public void ItShouldSetAlternativeNameToParametersForPutAndPostOperation(string method) + { + //Arrange + var latitude = new RestApiOperationPayloadProperty("location.latitude", "number", false, new List()); + var place = new RestApiOperationPayloadProperty("place", "string", true, new List()); + + var payload = new RestApiOperationPayload("application/json", new[] { place, latitude }); + + var operation = CreateTestOperation(method, payload); + + //Act + var parameters = operation.GetParameters(addPayloadParamsFromMetadata: true); + + //Assert + Assert.NotNull(parameters); + + var placeProp = parameters.FirstOrDefault(p => p.Name == "place"); + Assert.NotNull(placeProp); + Assert.Equal("place", placeProp.AlternativeName); + + var personNameProp = parameters.FirstOrDefault(p => p.Name == "location.latitude"); + Assert.NotNull(personNameProp); + Assert.Equal("location_latitude", personNameProp.AlternativeName); + } + + private static RestApiOperation CreateTestOperation(string method, RestApiOperationPayload? payload = null, Uri? 
url = null) + { + return new RestApiOperation( + id: "fake-id", + serverUrl: url, + path: "fake-path", + method: new HttpMethod(method), + description: "fake-description", + parameters: new List(), + payload: payload); + } + + private static RestApiOperationPayload CreateTestJsonPayload() + { + var name = new RestApiOperationPayloadProperty( + name: "name", + type: "string", + isRequired: true, + properties: new List(), + description: "The name."); + + var leader = new RestApiOperationPayloadProperty( + name: "leader", + type: "string", + isRequired: true, + properties: new List(), + description: "The leader."); + + var landmarks = new RestApiOperationPayloadProperty( + name: "landmarks", + type: "array", + isRequired: false, + properties: new List(), + description: "The landmarks."); + + var location = new RestApiOperationPayloadProperty( + name: "location", + type: "object", + isRequired: true, + properties: new[] { landmarks }, + description: "The location."); + + var rulingCouncil = new RestApiOperationPayloadProperty( + name: "rulingCouncil", + type: "object", + isRequired: true, + properties: new[] { leader }, + description: "The ruling council."); + + var population = new RestApiOperationPayloadProperty( + name: "population", + type: "integer", + isRequired: true, + properties: new List(), + description: "The population."); + + var hasMagicWards = new RestApiOperationPayloadProperty( + name: "hasMagicWards", + type: "boolean", + isRequired: false, + properties: new List()); + + return new RestApiOperationPayload("application/json", new[] { name, location, rulingCouncil, population, hasMagicWards }); + } + + private static RestApiOperationPayload CreateTestTextPayload() + { + return new RestApiOperationPayload("text/plain", new List()); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/HttpMessageHandlerStub.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/HttpMessageHandlerStub.cs similarity index 97% rename from dotnet/src/Functions/Functions.UnitTests/OpenAPI/HttpMessageHandlerStub.cs rename to dotnet/src/Functions/Functions.UnitTests/OpenApi/HttpMessageHandlerStub.cs index 542aa33a7c91..ec503c11abe5 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/HttpMessageHandlerStub.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/HttpMessageHandlerStub.cs @@ -9,7 +9,7 @@ using System.Threading; using System.Threading.Tasks; -namespace SemanticKernel.Functions.UnitTests.OpenAPI; +namespace SemanticKernel.Functions.UnitTests.OpenApi; internal sealed class HttpMessageHandlerStub : DelegatingHandler { diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs new file mode 100644 index 000000000000..36f7601dd02e --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.IO; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Moq; +using SemanticKernel.Functions.UnitTests.OpenApi.TestPlugins; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi.OpenAI; + +public sealed class KernelOpenAIPluginExtensionsTests : IDisposable +{ + /// + /// OpenAPI document stream. + /// + private readonly Stream _openApiDocument; + + /// + /// Kernel instance. + /// + private readonly Kernel _kernel; + + /// + /// Creates an instance of a class. + /// + public KernelOpenAIPluginExtensionsTests() + { + this._kernel = new Kernel(); + + this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV2_0.json"); + } + + [Fact] + public async Task ItUsesOauthFromOpenAiPluginManifestWhenFetchingOpenApiSpecAsync() + { + await this.ItRunsTestAsync("ai-plugin.json"); + } + + [Fact] + public async Task ItUsesHttpAuthFromOpenAiPluginManifestWhenFetchingOpenApiSpecAsync() + { + await this.ItRunsTestAsync("ai-plugin2.json"); + } + + private async Task ItRunsTestAsync(string resourceName) + { + //Arrange + using var reader = new StreamReader(ResourcePluginsProvider.LoadFromResource(resourceName), Encoding.UTF8); + JsonNode openAIDocumentContent = JsonNode.Parse(await reader.ReadToEndAsync())!; + var actualOpenAIAuthConfig = + openAIDocumentContent["auth"].Deserialize( + new JsonSerializerOptions + { + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }, + })!; + + using var openAiDocument = ResourcePluginsProvider.LoadFromResource(resourceName); + using var messageHandlerStub = new HttpMessageHandlerStub(this._openApiDocument); + + using var httpClient = new HttpClient(messageHandlerStub, false); + var authCallbackMock = new Mock(); + var executionParameters = new OpenAIFunctionExecutionParameters { HttpClient = httpClient, AuthCallback = authCallbackMock.Object }; + + var pluginName = "fakePlugin"; + + //Act + var plugin = await this._kernel.ImportPluginFromOpenAIAsync(pluginName, openAiDocument, executionParameters); + + //Assert + var setSecretFunction = plugin["SetSecret"]; + Assert.NotNull(setSecretFunction); + + authCallbackMock.Verify(target => target.Invoke( + It.IsAny(), + It.Is(expectedPluginName => expectedPluginName == pluginName), + It.Is(expectedOpenAIAuthConfig => expectedOpenAIAuthConfig.Scope == actualOpenAIAuthConfig!.Scope), + It.IsAny()), + Times.Exactly(1)); + } + + public void Dispose() + { + this._openApiDocument.Dispose(); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV20Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV20Tests.cs new file mode 100644 index 000000000000..ab3150adb130 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV20Tests.cs @@ -0,0 +1,376 @@ +// Copyright (c) Microsoft. All rights reserved. 
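[Note: the following is a standalone sketch, not part of this change set. It shows the parser usage that the OpenApiDocumentParser tests below exercise; the file path is a placeholder, and it assumes OpenApiDocumentParser and its ParseAsync/GetParameters members are accessible from the calling project the way the tests access them.]

// Minimal sketch of parsing an OpenAPI document and inspecting the operations.
using System;
using System.IO;
using Microsoft.SemanticKernel.Plugins.OpenApi;

using Stream stream = File.OpenRead("documentV2_0.json"); // placeholder path

var parser = new OpenApiDocumentParser();
var operations = await parser.ParseAsync(stream);

foreach (var operation in operations)
{
    // Each operation exposes Id, Method, Path and its parameter metadata.
    Console.WriteLine($"{operation.Id}: {operation.Method} {operation.Path}");

    foreach (var parameter in operation.GetParameters())
    {
        Console.WriteLine($"  {parameter.Name} ({parameter.Type}), required: {parameter.IsRequired}");
    }
}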
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using SemanticKernel.Functions.UnitTests.OpenApi.TestPlugins; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi; + +public sealed class OpenApiDocumentParserV20Tests : IDisposable +{ + /// + /// System under test - an instance of OpenApiDocumentParser class. + /// + private readonly OpenApiDocumentParser _sut; + + /// + /// OpenAPI document stream. + /// + private readonly Stream _openApiDocument; + + /// + /// Creates an instance of a class. + /// + public OpenApiDocumentParserV20Tests() + { + this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV2_0.json"); + + this._sut = new OpenApiDocumentParser(); + } + + [Fact] + public async Task ItCanParsePutOperationBodySuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var putOperation = operations.Single(o => o.Id == "SetSecret"); + Assert.NotNull(putOperation); + + var payload = putOperation.Payload; + Assert.NotNull(payload); + Assert.Equal("application/json", payload.MediaType); + + var properties = payload.Properties; + Assert.NotNull(properties); + Assert.Equal(2, properties.Count); + + var valueProperty = properties.FirstOrDefault(p => p.Name == "value"); + Assert.NotNull(valueProperty); + Assert.True(valueProperty.IsRequired); + Assert.Equal("The value of the secret.", valueProperty.Description); + Assert.Equal("string", valueProperty.Type); + Assert.NotNull(valueProperty.Properties); + Assert.False(valueProperty.Properties.Any()); + Assert.NotNull(valueProperty.Schema); + Assert.Equal("string", valueProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("The value of the secret.", valueProperty.Schema.RootElement.GetProperty("description").GetString()); + + var attributesProperty = properties.FirstOrDefault(p => p.Name == "attributes"); + Assert.NotNull(attributesProperty); + Assert.False(attributesProperty.IsRequired); + Assert.Equal("attributes", attributesProperty.Description); + Assert.Equal("object", attributesProperty.Type); + Assert.NotNull(attributesProperty.Properties); + Assert.True(attributesProperty.Properties.Any()); + Assert.NotNull(attributesProperty.Schema); + Assert.Equal("object", attributesProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("attributes", attributesProperty.Schema.RootElement.GetProperty("description").GetString()); + + var enabledProperty = attributesProperty.Properties.FirstOrDefault(p => p.Name == "enabled"); + Assert.NotNull(enabledProperty); + Assert.False(enabledProperty.IsRequired); + Assert.Equal("Determines whether the object is enabled.", enabledProperty.Description); + Assert.Equal("boolean", enabledProperty.Type); + Assert.False(enabledProperty.Properties?.Any()); + Assert.NotNull(enabledProperty.Schema); + Assert.Equal("boolean", enabledProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("Determines whether the object is enabled.", enabledProperty.Schema.RootElement.GetProperty("description").GetString()); + } + + [Fact] + public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + 
Assert.NotNull(operations); + Assert.True(operations.Any()); + + var putOperation = operations.Single(o => o.Id == "SetSecret"); + Assert.NotNull(putOperation); + Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); + Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); + Assert.Equal(HttpMethod.Put, putOperation.Method); + Assert.Equal("/secrets/{secret-name}", putOperation.Path); + + var parameters = putOperation.GetParameters(addPayloadParamsFromMetadata: false); + Assert.NotNull(parameters); + Assert.True(parameters.Count >= 5); + + var pathParameter = parameters.Single(p => p.Name == "secret-name"); //'secret-name' path parameter. + Assert.True(pathParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Path, pathParameter.Location); + Assert.Null(pathParameter.DefaultValue); + Assert.NotNull(pathParameter.Schema); + Assert.Equal("string", pathParameter.Schema.RootElement.GetProperty("type").GetString()); + + var apiVersionParameter = parameters.Single(p => p.Name == "api-version"); //'api-version' query string parameter. + Assert.True(apiVersionParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Query, apiVersionParameter.Location); + Assert.Equal("7.0", apiVersionParameter.DefaultValue); + Assert.NotNull(apiVersionParameter.Schema); + Assert.Equal("string", apiVersionParameter.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("7.0", apiVersionParameter.Schema.RootElement.GetProperty("default").GetString()); + + var payloadParameter = parameters.Single(p => p.Name == "payload"); //'payload' artificial parameter. + Assert.True(payloadParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Body, payloadParameter.Location); + Assert.Null(payloadParameter.DefaultValue); + Assert.Equal("REST API request body.", payloadParameter.Description); + Assert.NotNull(payloadParameter.Schema); + Assert.Equal("object", payloadParameter.Schema.RootElement.GetProperty("type").GetString()); + + var contentTypeParameter = parameters.Single(p => p.Name == "content-type"); //'content-type' artificial parameter. 
+ Assert.False(contentTypeParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Body, contentTypeParameter.Location); + Assert.Null(contentTypeParameter.DefaultValue); + Assert.Equal("Content type of REST API request body.", contentTypeParameter.Description); + Assert.Null(contentTypeParameter.Schema); + } + + [Fact] + public async Task ItCanUseOperationSummaryAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "Excuses"); + Assert.NotNull(operation); + Assert.Equal("Turn a scenario into a creative or humorous excuse to send your boss", operation.Description); + } + + [Fact] + public async Task ItCanExtractSimpleTypeHeaderParameterMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert string header parameter metadata + var accept = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "Accept"); + + Assert.Equal("string", accept.Type); + Assert.Equal("application/json", accept.DefaultValue); + Assert.Equal("Indicates which content types, expressed as MIME types, the client is able to understand.", accept.Description); + Assert.False(accept.IsRequired); + + //Assert integer header parameter metadata + var apiVersion = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-API-Version"); + + Assert.Equal("integer", apiVersion.Type); + Assert.Equal(10, apiVersion.DefaultValue); + Assert.Equal("Requested API version.", apiVersion.Description); + Assert.True(apiVersion.IsRequired); + } + + [Fact] + public async Task ItCanExtractCsvStyleHeaderParameterMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert header parameters metadata + var acceptParameter = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-Operation-Csv-Ids"); + + Assert.Null(acceptParameter.DefaultValue); + Assert.False(acceptParameter.IsRequired); + Assert.Equal("array", acceptParameter.Type); + Assert.Equal(RestApiOperationParameterStyle.Simple, acceptParameter.Style); + Assert.Equal("The comma separated list of operation ids.", acceptParameter.Description); + Assert.Equal("string", acceptParameter.ArrayItemType); + } + + [Fact] + public async Task ItCanExtractHeadersSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "SetSecret"); + + var headerParameters = operation.Parameters.Where(p => p.Location == RestApiOperationParameterLocation.Header); + + Assert.NotNull(headerParameters); + Assert.Equal(3, headerParameters.Count()); + + Assert.Contains(headerParameters, (p) => p.Name == "Accept"); + Assert.Contains(headerParameters, (p) => p.Name == "X-API-Version"); + Assert.Contains(headerParameters, (p) => p.Name == "X-Operation-Csv-Ids"); + } + + [Fact] + public async Task ItCanExtractAllPathsAsOperationsAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.Equal(4, operations.Count); + } + + [Fact] + public async Task ItCanParseOperationHavingTextPlainBodySuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + 
Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "Excuses"); + Assert.NotNull(operation); + + var payload = operation.Payload; + Assert.NotNull(payload); + Assert.Equal("text/plain", payload.MediaType); + Assert.Equal("excuse event", payload.Description); + Assert.NotNull(payload.Schema); + + var properties = payload.Properties; + Assert.NotNull(properties); + Assert.Empty(properties); + } + + [Fact] + public async Task ItCanWorkWithDocumentsWithoutHostAndSchemaAttributesAsync() + { + //Arrange + using var stream = OpenApiTestHelper.ModifyOpenApiDocument(this._openApiDocument, (doc) => + { + doc.Remove("host"); + doc.Remove("schemes"); + }); + + //Act + var operations = await this._sut.ParseAsync(stream); + + //Assert + Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); + } + + [Fact] + public async Task ItCanParseResponsesSuccessfullyAsync() + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "Excuses"); + Assert.NotNull(operation); + + operation.Responses.TryGetValue("200", out var response); + Assert.NotNull(response); + Assert.Equal("text/plain", response.MediaType); + Assert.Equal("The OK response", response.Description); + Assert.NotNull(response.Schema); + Assert.Equal("string", response.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal( + JsonSerializer.Serialize(KernelJsonSchema.Parse("{\"type\": \"string\"}")), + JsonSerializer.Serialize(response.Schema)); + } + + [Fact] + public async Task ItCanWorkWithDefaultParametersOfVariousTypesAsync() + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "TestDefaultValues"); + Assert.NotNull(operation); + + var parameters = operation.GetParameters(); + Assert.Equal(11, parameters.Count); + + var stringParameter = parameters.Single(p => p.Name == "string-parameter"); + Assert.Equal("string-value", stringParameter.DefaultValue); + + var booleanParameter = parameters.Single(p => p.Name == "boolean-parameter"); + Assert.True(booleanParameter.DefaultValue is bool value); + + var integerParameter = parameters.Single(p => p.Name == "integer-parameter"); + Assert.True(integerParameter.DefaultValue is int); + Assert.Equal(281, integerParameter.DefaultValue); + + var longParameter = parameters.Single(p => p.Name == "long-parameter"); + Assert.True(longParameter.DefaultValue is long); + Assert.Equal((long)-2814, longParameter.DefaultValue); + + var floatParameter = parameters.Single(p => p.Name == "float-parameter"); + Assert.True(floatParameter.DefaultValue is float); + Assert.Equal((float)12.01, floatParameter.DefaultValue); + + var doubleParameter = parameters.Single(p => p.Name == "double-parameter"); + Assert.True(doubleParameter.DefaultValue is double); + Assert.Equal((double)-12.01, doubleParameter.DefaultValue); + + var encodedCharactersParameter = parameters.Single(p => p.Name == "encoded-characters-parameter"); + Assert.True(encodedCharactersParameter.DefaultValue is byte[]); + Assert.Equal(new byte[] { 1, 2, 3, 4, 5 }, encodedCharactersParameter.DefaultValue); + + var binaryDataParameter = parameters.Single(p => p.Name == "binary-data-parameter"); + Assert.True(binaryDataParameter.DefaultValue is byte[]); + Assert.Equal(new byte[] { 50, 51, 52, 
53, 54 }, binaryDataParameter.DefaultValue); + + var dateParameter = parameters.Single(p => p.Name == "date-parameter"); + Assert.True(dateParameter.DefaultValue is DateTime); + Assert.Equal(new DateTime(2017, 07, 21), dateParameter.DefaultValue); + + var dateTimeParameter = parameters.Single(p => p.Name == "date-time-parameter"); + Assert.True(dateTimeParameter.DefaultValue is DateTimeOffset); + Assert.Equal(new DateTimeOffset(2017, 07, 21, 17, 32, 28, TimeSpan.Zero), dateTimeParameter.DefaultValue); + + var passwordParameter = parameters.Single(p => p.Name == "password-parameter"); + Assert.True(passwordParameter.DefaultValue is string); + Assert.Equal("password-value", passwordParameter.DefaultValue); + } + + private static RestApiOperationParameter GetParameterMetadata(IList operations, string operationId, + RestApiOperationParameterLocation location, string name) + { + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == operationId); + Assert.NotNull(operation.Parameters); + Assert.True(operation.Parameters.Any()); + + var parameters = operation.Parameters.Where(p => p.Location == location); + + var parameter = parameters.Single(p => p.Name == name); + Assert.NotNull(parameter); + + return parameter; + } + + public void Dispose() + { + this._openApiDocument.Dispose(); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV30Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV30Tests.cs new file mode 100644 index 000000000000..46dfdf8da801 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV30Tests.cs @@ -0,0 +1,464 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using SemanticKernel.Functions.UnitTests.OpenApi.TestPlugins; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi; + +public sealed class OpenApiDocumentParserV30Tests : IDisposable +{ + /// + /// System under test - an instance of OpenApiDocumentParser class. + /// + private readonly OpenApiDocumentParser _sut; + + /// + /// OpenAPI document stream. + /// + private readonly Stream _openApiDocument; + + /// + /// Creates an instance of a class. 
+ /// + public OpenApiDocumentParserV30Tests() + { + this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV3_0.json"); + + this._sut = new OpenApiDocumentParser(); + } + + [Fact] + public async Task ItCanParsePutOperationBodySuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var putOperation = operations.Single(o => o.Id == "SetSecret"); + Assert.NotNull(putOperation); + + var payload = putOperation.Payload; + Assert.NotNull(payload); + Assert.Equal("application/json", payload.MediaType); + + var properties = payload.Properties; + Assert.NotNull(properties); + Assert.Equal(2, properties.Count); + + var valueProperty = properties.FirstOrDefault(p => p.Name == "value"); + Assert.NotNull(valueProperty); + Assert.True(valueProperty.IsRequired); + Assert.Equal("The value of the secret.", valueProperty.Description); + Assert.Equal("string", valueProperty.Type); + Assert.NotNull(valueProperty.Properties); + Assert.False(valueProperty.Properties.Any()); + Assert.NotNull(valueProperty.Schema); + Assert.Equal("string", valueProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("The value of the secret.", valueProperty.Schema.RootElement.GetProperty("description").GetString()); + + var attributesProperty = properties.FirstOrDefault(p => p.Name == "attributes"); + Assert.NotNull(attributesProperty); + Assert.False(attributesProperty.IsRequired); + Assert.Equal("attributes", attributesProperty.Description); + Assert.Equal("object", attributesProperty.Type); + Assert.NotNull(attributesProperty.Properties); + Assert.True(attributesProperty.Properties.Any()); + Assert.NotNull(attributesProperty.Schema); + Assert.Equal("object", attributesProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("attributes", attributesProperty.Schema.RootElement.GetProperty("description").GetString()); + + var enabledProperty = attributesProperty.Properties.FirstOrDefault(p => p.Name == "enabled"); + Assert.NotNull(enabledProperty); + Assert.False(enabledProperty.IsRequired); + Assert.Equal("Determines whether the object is enabled.", enabledProperty.Description); + Assert.Equal("boolean", enabledProperty.Type); + Assert.False(enabledProperty.Properties?.Any()); + Assert.NotNull(enabledProperty.Schema); + Assert.Equal("boolean", enabledProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("Determines whether the object is enabled.", enabledProperty.Schema.RootElement.GetProperty("description").GetString()); + } + + [Fact] + public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var putOperation = operations.Single(o => o.Id == "SetSecret"); + Assert.NotNull(putOperation); + Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); + Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); + Assert.Equal(HttpMethod.Put, putOperation.Method); + Assert.Equal("/secrets/{secret-name}", putOperation.Path); + + var parameters = putOperation.GetParameters(addPayloadParamsFromMetadata: false); + Assert.NotNull(parameters); + Assert.True(parameters.Count >= 5); + + var pathParameter = parameters.Single(p => p.Name == "secret-name"); //'secret-name' path parameter. 
+ Assert.True(pathParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Path, pathParameter.Location); + Assert.Null(pathParameter.DefaultValue); + Assert.NotNull(pathParameter.Schema); + Assert.Equal("string", pathParameter.Schema.RootElement.GetProperty("type").GetString()); + + var apiVersionParameter = parameters.Single(p => p.Name == "api-version"); //'api-version' query string parameter. + Assert.True(apiVersionParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Query, apiVersionParameter.Location); + Assert.Equal("7.0", apiVersionParameter.DefaultValue); + Assert.NotNull(apiVersionParameter.Schema); + Assert.Equal("string", apiVersionParameter.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("7.0", apiVersionParameter.Schema.RootElement.GetProperty("default").GetString()); + + var payloadParameter = parameters.Single(p => p.Name == "payload"); //'payload' artificial parameter. + Assert.True(payloadParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Body, payloadParameter.Location); + Assert.Null(payloadParameter.DefaultValue); + Assert.Equal("REST API request body.", payloadParameter.Description); + Assert.NotNull(payloadParameter.Schema); + Assert.Equal("object", payloadParameter.Schema.RootElement.GetProperty("type").GetString()); + + var contentTypeParameter = parameters.Single(p => p.Name == "content-type"); //'content-type' artificial parameter. + Assert.False(contentTypeParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Body, contentTypeParameter.Location); + Assert.Null(contentTypeParameter.DefaultValue); + Assert.Equal("Content type of REST API request body.", contentTypeParameter.Description); + Assert.Null(contentTypeParameter.Schema); + } + + [Fact] + public async Task ItCanExtractSimpleTypeHeaderParameterMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert string header parameter metadata + var accept = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "Accept"); + + Assert.Equal("string", accept.Type); + Assert.Equal("application/json", accept.DefaultValue); + Assert.Equal("Indicates which content types, expressed as MIME types, the client is able to understand.", accept.Description); + Assert.False(accept.IsRequired); + + //Assert integer header parameter metadata + var apiVersion = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-API-Version"); + + Assert.Equal("integer", apiVersion.Type); + Assert.Equal(10, apiVersion.DefaultValue); + Assert.Equal("Requested API version.", apiVersion.Description); + Assert.True(apiVersion.IsRequired); + } + + [Fact] + public async Task ItCanUseOperationSummaryAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "Excuses"); + Assert.NotNull(operation); + Assert.Equal("Turn a scenario into a creative or humorous excuse to send your boss", operation.Description); + } + + [Fact] + public async Task ItCanExtractCsvStyleHeaderParameterMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert header parameters metadata + var acceptParameter = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-Operation-Csv-Ids"); + + 
Assert.Null(acceptParameter.DefaultValue);
+ Assert.False(acceptParameter.IsRequired);
+ Assert.Equal("array", acceptParameter.Type);
+ Assert.Equal(RestApiOperationParameterStyle.Simple, acceptParameter.Style);
+ Assert.Equal("The comma separated list of operation ids.", acceptParameter.Description);
+ Assert.Equal("string", acceptParameter.ArrayItemType);
+ }
+
+ [Fact]
+ public async Task ItCanExtractHeadersSuccessfullyAsync()
+ {
+ // Act
+ var operations = await this._sut.ParseAsync(this._openApiDocument);
+
+ // Assert
+ Assert.True(operations.Any());
+
+ var operation = operations.Single(o => o.Id == "SetSecret");
+
+ var headerParameters = operation.Parameters.Where(p => p.Location == RestApiOperationParameterLocation.Header);
+
+ Assert.NotNull(headerParameters);
+ Assert.Equal(3, headerParameters.Count());
+
+ Assert.Contains(headerParameters, (p) => p.Name == "Accept");
+ Assert.Contains(headerParameters, (p) => p.Name == "X-API-Version");
+ Assert.Contains(headerParameters, (p) => p.Name == "X-Operation-Csv-Ids");
+ }
+
+ [Fact]
+ public async Task ItCanExtractAllPathsAsOperationsAsync()
+ {
+ // Act
+ var operations = await this._sut.ParseAsync(this._openApiDocument);
+
+ // Assert
+ Assert.Equal(4, operations.Count);
+ }
+
+ [Fact]
+ public async Task ItCanParseOperationHavingTextPlainBodySuccessfullyAsync()
+ {
+ // Act
+ var operations = await this._sut.ParseAsync(this._openApiDocument);
+
+ // Assert
+ Assert.NotNull(operations);
+ Assert.True(operations.Any());
+
+ var operation = operations.Single(o => o.Id == "Excuses");
+ Assert.NotNull(operation);
+
+ var payload = operation.Payload;
+ Assert.NotNull(payload);
+ Assert.Equal("text/plain", payload.MediaType);
+ Assert.Equal("excuse event", payload.Description);
+ Assert.NotNull(payload.Schema);
+
+ var properties = payload.Properties;
+ Assert.NotNull(properties);
+ Assert.Empty(properties);
+ }
+
+ [Fact]
+ public async Task ItShouldThrowExceptionForNonCompliantDocumentAsync()
+ {
+ // Arrange
+ var nonCompliantOpenApiDocument = ResourcePluginsProvider.LoadFromResource("nonCompliant_documentV3_0.json");
+
+ // Act and Assert
+ await Assert.ThrowsAsync(async () => await this._sut.ParseAsync(nonCompliantOpenApiDocument));
+ }
+
+ [Fact]
+ public async Task ItShouldWorkWithNonCompliantDocumentIfAllowedAsync()
+ {
+ // Arrange
+ var nonCompliantOpenApiDocument = ResourcePluginsProvider.LoadFromResource("nonCompliant_documentV3_0.json");
+
+ // Act
+ await this._sut.ParseAsync(nonCompliantOpenApiDocument, ignoreNonCompliantErrors: true);
+
+ // Assert
+ // No exception thrown means the non-compliant document was parsed successfully.
+ } + + [Fact] + public async Task ItCanWorkWithDocumentsWithoutServersAttributeAsync() + { + //Arrange + using var stream = ModifyOpenApiDocument(this._openApiDocument, (doc) => + { + doc.Remove("servers"); + }); + + //Act + var operations = await this._sut.ParseAsync(stream); + + //Assert + Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); + } + + [Fact] + public async Task ItCanWorkWithDocumentsWithEmptyServersAttributeAsync() + { + //Arrange + using var stream = ModifyOpenApiDocument(this._openApiDocument, (doc) => + { + doc["servers"] = new JsonArray(); + }); + + //Act + var operations = await this._sut.ParseAsync(stream); + + //Assert + Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); + } + + [Theory] + [InlineData("explodeFormParam")] + [InlineData("anotherExplodeFormParam")] + public async Task ItShouldSupportsAmpersandSeparatedParametersForFormStyleArrayQueryStringParametersAsync(string parameterName) + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "GetSecret"); + + var explodeFormParam = operation.Parameters.Single(p => p.Name == parameterName); + + Assert.True(explodeFormParam.Expand); + } + + [Fact] + public async Task ItShouldSupportsCommaSeparatedValuesForFormStyleArrayQueryStringParametersAsync() + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "GetSecret"); + + var explodeFormParam = operation.Parameters.Single(p => p.Name == "nonExplodeFormParam"); + + Assert.False(explodeFormParam.Expand); + } + + [Fact] + public async Task ItCanParseResponsesSuccessfullyAsync() + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "Excuses"); + Assert.NotNull(operation); + + operation.Responses.TryGetValue("200", out var response); + Assert.NotNull(response); + Assert.Equal("text/plain", response.MediaType); + Assert.Equal("The OK response", response.Description); + Assert.NotNull(response.Schema); + Assert.Equal("string", response.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal( + JsonSerializer.Serialize(KernelJsonSchema.Parse("{\"type\": \"string\"}")), + JsonSerializer.Serialize(response.Schema)); + } + + [Fact] + public async Task ItCanWorkWithDefaultParametersOfVariousTypesAsync() + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "TestDefaultValues"); + Assert.NotNull(operation); + + var parameters = operation.GetParameters(); + Assert.Equal(11, parameters.Count); + + var stringParameter = parameters.Single(p => p.Name == "string-parameter"); + Assert.Equal("string-value", stringParameter.DefaultValue); + + var booleanParameter = parameters.Single(p => p.Name == "boolean-parameter"); + Assert.True(booleanParameter.DefaultValue is bool value); + + var integerParameter = parameters.Single(p => p.Name == "integer-parameter"); + Assert.True(integerParameter.DefaultValue is int); + Assert.Equal(281, integerParameter.DefaultValue); + + var longParameter = parameters.Single(p => p.Name == "long-parameter"); + Assert.True(longParameter.DefaultValue is long); + Assert.Equal((long)-2814, 
longParameter.DefaultValue); + + var floatParameter = parameters.Single(p => p.Name == "float-parameter"); + Assert.True(floatParameter.DefaultValue is float); + Assert.Equal((float)12.01, floatParameter.DefaultValue); + + var doubleParameter = parameters.Single(p => p.Name == "double-parameter"); + Assert.True(doubleParameter.DefaultValue is double); + Assert.Equal((double)-12.01, doubleParameter.DefaultValue); + + var encodedCharactersParameter = parameters.Single(p => p.Name == "encoded-characters-parameter"); + Assert.True(encodedCharactersParameter.DefaultValue is byte[]); + Assert.Equal(new byte[] { 1, 2, 3, 4, 5 }, encodedCharactersParameter.DefaultValue); + + var binaryDataParameter = parameters.Single(p => p.Name == "binary-data-parameter"); + Assert.True(binaryDataParameter.DefaultValue is byte[]); + Assert.Equal(new byte[] { 50, 51, 52, 53, 54 }, binaryDataParameter.DefaultValue); + + var dateParameter = parameters.Single(p => p.Name == "date-parameter"); + Assert.True(dateParameter.DefaultValue is DateTime); + Assert.Equal(new DateTime(2017, 07, 21), dateParameter.DefaultValue); + + var dateTimeParameter = parameters.Single(p => p.Name == "date-time-parameter"); + Assert.True(dateTimeParameter.DefaultValue is DateTimeOffset); + Assert.Equal(new DateTimeOffset(2017, 07, 21, 17, 32, 28, TimeSpan.Zero), dateTimeParameter.DefaultValue); + + var passwordParameter = parameters.Single(p => p.Name == "password-parameter"); + Assert.True(passwordParameter.DefaultValue is string); + Assert.Equal("password-value", passwordParameter.DefaultValue); + } + + private static MemoryStream ModifyOpenApiDocument(Stream openApiDocument, Action transformer) + { + var json = JsonSerializer.Deserialize(openApiDocument); + + transformer(json!); + + var stream = new MemoryStream(); + + JsonSerializer.Serialize(stream, json); + + stream.Seek(0, SeekOrigin.Begin); + + return stream; + } + + private static RestApiOperationParameter GetParameterMetadata(IList operations, string operationId, + RestApiOperationParameterLocation location, string name) + { + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == operationId); + Assert.NotNull(operation.Parameters); + Assert.True(operation.Parameters.Any()); + + var parameters = operation.Parameters.Where(p => p.Location == location); + + var parameter = parameters.Single(p => p.Name == name); + Assert.NotNull(parameter); + + return parameter; + } + + public void Dispose() + { + this._openApiDocument.Dispose(); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV31Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV31Tests.cs new file mode 100644 index 000000000000..b927829e2e18 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV31Tests.cs @@ -0,0 +1,445 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Dynamic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using SemanticKernel.Functions.UnitTests.OpenApi.TestPlugins; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi; + +public sealed class OpenApiDocumentParserV31Tests : IDisposable +{ + /// + /// System under test - an instance of OpenApiDocumentParser class. 
+ /// + private readonly OpenApiDocumentParser _sut; + + /// + /// OpenAPI document stream. + /// + private readonly Stream _openApiDocument; + + /// + /// Creates an instance of a class. + /// + public OpenApiDocumentParserV31Tests() + { + this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV3_1.yaml"); + + this._sut = new OpenApiDocumentParser(); + } + + [Fact] + public async Task ItCanParsePutOperationBodySuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var putOperation = operations.Single(o => o.Id == "SetSecret"); + Assert.NotNull(putOperation); + + var payload = putOperation.Payload; + Assert.NotNull(payload); + Assert.Equal("application/json", payload.MediaType); + + var properties = payload.Properties; + Assert.NotNull(properties); + Assert.Equal(2, properties.Count); + + var valueProperty = properties.FirstOrDefault(p => p.Name == "value"); + Assert.NotNull(valueProperty); + Assert.True(valueProperty.IsRequired); + Assert.Equal("The value of the secret.", valueProperty.Description); + Assert.Equal("string", valueProperty.Type); + Assert.NotNull(valueProperty.Properties); + Assert.False(valueProperty.Properties.Any()); + Assert.NotNull(valueProperty.Schema); + Assert.Equal("string", valueProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("The value of the secret.", valueProperty.Schema.RootElement.GetProperty("description").GetString()); + + var attributesProperty = properties.FirstOrDefault(p => p.Name == "attributes"); + Assert.NotNull(attributesProperty); + Assert.False(attributesProperty.IsRequired); + Assert.Equal("attributes", attributesProperty.Description); + Assert.Equal("object", attributesProperty.Type); + Assert.NotNull(attributesProperty.Properties); + Assert.True(attributesProperty.Properties.Any()); + Assert.NotNull(attributesProperty.Schema); + Assert.Equal("object", attributesProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("attributes", attributesProperty.Schema.RootElement.GetProperty("description").GetString()); + + var enabledProperty = attributesProperty.Properties.FirstOrDefault(p => p.Name == "enabled"); + Assert.NotNull(enabledProperty); + Assert.False(enabledProperty.IsRequired); + Assert.Equal("Determines whether the object is enabled.", enabledProperty.Description); + Assert.Equal("boolean", enabledProperty.Type); + Assert.False(enabledProperty.Properties?.Any()); + Assert.NotNull(enabledProperty.Schema); + Assert.Equal("boolean", enabledProperty.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("Determines whether the object is enabled.", enabledProperty.Schema.RootElement.GetProperty("description").GetString()); + } + + [Fact] + public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var putOperation = operations.Single(o => o.Id == "SetSecret"); + Assert.NotNull(putOperation); + Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); + Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); + Assert.Equal(HttpMethod.Put, putOperation.Method); + Assert.Equal("/secrets/{secret-name}", putOperation.Path); + + var parameters = putOperation.GetParameters(addPayloadParamsFromMetadata: false); + 
Assert.NotNull(parameters); + Assert.True(parameters.Count >= 5); + + var pathParameter = parameters.Single(p => p.Name == "secret-name"); //'secret-name' path parameter. + Assert.True(pathParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Path, pathParameter.Location); + Assert.Null(pathParameter.DefaultValue); + Assert.NotNull(pathParameter.Schema); + Assert.Equal("string", pathParameter.Schema.RootElement.GetProperty("type").GetString()); + + var apiVersionParameter = parameters.Single(p => p.Name == "api-version"); //'api-version' query string parameter. + Assert.True(apiVersionParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Query, apiVersionParameter.Location); + Assert.Equal("7.0", apiVersionParameter.DefaultValue); + Assert.NotNull(apiVersionParameter.Schema); + Assert.Equal("string", apiVersionParameter.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal("7.0", apiVersionParameter.Schema.RootElement.GetProperty("default").GetString()); + + var payloadParameter = parameters.Single(p => p.Name == "payload"); //'payload' artificial parameter. + Assert.True(payloadParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Body, payloadParameter.Location); + Assert.Null(payloadParameter.DefaultValue); + Assert.Equal("REST API request body.", payloadParameter.Description); + Assert.NotNull(payloadParameter.Schema); + Assert.Equal("object", payloadParameter.Schema.RootElement.GetProperty("type").GetString()); + + var contentTypeParameter = parameters.Single(p => p.Name == "content-type"); //'content-type' artificial parameter. + Assert.False(contentTypeParameter.IsRequired); + Assert.Equal(RestApiOperationParameterLocation.Body, contentTypeParameter.Location); + Assert.Null(contentTypeParameter.DefaultValue); + Assert.Equal("Content type of REST API request body.", contentTypeParameter.Description); + Assert.Null(contentTypeParameter.Schema); + } + + [Fact] + public async Task ItCanUseOperationSummaryAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "Excuses"); + Assert.NotNull(operation); + Assert.Equal("Turn a scenario into a creative or humorous excuse to send your boss", operation.Description); + } + + [Fact] + public async Task ItCanExtractSimpleTypeHeaderParameterMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert string header parameter metadata + var accept = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "Accept"); + + Assert.Equal("string", accept.Type); + Assert.Equal("application/json", accept.DefaultValue); + Assert.Equal("Indicates which content types, expressed as MIME types, the client is able to understand.", accept.Description); + Assert.False(accept.IsRequired); + + //Assert integer header parameter metadata + var apiVersion = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-API-Version"); + + Assert.Equal("integer", apiVersion.Type); + Assert.Equal(10, apiVersion.DefaultValue); + Assert.Equal("Requested API version.", apiVersion.Description); + Assert.True(apiVersion.IsRequired); + } + + [Fact] + public async Task ItCanExtractCsvStyleHeaderParameterMetadataSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert header 
parameters metadata + var acceptParameter = GetParameterMetadata(operations, "SetSecret", RestApiOperationParameterLocation.Header, "X-Operation-Csv-Ids"); + + Assert.Null(acceptParameter.DefaultValue); + Assert.False(acceptParameter.IsRequired); + Assert.Equal("array", acceptParameter.Type); + Assert.Equal(RestApiOperationParameterStyle.Simple, acceptParameter.Style); + Assert.Equal("The comma separated list of operation ids.", acceptParameter.Description); + Assert.Equal("string", acceptParameter.ArrayItemType); + } + + [Fact] + public async Task ItCanExtractHeadersSuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "SetSecret"); + + var headerParameters = operation.Parameters.Where(p => p.Location == RestApiOperationParameterLocation.Header); + + Assert.NotNull(headerParameters); + Assert.Equal(3, headerParameters.Count()); + + Assert.Contains(headerParameters, (p) => p.Name == "Accept"); + Assert.Contains(headerParameters, (p) => p.Name == "X-API-Version"); + Assert.Contains(headerParameters, (p) => p.Name == "X-Operation-Csv-Ids"); + } + + [Fact] + public async Task ItCanExtractAllPathsAsOperationsAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.Equal(4, operations.Count); + } + + [Fact] + public async Task ItCanParseOperationHavingTextPlainBodySuccessfullyAsync() + { + // Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + // Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "Excuses"); + Assert.NotNull(operation); + + var payload = operation.Payload; + Assert.NotNull(payload); + Assert.Equal("text/plain", payload.MediaType); + Assert.Equal("excuse event", payload.Description); + Assert.NotNull(payload.Schema); + + var properties = payload.Properties; + Assert.NotNull(properties); + Assert.Empty(properties); + } + + [Fact] + public async Task ItCanWorkWithDocumentsWithoutServersAttributeAsync() + { + //Arrange + using var stream = ModifyOpenApiDocument(this._openApiDocument, (yaml) => + { + yaml.Remove("servers"); + }); + + //Act + var operations = await this._sut.ParseAsync(stream); + + //Assert + Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); + } + + [Fact] + public async Task ItCanWorkWithDocumentsWithEmptyServersAttributeAsync() + { + //Arrange + using var stream = ModifyOpenApiDocument(this._openApiDocument, (yaml) => + { + yaml["servers"] = Array.Empty(); + }); + + //Act + var operations = await this._sut.ParseAsync(stream); + + //Assert + Assert.All(operations, (op) => Assert.Null(op.ServerUrl)); + } + + [Theory] + [InlineData("explodeFormParam")] + [InlineData("anotherExplodeFormParam")] + public async Task ItShouldSupportsAmpersandSeparatedParametersForFormStyleArrayQueryStringParametersAsync(string parameterName) + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "GetSecret"); + + var explodeFormParam = operation.Parameters.Single(p => p.Name == parameterName); + + Assert.True(explodeFormParam.Expand); + } + + [Fact] + public async Task ItShouldSupportsCommaSeparatedValuesForFormStyleArrayQueryStringParametersAsync() + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + 
Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "GetSecret"); + + var explodeFormParam = operation.Parameters.Single(p => p.Name == "nonExplodeFormParam"); + + Assert.False(explodeFormParam.Expand); + } + + [Fact] + public async Task ItCanParseResponsesSuccessfullyAsync() + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "Excuses"); + Assert.NotNull(operation); + + operation.Responses.TryGetValue("200", out var response); + Assert.NotNull(response); + Assert.Equal("text/plain", response.MediaType); + Assert.Equal("The OK response", response.Description); + Assert.NotNull(response.Schema); + Assert.Equal("string", response.Schema.RootElement.GetProperty("type").GetString()); + Assert.Equal( + JsonSerializer.Serialize(KernelJsonSchema.Parse("{\"type\": \"string\"}")), + JsonSerializer.Serialize(response.Schema)); + } + + [Fact] + public async Task ItCanWorkWithDefaultParametersOfVariousTypesAsync() + { + //Act + var operations = await this._sut.ParseAsync(this._openApiDocument); + + //Assert + Assert.NotNull(operations); + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == "TestDefaultValues"); + Assert.NotNull(operation); + + var parameters = operation.GetParameters(); + Assert.Equal(11, parameters.Count); + + var stringParameter = parameters.Single(p => p.Name == "string-parameter"); + Assert.Equal("string-value", stringParameter.DefaultValue); + + var booleanParameter = parameters.Single(p => p.Name == "boolean-parameter"); + Assert.True(booleanParameter.DefaultValue is bool value); + + var integerParameter = parameters.Single(p => p.Name == "integer-parameter"); + Assert.True(integerParameter.DefaultValue is int); + Assert.Equal(281, integerParameter.DefaultValue); + + var longParameter = parameters.Single(p => p.Name == "long-parameter"); + Assert.True(longParameter.DefaultValue is long); + Assert.Equal((long)-2814, longParameter.DefaultValue); + + var floatParameter = parameters.Single(p => p.Name == "float-parameter"); + Assert.True(floatParameter.DefaultValue is float); + Assert.Equal((float)12.01, floatParameter.DefaultValue); + + var doubleParameter = parameters.Single(p => p.Name == "double-parameter"); + Assert.True(doubleParameter.DefaultValue is double); + Assert.Equal((double)-12.01, doubleParameter.DefaultValue); + + var encodedCharactersParameter = parameters.Single(p => p.Name == "encoded-characters-parameter"); + Assert.True(encodedCharactersParameter.DefaultValue is byte[]); + Assert.Equal(new byte[] { 1, 2, 3, 4, 5 }, encodedCharactersParameter.DefaultValue); + + var binaryDataParameter = parameters.Single(p => p.Name == "binary-data-parameter"); + Assert.True(binaryDataParameter.DefaultValue is byte[]); + Assert.Equal(new byte[] { 50, 51, 52, 53, 54 }, binaryDataParameter.DefaultValue); + + var dateParameter = parameters.Single(p => p.Name == "date-parameter"); + Assert.True(dateParameter.DefaultValue is DateTime); + Assert.Equal(new DateTime(2017, 07, 21), dateParameter.DefaultValue); + + var dateTimeParameter = parameters.Single(p => p.Name == "date-time-parameter"); + Assert.True(dateTimeParameter.DefaultValue is DateTimeOffset); + Assert.Equal(new DateTimeOffset(2017, 07, 21, 17, 32, 28, TimeSpan.Zero), dateTimeParameter.DefaultValue); + + var passwordParameter = parameters.Single(p => p.Name == "password-parameter"); + 
Assert.True(passwordParameter.DefaultValue is string); + Assert.Equal("password-value", passwordParameter.DefaultValue); + } + + private static MemoryStream ModifyOpenApiDocument(Stream openApiDocument, Action> transformer) + { + var serializer = new SharpYaml.Serialization.Serializer(); + + //Deserialize yaml + var yaml = serializer.Deserialize(openApiDocument); + + //Modify yaml + transformer(yaml!); + + //Serialize yaml + var stream = new MemoryStream(); + + serializer.Serialize(stream, yaml); + + stream.Seek(0, SeekOrigin.Begin); + + return stream; + } + + private static RestApiOperationParameter GetParameterMetadata(IList operations, string operationId, RestApiOperationParameterLocation location, string name) + { + Assert.True(operations.Any()); + + var operation = operations.Single(o => o.Id == operationId); + Assert.NotNull(operation.Parameters); + Assert.True(operation.Parameters.Any()); + + var parameters = operation.Parameters.Where(p => p.Location == location); + + var parameter = parameters.Single(p => p.Name == name); + Assert.NotNull(parameter); + + return parameter; + } + + public void Dispose() + { + this._openApiDocument.Dispose(); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiTestHelper.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiTestHelper.cs similarity index 75% rename from dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiTestHelper.cs rename to dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiTestHelper.cs index 3c487ecd1003..c398e82b8b26 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/OpenApiTestHelper.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiTestHelper.cs @@ -5,17 +5,17 @@ using System.Text.Json; using System.Text.Json.Nodes; -namespace SemanticKernel.Functions.UnitTests.OpenAPI; +namespace SemanticKernel.Functions.UnitTests.OpenApi; /// -/// Contains helper methods for OpenApi related tests. +/// Contains helper methods for OpenAPI related tests. /// internal static class OpenApiTestHelper { /// - /// Modifies OpenApi document for testing different scenarios. + /// Modifies OpenAPI document for testing different scenarios. /// - /// The OpenApi document content. + /// The OpenAPI document content. /// Delegate with document modifications. internal static MemoryStream ModifyOpenApiDocument(Stream openApiDocument, Action transformer) { diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationResponseConverterTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationResponseConverterTests.cs similarity index 89% rename from dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationResponseConverterTests.cs rename to dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationResponseConverterTests.cs index 783969e9d40f..96416594e5fb 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/RestApiOperationResponseConverterTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationResponseConverterTests.cs @@ -1,9 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Microsoft.SemanticKernel.Functions.OpenAPI.Model; +using Microsoft.SemanticKernel; using Xunit; -namespace SemanticKernel.Functions.UnitTests.OpenAPI; +namespace SemanticKernel.Functions.UnitTests.OpenApi; + public class RestApiOperationResponseConverterTests { private readonly RestApiOperationResponseConverter _sut; diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationResponseTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationResponseTests.cs new file mode 100644 index 000000000000..9e96fa599140 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationResponseTests.cs @@ -0,0 +1,119 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using SemanticKernel.Functions.UnitTests.OpenApi.TestResponses; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi; + +public class RestApiOperationResponseTests +{ + [Fact] + public void ItShouldValidateStringContentWithNoSchema() + { + //Arrange + var response = new RestApiOperationResponse("fake-content", "fake-content-type"); + + //Act + var result = response.IsValid(); + + //Assert + Assert.True(result); + } + + [Fact] + public void ItShouldValidateByteContentTWithNoSchema() + { + //Arrange + var response = new RestApiOperationResponse(new byte[] { 00, 01, 02 }, "fake-content-type"); + + //Act + var result = response.IsValid(); + + //Assert + Assert.True(result); + } + + [Theory] + [InlineData("fake-content", "application/json", "{\"type\": \"string\"}")] + [InlineData("{\"fake\": \"content\"}", "text/plain", "{\"type\": \"string\"}")] + [InlineData("{\"fake\": \"content\"}", "application/json", "{\"type\": \"string\"}")] + public void ItShouldFailValidationWithSchema(string content, string contentType, string schemaJson) + { + //Arrange + var response = new RestApiOperationResponse(content, contentType, KernelJsonSchema.Parse(schemaJson)); + + //Act + var result = response.IsValid(); + + //Assert + Assert.False(result); + } + + [Theory] + [InlineData("\"fake-content\"", "application/json", "{\"type\": \"string\"}")] + [InlineData("fake-content", "text/plain", "{\"type\": \"string\"}")] + [InlineData("fake-content", "application/xml", "{\"type\": \"string\"}")] + [InlineData("fake-content", "image", "{\"type\": \"string\"}")] + public void ItShouldPassValidationWithSchema(string content, string contentType, string schemaJson) + { + //Arrange + var response = new RestApiOperationResponse(content, contentType, KernelJsonSchema.Parse(schemaJson)); + + //Act + var result = response.IsValid(); + + //Assert + Assert.True(result); + } + + [Theory] + [InlineData("ValidProductContent.json", "application/json", "ObjectResponseSchema.json")] + [InlineData("ValidProductContent.json", "application/json", "ProductResponseSchema.json")] + public void IsValidShouldBeTrue(string contentFileName, string contentType, string schemaJsonFilename) + { + //Arrange + var contentText = ResourceResponseProvider.LoadFromResource(contentFileName); + var productJson = ResourceResponseProvider.LoadFromResource(schemaJsonFilename); + var response = new RestApiOperationResponse(contentText, contentType, KernelJsonSchema.Parse(productJson)); + + //Act + var result = response.IsValid(); + + //Assert + Assert.True(result); + } + + [Theory] + [InlineData("NotProductContent.json", "application/json", "ProductResponseSchema.json")] + [InlineData("InvalidProductContent.json", "application/json", 
"ProductResponseSchema.json")] + public void IsValidShouldBeFalse(string contentFileName, string contentType, string schemaJsonFilename) + { + //Arrange + var contentText = ResourceResponseProvider.LoadFromResource(contentFileName); + var productJson = ResourceResponseProvider.LoadFromResource(schemaJsonFilename); + var response = new RestApiOperationResponse(contentText, contentType, KernelJsonSchema.Parse(productJson)); + + //Act + var result = response.IsValid(); + + //Assert + Assert.False(result); + } + + [Theory] + [InlineData(null, "")] + [InlineData("content", "content")] + public void ToStringReturnsString(object? content, string expectedContent) + { + // Arrange + var response = new RestApiOperationResponse(content!, "application/json"); + + // Act + var result = response.ToString(); + + // Assert + Assert.Equal(expectedContent, result); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs new file mode 100644 index 000000000000..50cd1846336c --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs @@ -0,0 +1,1145 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Net.Mime; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Moq; +using SemanticKernel.Functions.UnitTests.OpenApi.TestResponses; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi; + +public sealed class RestApiOperationRunnerTests : IDisposable +{ + /// + /// A mock instance of the authentication callback. + /// + private readonly Mock _authenticationHandlerMock; + + /// + /// An instance of HttpMessageHandlerStub class used to get access to various properties of HttpRequestMessage sent by HTTP client. + /// + private readonly HttpMessageHandlerStub _httpMessageHandlerStub; + + /// + /// An instance of HttpClient class used by the tests. + /// + private readonly HttpClient _httpClient; + + /// + /// Creates an instance of a class. 
+ /// + public RestApiOperationRunnerTests() + { + this._authenticationHandlerMock = new Mock(); + + this._httpMessageHandlerStub = new HttpMessageHandlerStub(); + + this._httpClient = new HttpClient(this._httpMessageHandlerStub); + } + + [Fact] + public async Task ItCanRunCreateAndUpdateOperationsWithJsonPayloadSuccessfullyAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload: null + ); + + var payload = new + { + value = "fake-value", + attributes = new + { + enabled = true + } + }; + + var arguments = new KernelArguments + { + { "payload", JsonSerializer.Serialize(payload) }, + { "content-type", "application/json" } + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.RequestUri); + Assert.Equal("https://fake-random-test-host/fake-path", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); + + Assert.Equal(HttpMethod.Post, this._httpMessageHandlerStub.Method); + + Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); + Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("application/json; charset=utf-8")); + + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + Assert.True(messageContent.Length != 0); + + var deserializedPayload = await JsonNode.ParseAsync(new MemoryStream(messageContent)); + Assert.NotNull(deserializedPayload); + + var valueProperty = deserializedPayload["value"]?.ToString(); + Assert.Equal("fake-value", valueProperty); + + var attributesProperty = deserializedPayload["attributes"]; + Assert.NotNull(attributesProperty); + + var enabledProperty = attributesProperty["enabled"]?.AsValue(); + Assert.NotNull(enabledProperty); + Assert.Equal("true", enabledProperty.ToString()); + + Assert.NotNull(result); + + Assert.Equal("fake-content", result.Content); + + Assert.Equal("application/json; charset=utf-8", result.ContentType); + + this._authenticationHandlerMock.Verify(x => x(It.IsAny(), It.IsAny()), Times.Once); + } + + [Fact] + public async Task ItCanRunCreateAndUpdateOperationsWithPlainTextPayloadSuccessfullyAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Text.Plain); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload: null + ); + + var arguments = new KernelArguments + { + { "payload", "fake-input-value" }, + { "content-type", "text/plain"} + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.RequestUri); + Assert.Equal("https://fake-random-test-host/fake-path", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); + + Assert.Equal(HttpMethod.Post, this._httpMessageHandlerStub.Method); + + Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); + 
Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("text/plain; charset=utf-8")); + + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + Assert.True(messageContent.Length != 0); + + var payloadText = Encoding.UTF8.GetString(messageContent, 0, messageContent.Length); + Assert.Equal("fake-input-value", payloadText); + + Assert.NotNull(result); + + Assert.Equal("fake-content", result.Content); + + Assert.Equal("text/plain; charset=utf-8", result.ContentType); + + this._authenticationHandlerMock.Verify(x => x(It.IsAny(), It.IsAny()), Times.Once); + } + + [Fact] + public async Task ItShouldAddHeadersToHttpRequestAsync() + { + // Arrange + var parameters = new List + { + new(name: "X-HS-1", type: "string", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HA-1", type: "array", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HA-2", type: "array", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HB-1", type: "boolean", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HB-2", type: "boolean", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HI-1", type: "integer", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HI-2", type: "integer", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HN-1", type: "number", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HN-2", type: "number", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HD-1", type: "string", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HD-2", type: "string", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "X-HD-3", type: "string", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + }; + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Get, + "fake-description", + parameters + ); + + var arguments = new KernelArguments + { + ["X-HS-1"] = "fake-header-value", + ["X-HA-1"] = "[1,2,3]", + ["X-HA-2"] = new Collection() { "3", "4", "5" }, + ["X-HB-1"] = "true", + ["X-HB-2"] = false, + ["X-HI-1"] = "10", + ["X-HI-2"] = 20, + ["X-HN-1"] = 5698.4567, + ["X-HN-2"] = "5698.4567", + ["X-HD-1"] = "2023-12-06T11:53:36Z", + ["X-HD-2"] = new DateTime(2023, 12, 06, 11, 53, 36, DateTimeKind.Utc), + ["X-HD-3"] = new DateTimeOffset(2023, 12, 06, 11, 53, 36, TimeSpan.FromHours(-2)), + }; + + var sut = new 
RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, userAgent: "fake-agent"); + + // Act + await sut.RunAsync(operation, arguments); + + // Assert - 13 headers: 12 from the test and the User-Agent added internally + Assert.NotNull(this._httpMessageHandlerStub.RequestHeaders); + Assert.Equal(13, this._httpMessageHandlerStub.RequestHeaders.Count()); + + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "User-Agent" && h.Value.Contains("fake-agent")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HS-1" && h.Value.Contains("fake-header-value")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HA-1" && h.Value.Contains("1,2,3")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HA-2" && h.Value.Contains("3,4,5")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HB-1" && h.Value.Contains("true")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HB-2" && h.Value.Contains("false")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HI-1" && h.Value.Contains("10")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HI-2" && h.Value.Contains("20")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HN-1" && h.Value.Contains("5698.4567")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HN-2" && h.Value.Contains("5698.4567")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HD-1" && h.Value.Contains("2023-12-06T11:53:36Z")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HD-2" && h.Value.Contains("2023-12-06T11:53:36Z")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "X-HD-3" && h.Value.Contains("2023-12-06T11:53:36-02:00")); + } + + [Fact] + public async Task ItShouldAddUserAgentHeaderToHttpRequestIfConfiguredAsync() + { + // Arrange + var parameters = new List + { + new( + name: "fake-header", + type: "string", + isRequired: true, + expand: false, + location: RestApiOperationParameterLocation.Header, + style: RestApiOperationParameterStyle.Simple) + }; + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Get, + "fake-description", + parameters + ); + + var arguments = new KernelArguments + { + { "fake-header", "fake-header-value" } + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, "fake-user-agent"); + + // Act + await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.RequestHeaders); + Assert.Equal(2, this._httpMessageHandlerStub.RequestHeaders.Count()); + + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "fake-header" && h.Value.Contains("fake-header-value")); + Assert.Contains(this._httpMessageHandlerStub.RequestHeaders, h => h.Key == "User-Agent" && h.Value.Contains("fake-user-agent")); + } + + [Fact] + public async Task ItShouldBuildJsonPayloadDynamicallyAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + List payloadProperties = new() + { + new("name", "string", true, new List()), + new("attributes", "object", false, new List() + { + new("enabled", 
"boolean", false, new List()), + }) + }; + + var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload + ); + + var arguments = new KernelArguments(); + arguments.Add("name", "fake-name-value"); + arguments.Add("enabled", true); + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: true); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); + Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("application/json; charset=utf-8")); + + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + Assert.True(messageContent.Length != 0); + + var deserializedPayload = await JsonNode.ParseAsync(new MemoryStream(messageContent)); + Assert.NotNull(deserializedPayload); + + var name = deserializedPayload["name"]?.ToString(); + Assert.Equal("fake-name-value", name); + + var attributes = deserializedPayload["attributes"]; + Assert.NotNull(attributes); + + var enabled = attributes["enabled"]?.ToString(); + Assert.NotNull(enabled); + Assert.Equal("true", enabled); + } + + [Fact] + public async Task ItShouldBuildJsonPayloadDynamicallyUsingPayloadMetadataDataTypesAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + List payloadProperties = new() + { + new("name", "string", true, new List()), + new("attributes", "object", false, new List() + { + new("enabled", "boolean", false, new List()), + new("cardinality", "number", false, new List()), + new("coefficient", "number", false, new List()), + new("count", "integer", false, new List()), + new("params", "array", false, new List()), + }) + }; + + var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload + ); + + var arguments = new KernelArguments(); + arguments.Add("name", "fake-string-value"); + arguments.Add("enabled", "true"); + arguments.Add("cardinality", 8); + arguments.Add("coefficient", "0.8"); + arguments.Add("count", 1); + arguments.Add("params", "[1,2,3]"); + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: true); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + + var deserializedPayload = await JsonNode.ParseAsync(new MemoryStream(messageContent)); + Assert.NotNull(deserializedPayload); + + var name = deserializedPayload["name"]?.GetValue(); + Assert.NotNull(name); + Assert.Equal(JsonValueKind.String, name.Value.ValueKind); + Assert.Equal("fake-string-value", name.ToString()); + + var attributes = deserializedPayload["attributes"]; + Assert.True(attributes is JsonObject); + + var enabled = attributes["enabled"]?.GetValue(); + Assert.NotNull(enabled); + Assert.Equal(JsonValueKind.True, enabled.Value.ValueKind); + + var cardinality = 
attributes["cardinality"]?.GetValue(); + Assert.NotNull(cardinality); + Assert.Equal(JsonValueKind.Number, cardinality.Value.ValueKind); + Assert.Equal("8", cardinality.Value.ToString()); + + var coefficient = attributes["coefficient"]?.GetValue(); + Assert.NotNull(coefficient); + Assert.Equal(JsonValueKind.Number, coefficient.Value.ValueKind); + Assert.Equal("0.8", coefficient.Value.ToString()); + + var count = attributes["count"]?.GetValue(); + Assert.NotNull(count); + Assert.Equal(JsonValueKind.Number, coefficient.Value.ValueKind); + Assert.Equal("1", count.Value.ToString()); + + var parameters = attributes["params"]; + Assert.NotNull(parameters); + Assert.True(parameters is JsonArray); + } + + [Fact] + public async Task ItShouldBuildJsonPayloadDynamicallyResolvingArgumentsByFullNamesAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + List payloadProperties = new() + { + new("upn", "string", true, new List()), + new("receiver", "object", false, new List() + { + new("upn", "string", false, new List()), + new("alternative", "object", false, new List() + { + new("upn", "string", false, new List()), + }), + }), + new("cc", "object", false, new List() + { + new("upn", "string", false, new List()), + }) + }; + + var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload + ); + + var arguments = new KernelArguments(); + arguments.Add("upn", "fake-sender-upn"); + arguments.Add("receiver.upn", "fake-receiver-upn"); + arguments.Add("receiver.alternative.upn", "fake-receiver-alternative-upn"); + arguments.Add("cc.upn", "fake-cc-upn"); + + var sut = new RestApiOperationRunner( + this._httpClient, + this._authenticationHandlerMock.Object, + enableDynamicPayload: true, + enablePayloadNamespacing: true); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); + Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("application/json; charset=utf-8")); + + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + Assert.True(messageContent.Length != 0); + + var deserializedPayload = await JsonNode.ParseAsync(new MemoryStream(messageContent)); + Assert.NotNull(deserializedPayload); + + //Sender props + var senderUpn = deserializedPayload["upn"]?.ToString(); + Assert.Equal("fake-sender-upn", senderUpn); + + //Receiver props + var receiver = deserializedPayload["receiver"]; + Assert.NotNull(receiver); + + var receiverUpn = receiver["upn"]?.AsValue(); + Assert.NotNull(receiverUpn); + Assert.Equal("fake-receiver-upn", receiverUpn.ToString()); + + var alternative = receiver["alternative"]; + Assert.NotNull(alternative); + + var alternativeUpn = alternative["upn"]?.AsValue(); + Assert.NotNull(alternativeUpn); + Assert.Equal("fake-receiver-alternative-upn", alternativeUpn.ToString()); + + //CC props + var carbonCopy = deserializedPayload["cc"]; + Assert.NotNull(carbonCopy); + + var ccUpn = carbonCopy["upn"]?.AsValue(); + Assert.NotNull(ccUpn); + Assert.Equal("fake-cc-upn", ccUpn.ToString()); + } + + [Fact] + public async Task ItShouldThrowExceptionIfPayloadMetadataDoesNotHaveContentTypeAsync() + { + 
// Arrange + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload: null + ); + + var arguments = new KernelArguments(); + + var sut = new RestApiOperationRunner( + this._httpClient, + this._authenticationHandlerMock.Object, + enableDynamicPayload: true); + + // Act + var exception = await Assert.ThrowsAsync(async () => await sut.RunAsync(operation, arguments)); + + Assert.Contains("No media type is provided", exception.Message, StringComparison.InvariantCulture); + } + + [Fact] + public async Task ItShouldThrowExceptionIfContentTypeArgumentIsNotProvidedAsync() + { + // Arrange + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload: null + ); + + var arguments = new KernelArguments(); + + var sut = new RestApiOperationRunner( + this._httpClient, + this._authenticationHandlerMock.Object, + enableDynamicPayload: false); + + // Act + var exception = await Assert.ThrowsAsync(async () => await sut.RunAsync(operation, arguments)); + + Assert.Contains("No media type is provided", exception.Message, StringComparison.InvariantCulture); + } + + [Fact] + public async Task ItShouldUsePayloadArgumentForPlainTextContentTypeWhenBuildingPayloadDynamicallyAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Text.Plain); + + var payload = new RestApiOperationPayload(MediaTypeNames.Text.Plain, new List()); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload + ); + + var arguments = new KernelArguments + { + { "payload", "fake-input-value" }, + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: true); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); + Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains("text/plain; charset=utf-8")); + + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + Assert.True(messageContent.Length != 0); + + var payloadText = Encoding.UTF8.GetString(messageContent, 0, messageContent.Length); + Assert.Equal("fake-input-value", payloadText); + } + + [Theory] + [InlineData(MediaTypeNames.Text.Plain)] + [InlineData(MediaTypeNames.Application.Json)] + public async Task ItShouldUsePayloadAndContentTypeArgumentsIfDynamicPayloadBuildingIsNotRequiredAsync(string contentType) + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Text.Plain); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload: null + ); + + var arguments = new KernelArguments + { + { "payload", "fake-input-value" }, + { "content-type", $"{contentType}" }, + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: false); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + 
Assert.NotNull(this._httpMessageHandlerStub.ContentHeaders); + Assert.Contains(this._httpMessageHandlerStub.ContentHeaders, h => h.Key == "Content-Type" && h.Value.Contains($"{contentType}; charset=utf-8")); + + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + Assert.True(messageContent.Length != 0); + + var payloadText = Encoding.UTF8.GetString(messageContent, 0, messageContent.Length); + Assert.Equal("fake-input-value", payloadText); + } + + [Fact] + public async Task ItShouldBuildJsonPayloadDynamicallyExcludingOptionalParametersIfTheirArgumentsNotProvidedAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + List payloadProperties = new() + { + new("upn", "string", false, new List()), + }; + + var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload + ); + + var arguments = new KernelArguments(); + + var sut = new RestApiOperationRunner( + this._httpClient, + this._authenticationHandlerMock.Object, + enableDynamicPayload: true, + enablePayloadNamespacing: true); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + Assert.True(messageContent.Length != 0); + + var deserializedPayload = await JsonNode.ParseAsync(new MemoryStream(messageContent)); + Assert.NotNull(deserializedPayload); + + var senderUpn = deserializedPayload["upn"]?.ToString(); + Assert.Null(senderUpn); + } + + [Fact] + public async Task ItShouldBuildJsonPayloadDynamicallyIncludingOptionalParametersIfTheirArgumentsProvidedAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + List payloadProperties = new() + { + new("upn", "string", false, new List()), + }; + + var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload + ); + + var arguments = new KernelArguments { ["upn"] = "fake-sender-upn" }; + + var sut = new RestApiOperationRunner( + this._httpClient, + this._authenticationHandlerMock.Object, + enableDynamicPayload: true, + enablePayloadNamespacing: true); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + var messageContent = this._httpMessageHandlerStub.RequestContent; + Assert.NotNull(messageContent); + Assert.True(messageContent.Length != 0); + + var deserializedPayload = await JsonNode.ParseAsync(new MemoryStream(messageContent)); + Assert.NotNull(deserializedPayload); + + var senderUpn = deserializedPayload["upn"]?.ToString(); + Assert.Equal("fake-sender-upn", senderUpn); + } + + [Fact] + public async Task ItShouldAddRequiredQueryStringParametersIfTheirArgumentsProvidedAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + var firstParameter = new RestApiOperationParameter( + "p1", + "string", + isRequired: true, //Marking the parameter 
as required + false, + RestApiOperationParameterLocation.Query, + RestApiOperationParameterStyle.Form); + + var secondParameter = new RestApiOperationParameter( + "p2", + "integer", + isRequired: true, //Marking the parameter as required + false, + RestApiOperationParameterLocation.Query, + RestApiOperationParameterStyle.Form); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Get, + "fake-description", + new List() { firstParameter, secondParameter }, + payload: null + ); + + var arguments = new KernelArguments + { + { "p1", "v1" }, + { "p2", 28 }, + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.RequestUri); + Assert.Equal("https://fake-random-test-host/fake-path?p1=v1&p2=28", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); + } + + [Fact] + public async Task ItShouldAddNotRequiredQueryStringParametersIfTheirArgumentsProvidedAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + var firstParameter = new RestApiOperationParameter( + "p1", + "string", + isRequired: false, //Marking the parameter as not required + false, + RestApiOperationParameterLocation.Query, + RestApiOperationParameterStyle.Form); + + var secondParameter = new RestApiOperationParameter( + "p2", + "string", + isRequired: false, //Marking the parameter as not required + false, + RestApiOperationParameterLocation.Query, + RestApiOperationParameterStyle.Form); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Get, + "fake-description", + new List() { firstParameter, secondParameter }, + payload: null + ); + + var arguments = new KernelArguments + { + { "p1", new DateTime(2023, 12, 06, 11, 53, 36, DateTimeKind.Utc) }, + { "p2", "v2" }, + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.RequestUri); + Assert.Equal("https://fake-random-test-host/fake-path?p1=2023-12-06T11%3a53%3a36Z&p2=v2", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); + } + + [Fact] + public async Task ItShouldSkipNotRequiredQueryStringParametersIfNoArgumentsProvidedAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + var firstParameter = new RestApiOperationParameter( + "p1", + "string", + isRequired: false, //Marking the parameter as not required + false, + RestApiOperationParameterLocation.Query, + RestApiOperationParameterStyle.Form); + + var secondParameter = new RestApiOperationParameter( + "p2", + "string", + isRequired: true, //Marking the parameter as required + false, + RestApiOperationParameterLocation.Query, + RestApiOperationParameterStyle.Form); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Get, + "fake-description", + new List() { firstParameter, secondParameter }, + payload: null + ); + + var arguments = new KernelArguments + { + { "p2", "v2" }, //Providing argument for the required parameter only + }; + + var 
sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(this._httpMessageHandlerStub.RequestUri); + Assert.Equal("https://fake-random-test-host/fake-path?p2=v2", this._httpMessageHandlerStub.RequestUri.AbsoluteUri); + } + + [Fact] + public async Task ItShouldThrowExceptionIfNoArgumentProvidedForRequiredQueryStringParameterAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json); + + var parameter = new RestApiOperationParameter( + "p1", + "string", + isRequired: true, //Marking the parameter as required + false, + RestApiOperationParameterLocation.Query, + RestApiOperationParameterStyle.Form); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Get, + "fake-description", + new List() { parameter }, + payload: null + ); + + var arguments = new KernelArguments(); //Providing no arguments + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act and Assert + await Assert.ThrowsAsync(() => sut.RunAsync(operation, arguments)); + } + + [Theory] + [InlineData(MediaTypeNames.Application.Json)] + [InlineData(MediaTypeNames.Application.Xml)] + [InlineData(MediaTypeNames.Text.Plain)] + [InlineData(MediaTypeNames.Text.Html)] + [InlineData(MediaTypeNames.Text.Xml)] + [InlineData("text/csv")] + [InlineData("text/markdown")] + public async Task ItShouldReadContentAsStringSuccessfullyAsync(string contentType) + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, contentType); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload: null + ); + + var arguments = new KernelArguments + { + { "payload", JsonSerializer.Serialize(new { value = "fake-value" }) }, + { "content-type", "application/json" } + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // Assert + Assert.NotNull(result); + + Assert.Equal("fake-content", result.Content); + + Assert.Equal($"{contentType}; charset=utf-8", result.ContentType); + } + + [Theory] + [InlineData("image/jpeg")] + [InlineData("image/png")] + [InlineData("image/gif")] + [InlineData("image/svg+xml")] + [InlineData("image/bmp")] + [InlineData("image/x-icon")] + public async Task ItShouldReadContentAsBytesSuccessfullyAsync(string contentType) + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new ByteArrayContent(new byte[] { 00, 01, 02 }); + this._httpMessageHandlerStub.ResponseToReturn.Content.Headers.ContentType = new MediaTypeHeaderValue(contentType); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload: null + ); + + var arguments = new KernelArguments + { + { "payload", JsonSerializer.Serialize(new { value = "fake-value" }) }, + { "content-type", "application/json" } + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act + var result = await sut.RunAsync(operation, arguments); + + // 
Assert + Assert.NotNull(result); + + Assert.Equal(new byte[] { 00, 01, 02 }, result.Content); + + Assert.Equal($"{contentType}", result.ContentType); + } + + [Fact] + public async Task ItShouldThrowExceptionForUnsupportedContentTypeAsync() + { + // Arrange + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, "fake/type"); + + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Post, + "fake-description", + new List(), + payload: null + ); + + var arguments = new KernelArguments + { + { "payload", JsonSerializer.Serialize(new { value = "fake-value" }) }, + { "content-type", "application/json" } + }; + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act & Assert + await Assert.ThrowsAsync(() => sut.RunAsync(operation, arguments)); + } + + public class SchemaTestData : IEnumerable + { + public IEnumerator GetEnumerator() + { + yield return new object[] { + "default", + new (string, RestApiOperationExpectedResponse)[] { + ("400", new RestApiOperationExpectedResponse("fake-content", "fake-content-type", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("FakeResponseSchema.json")))), + ("default", new RestApiOperationExpectedResponse("Default response content", "application/json", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("DefaultResponseSchema.json")))), + }, + }; + yield return new object[] { + "200", + new (string, RestApiOperationExpectedResponse)[] { + ("200", new RestApiOperationExpectedResponse("fake-content", "fake-content-type", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("FakeResponseSchema.json")))), + ("default", new RestApiOperationExpectedResponse("Default response content", "application/json", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("DefaultResponseSchema.json")))), + }, + }; + yield return new object[] { + "2XX", + new (string, RestApiOperationExpectedResponse)[] { + ("2XX", new RestApiOperationExpectedResponse("fake-content", "fake-content-type", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("FakeResponseSchema.json")))), + ("default", new RestApiOperationExpectedResponse("Default response content", "application/json", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("DefaultResponseSchema.json")))), + }, + }; + yield return new object[] { + "2XX", + new (string, RestApiOperationExpectedResponse)[] { + ("2XX", new RestApiOperationExpectedResponse("fake-content", "fake-content-type", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("FakeResponseSchema.json")))), + ("default", new RestApiOperationExpectedResponse("Default response content", "application/json", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("DefaultResponseSchema.json")))), + }, + }; + yield return new object[] { + "200", + new (string, RestApiOperationExpectedResponse)[] { + ("default", new RestApiOperationExpectedResponse("Default response content", "application/json", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("DefaultResponseSchema.json")))), + ("2XX", new RestApiOperationExpectedResponse("fake-content", "fake-content-type", KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("2XXFakeResponseSchema.json")))), + ("200", new RestApiOperationExpectedResponse("fake-content", "fake-content-type", 
KernelJsonSchema.Parse(ResourceResponseProvider.LoadFromResource("200FakeResponseSchema.json")))), + }, + }; + } + + IEnumerator IEnumerable.GetEnumerator() => this.GetEnumerator(); + } + + [Theory] + [ClassData(typeof(SchemaTestData))] + public async Task ItShouldReturnExpectedSchemaAsync(string expectedStatusCode, params (string, RestApiOperationExpectedResponse)[] responses) + { + var operation = new RestApiOperation( + "fake-id", + new Uri("https://fake-random-test-host"), + "fake-path", + HttpMethod.Get, + "fake-description", + new List(), + null, + responses.ToDictionary(item => item.Item1, item => item.Item2) + ); + + var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object); + + // Act + var result = await sut.RunAsync(operation, new KernelArguments()); + + Assert.NotNull(result); + var expected = responses.First(r => r.Item1 == expectedStatusCode).Item2.Schema; + Assert.Equal(JsonSerializer.Serialize(expected), JsonSerializer.Serialize(result.ExpectedSchema)); + } + + /// + /// Disposes resources used by this class. + /// + public void Dispose() + { + this._httpMessageHandlerStub.Dispose(); + + this._httpClient.Dispose(); + } + + private sealed class HttpMessageHandlerStub : DelegatingHandler + { + public HttpRequestHeaders? RequestHeaders { get; private set; } + + public HttpContentHeaders? ContentHeaders { get; private set; } + + public byte[]? RequestContent { get; private set; } + + public Uri? RequestUri { get; private set; } + + public HttpMethod? Method { get; private set; } + + public HttpResponseMessage ResponseToReturn { get; set; } + + public HttpMessageHandlerStub() + { + this.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent("{}", Encoding.UTF8, MediaTypeNames.Application.Json) + }; + } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + this.Method = request.Method; + this.RequestUri = request.RequestUri; + this.RequestHeaders = request.Headers; + this.RequestContent = request.Content == null ? null : await request.Content.ReadAsByteArrayAsync(cancellationToken); + this.ContentHeaders = request.Content?.Headers; + + return await Task.FromResult(this.ResponseToReturn); + } + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs new file mode 100644 index 000000000000..ed05fb800c6c --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs @@ -0,0 +1,555 @@ +// Copyright (c) Microsoft. All rights reserved. 
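Editorial aside to the runner tests above: the dynamic-payload and payload-namespacing switches they exercise (enableDynamicPayload, enablePayloadNamespacing) are normally set when an OpenAPI plugin is imported into a kernel. A minimal, hedged sketch follows — the document URL, plugin name, operation id and argument names are illustrative and not taken from this PR:

using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.OpenApi;

Kernel kernel = Kernel.CreateBuilder().Build();

// Import an OpenAPI document as a plugin; each operation becomes a KernelFunction.
KernelPlugin plugin = await kernel.ImportPluginFromOpenApiAsync(
    "keyVault",
    new Uri("https://example.com/openapi.json"),
    new OpenApiFunctionExecutionParameters
    {
        EnableDynamicPayload = true,     // build the JSON body from individual arguments
        EnablePayloadNamespacing = true  // prefix nested argument names with their parent property
    });

// Invoke one operation; required parameters are supplied via KernelArguments.
FunctionResult result = await kernel.InvokeAsync(
    plugin["SetSecret"],
    new KernelArguments { ["secret-name"] = "foo", ["value"] = "bar", ["api-version"] = "7.0" });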
+ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi; + +public class RestApiOperationTests +{ + [Fact] + public void ItShouldUseHostUrlIfNoOverrideProvided() + { + // Arrange + var sut = new RestApiOperation( + "fake_id", + new Uri("https://fake-random-test-host"), + "/", + HttpMethod.Get, + "fake_description", + new List() + ); + + var arguments = new Dictionary(); + + // Act + var url = sut.BuildOperationUrl(arguments); + + // Assert + Assert.Equal("https://fake-random-test-host/", url.OriginalString); + } + + [Fact] + public void ItShouldUseHostUrlOverrideIfProvided() + { + // Arrange + var sut = new RestApiOperation( + "fake_id", + new Uri("https://fake-random-test-host"), + "/", + HttpMethod.Get, + "fake_description", + new List() + ); + + var fakeHostUrlOverride = "https://fake-random-test-host-override"; + + var arguments = new Dictionary(); + + // Act + var url = sut.BuildOperationUrl(arguments, serverUrlOverride: new Uri(fakeHostUrlOverride)); + + // Assert + Assert.Equal(fakeHostUrlOverride, url.OriginalString.TrimEnd('/')); + } + + [Fact] + public void ItShouldReplacePathParametersByValuesFromArguments() + { + // Arrange + var parameters = new List { + new( + name: "p1", + type: "string", + isRequired: true, + expand: false, + location: RestApiOperationParameterLocation.Path, + style: RestApiOperationParameterStyle.Simple), + new( + name: "p2", + type: "number", + isRequired: true, + expand: false, + location: RestApiOperationParameterLocation.Path, + style: RestApiOperationParameterStyle.Simple) + }; + + var sut = new RestApiOperation( + "fake_id", + new Uri("https://fake-random-test-host"), + "/{p1}/{p2}/other_fake_path_section", + HttpMethod.Get, + "fake_description", + parameters + ); + + var arguments = new Dictionary + { + { "p1", "v1" }, + { "p2", 34 } + }; + + // Act + var url = sut.BuildOperationUrl(arguments); + + // Assert + Assert.Equal("https://fake-random-test-host/v1/34/other_fake_path_section", url.OriginalString); + } + + [Fact] + public void ShouldBuildResourceUrlWithoutQueryString() + { + // Arrange + var parameters = new List { + new( + name: "p1", + type: "string", + isRequired: false, + expand: false, + location: RestApiOperationParameterLocation.Query, + defaultValue: "dv1"), + new( + name: "fake-path", + type: "string", + isRequired: false, + expand: false, + location: RestApiOperationParameterLocation.Path) + }; + + var sut = new RestApiOperation( + "fake_id", + new Uri("https://fake-random-test-host"), + "{fake-path}/", + HttpMethod.Get, + "fake_description", + parameters); + + var fakeHostUrlOverride = "https://fake-random-test-host-override"; + + var arguments = new Dictionary + { + { "fake-path", "fake-path-value" }, + }; + + // Act + var url = sut.BuildOperationUrl(arguments, serverUrlOverride: new Uri(fakeHostUrlOverride)); + + // Assert + Assert.Equal($"{fakeHostUrlOverride}/fake-path-value/", url.OriginalString); + } + + [Fact] + public void ItShouldRenderHeaderValuesFromArguments() + { + // Arrange + var parameters = new List + { + new( + name: "fake_header_one", + type: "string", + isRequired: true, + expand: false, + location: 
RestApiOperationParameterLocation.Header, + style: RestApiOperationParameterStyle.Simple), + + new( + name: "fake_header_two", + type: "string", + isRequired: true, + expand: false, + location: RestApiOperationParameterLocation.Header, + style: RestApiOperationParameterStyle.Simple) + }; + + var arguments = new Dictionary + { + { "fake_header_one", "fake_header_one_value" }, + { "fake_header_two", "fake_header_two_value" } + }; + + var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", parameters); + + // Act + var headers = sut.BuildHeaders(arguments); + + // Assert + Assert.Equal(2, headers.Count); + + var headerOne = headers["fake_header_one"]; + Assert.Equal("fake_header_one_value", headerOne); + + var headerTwo = headers["fake_header_two"]; + Assert.Equal("fake_header_two_value", headerTwo); + } + + [Fact] + public void ShouldThrowExceptionIfNoValueProvidedForRequiredHeader() + { + // Arrange + var metadata = new List + { + new(name: "fake_header_one", type: "string", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "fake_header_two", type : "string", isRequired : false, expand : false, location : RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple) + }; + + var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata); + + // Act + void Act() => sut.BuildHeaders(new Dictionary()); + + // Assert + Assert.Throws(Act); + } + + [Fact] + public void ItShouldSkipOptionalHeaderHavingNoValue() + { + // Arrange + var metadata = new List + { + new(name: "fake_header_one", type : "string", isRequired : true, expand : false, location : RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "fake_header_two", type : "string", isRequired : false, expand : false, location : RestApiOperationParameterLocation.Header, style : RestApiOperationParameterStyle.Simple) + }; + + var arguments = new Dictionary + { + ["fake_header_one"] = "fake_header_one_value" + }; + + var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata); + + // Act + var headers = sut.BuildHeaders(arguments); + + // Assert + Assert.Single(headers); + + var headerOne = headers["fake_header_one"]; + Assert.Equal("fake_header_one_value", headerOne); + } + + [Fact] + public void ItShouldCreateHeaderWithCommaSeparatedValues() + { + // Arrange + var metadata = new List + { + new( name: "h1", type: "array", isRequired: false, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple, arrayItemType: "string"), + new( name: "h2", type: "array", isRequired: false, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple, arrayItemType: "integer") + }; + + var arguments = new Dictionary + { + ["h1"] = "[\"a\",\"b\",\"c\"]", + ["h2"] = "[1,2,3]" + }; + + var sut = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); + + // Act + var headers = sut.BuildHeaders(arguments); + + // Assert + Assert.NotNull(headers); + Assert.Equal(2, headers.Count); + + Assert.Equal("a,b,c", headers["h1"]); + Assert.Equal("1,2,3", headers["h2"]); + } + + [Fact] + public void 
ItShouldCreateHeaderWithPrimitiveValue() + { + // Arrange + var metadata = new List + { + new( name: "h1", type: "string", isRequired: false, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new( name: "h2", type: "boolean", isRequired: false, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple) + }; + + var arguments = new Dictionary + { + ["h1"] = "v1", + ["h2"] = true + }; + + var sut = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); + + // Act + var headers = sut.BuildHeaders(arguments); + + // Assert + Assert.NotNull(headers); + Assert.Equal(2, headers.Count); + + Assert.Equal("v1", headers["h1"]); + Assert.Equal("true", headers["h2"]); + } + + [Fact] + public void ItShouldMixAndMatchHeadersOfDifferentValueTypes() + { + // Arrange + var metadata = new List + { + new(name: "h1", type: "array", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + new(name: "h2", type: "boolean", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple), + }; + + var arguments = new Dictionary + { + ["h1"] = new List { "a", "b" }, + ["h2"] = "false" + }; + + var sut = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); + + // Act + var headers = sut.BuildHeaders(arguments); + + // Assert + Assert.NotNull(headers); + Assert.Equal(2, headers.Count); + + Assert.Equal("a,b", headers["h1"]); + Assert.Equal("false", headers["h2"]); + } + + [Fact] + public void ItCreatesNewKernelsOnEachBuild() + { + IKernelBuilder builder = Kernel.CreateBuilder(); + Assert.NotSame(builder.Build(), builder.Build()); + } + + [Fact] + public void ItHasIdempotentServicesAndPlugins() + { + IKernelBuilder builder = Kernel.CreateBuilder(); + + Assert.NotNull(builder.Services); + Assert.NotNull(builder.Plugins); + + IServiceCollection services = builder.Services; + IKernelBuilderPlugins plugins = builder.Plugins; + + for (int i = 0; i < 3; i++) + { + Assert.Same(services, builder.Services); + Assert.Same(plugins, builder.Plugins); + Assert.NotNull(builder.Build()); + } + } + + [Fact] + public void ItDefaultsDataToAnEmptyDictionary() + { + Kernel kernel = Kernel.CreateBuilder().Build(); + Assert.Empty(kernel.Data); + } + + [Fact] + public void ItDefaultsServiceSelectorToSingleton() + { + Kernel kernel = Kernel.CreateBuilder().Build(); + Assert.Null(kernel.Services.GetService()); + Assert.NotNull(kernel.ServiceSelector); + Assert.Same(kernel.ServiceSelector, kernel.ServiceSelector); + Assert.Throws(() => kernel.GetRequiredService()); + + kernel = new Kernel(); + Assert.Null(kernel.Services.GetService()); + Assert.NotNull(kernel.ServiceSelector); + Assert.Same(kernel.ServiceSelector, kernel.ServiceSelector); + Assert.Throws(() => kernel.GetRequiredService()); + + NopServiceSelector selector = new(); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(selector); + kernel = builder.Build(); + Assert.Same(selector, kernel.Services.GetService()); + Assert.Same(selector, kernel.ServiceSelector); + Assert.Same(selector, kernel.GetRequiredService()); + } + + private sealed class NopServiceSelector : IAIServiceSelector + { +#pragma warning disable CS8769 // Nullability of 
reference types in type of parameter doesn't match implemented member (possibly because of nullability attributes). + bool IAIServiceSelector.TrySelectAIService( +#pragma warning restore CS8769 + Kernel kernel, KernelFunction function, KernelArguments arguments, out T? service, out PromptExecutionSettings? serviceSettings) where T : class => + throw new NotImplementedException(); + } + + [Fact] + public void ItPropagatesPluginsToBuiltKernel() + { + KernelPlugin plugin1 = KernelPluginFactory.CreateFromFunctions("plugin1"); + KernelPlugin plugin2 = KernelPluginFactory.CreateFromFunctions("plugin2"); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin1); + builder.Plugins.Add(plugin2); + Kernel kernel = builder.Build(); + + Assert.Contains(plugin1, kernel.Plugins); + Assert.Contains(plugin2, kernel.Plugins); + } + + [Fact] + public void ItSuppliesServicesCollectionToPluginsBuilder() + { + IKernelBuilder builder = Kernel.CreateBuilder(); + Assert.Same(builder.Services, builder.Plugins.Services); + } + + [Fact] + public void ItBuildsServicesIntoKernel() + { + var builder = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddAzureOpenAITextGeneration(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); + + builder.Services.AddSingleton(CultureInfo.InvariantCulture); + builder.Services.AddSingleton(CultureInfo.CurrentCulture); + builder.Services.AddSingleton(new CultureInfo("en-US")); + + Kernel kernel = builder.Build(); + + Assert.IsType(kernel.GetRequiredService("openai")); + Assert.IsType(kernel.GetRequiredService("azureopenai")); + + Assert.Equal(2, kernel.GetAllServices().Count()); + Assert.Single(kernel.GetAllServices()); + + Assert.Equal(3, kernel.GetAllServices().Count()); + } + + [Fact] + public void ItSupportsMultipleEqualNamedServices() + { + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "lmnop", endpoint: "https://qrs", apiKey: "tuv", serviceId: "openai") + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "lmnop", endpoint: "https://qrs", apiKey: "tuv", serviceId: "openai") + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "lmnop", endpoint: "https://qrs", apiKey: "tuv", serviceId: "openai") + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "lmnop", endpoint: "https://qrs", apiKey: "tuv", serviceId: "openai") + .Build(); + + Assert.Equal(8, kernel.GetAllServices().Count()); + } + + [Fact] + public void ItIsntNeededInDIContexts() + { + KernelPluginCollection plugins = new() { KernelPluginFactory.CreateFromFunctions("plugin1") }; + + var serviceCollection = new ServiceCollection(); + serviceCollection.AddAzureOpenAIChatCompletion(deploymentName: "abcd", modelId: "efg", endpoint: "https://hijk", apiKey: "lmnop"); + serviceCollection.AddAzureOpenAIChatCompletion(deploymentName: "abcd", modelId: "efg", endpoint: "https://hijk", apiKey: "lmnop"); + serviceCollection.AddAzureOpenAIChatCompletion(deploymentName: "abcd", modelId: "efg", endpoint: "https://hijk", apiKey: "lmnop", serviceId: "azureopenai1"); + 
serviceCollection.AddAzureOpenAIChatCompletion(deploymentName: "abcd", modelId: "efg", endpoint: "https://hijk", apiKey: "lmnop", serviceId: "azureopenai2"); + serviceCollection.AddSingleton(plugins); + serviceCollection.AddSingleton(); + + Kernel k = serviceCollection.BuildServiceProvider().GetService()!; + + Assert.NotNull(k); + Assert.Same(plugins, k.Plugins); + Assert.IsAssignableFrom(k.GetRequiredService("azureopenai1")); + Assert.IsAssignableFrom(k.GetRequiredService("azureopenai2")); + + // This should be 4, not 2. However, there is currently a limitation with Microsoft.Extensions.DependencyInjection + // that prevents GetAllServices from enumerating named services. KernelBuilder works around this, + // but when just using DI directly, it will only find unnamed services. Once that issue is fixed and SK + // brings in the new version, it can update the GetAllServices implementation to remove the workaround, + // and then this test should be updated accordingly. + Assert.Equal(2, k.GetAllServices().Count()); + + // It's possible to explicitly use the same workaround outside of KernelBuilder to get all services, + // but it's not recommended. + + //** WORKAROUND + Dictionary> mapping = new(); + foreach (var descriptor in serviceCollection) + { + if (!mapping.TryGetValue(descriptor.ServiceType, out HashSet? keys)) + { + mapping[descriptor.ServiceType] = keys = new HashSet(); + } + keys.Add(descriptor.ServiceKey); + } + serviceCollection.AddKeyedSingleton>>("KernelServiceTypeToKeyMappings", mapping); + //** + + k = serviceCollection.BuildServiceProvider().GetService()!; + Assert.Equal(4, k.GetAllServices().Count()); // now this is 4 as expected + } + + [Fact] + public void ItFindsAllPluginsToPopulatePluginsCollection() + { + KernelPlugin plugin1 = KernelPluginFactory.CreateFromFunctions("plugin1"); + KernelPlugin plugin2 = KernelPluginFactory.CreateFromFunctions("plugin2"); + KernelPlugin plugin3 = KernelPluginFactory.CreateFromFunctions("plugin3"); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(plugin1); + builder.Services.AddSingleton(plugin2); + builder.Services.AddSingleton(plugin3); + Kernel kernel = builder.Build(); + + Assert.Equal(3, kernel.Plugins.Count); + } + + [Fact] + public void ItFindsPluginCollectionToUse() + { + KernelPlugin plugin1 = KernelPluginFactory.CreateFromFunctions("plugin1"); + KernelPlugin plugin2 = KernelPluginFactory.CreateFromFunctions("plugin2"); + KernelPlugin plugin3 = KernelPluginFactory.CreateFromFunctions("plugin3"); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient(_ => new(new[] { plugin1, plugin2, plugin3 })); + + Kernel kernel1 = builder.Build(); + Assert.Equal(3, kernel1.Plugins.Count); + + Kernel kernel2 = builder.Build(); + Assert.Equal(3, kernel2.Plugins.Count); + + Assert.NotSame(kernel1.Plugins, kernel2.Plugins); + } + + [Fact] + public void ItAddsTheRightTypesInAddKernel() + { + IServiceCollection sc = new ServiceCollection(); + + IKernelBuilder builder = sc.AddKernel(); + Assert.NotNull(builder); + Assert.Throws(() => builder.Build()); + + builder.Services.AddSingleton>(new Dictionary()); + + IServiceProvider provider = sc.BuildServiceProvider(); + + Assert.NotNull(provider.GetService>()); + Assert.NotNull(provider.GetService()); + Assert.NotNull(provider.GetService()); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/ArrayParameterSerializerTests.cs 
b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/ArrayParameterSerializerTests.cs similarity index 79% rename from dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/ArrayParameterSerializerTests.cs rename to dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/ArrayParameterSerializerTests.cs index aa47a4648a35..f5abe43c9655 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/ArrayParameterSerializerTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/ArrayParameterSerializerTests.cs @@ -2,10 +2,11 @@ using System; using System.Text.Json.Nodes; -using Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; +using Microsoft.SemanticKernel.Plugins.OpenApi; using Xunit; -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Builders.Serialization; +namespace SemanticKernel.Functions.UnitTests.OpenApi.Serialization; + public class ArrayParameterSerializerTests { [Fact] @@ -91,7 +92,7 @@ public void ItShouldAllowDuplicatesWhenCreatingParameterWithDelimitedValuePerArr public void ItShouldEncodeSpecialSymbolsInSeparateParameterValues(string specialSymbol, string encodedEquivalent) { // Arrange - var array = new JsonArray($"{specialSymbol}"); + var array = new JsonArray(specialSymbol); // Act var result = ArrayParameterValueSerializer.SerializeArrayAsSeparateParameters("id", array, delimiter: "&"); @@ -110,7 +111,7 @@ public void ItShouldEncodeSpecialSymbolsInSeparateParameterValues(string special public void ItShouldEncodeSpecialSymbolsInDelimitedParameterValues(string specialSymbol, string encodedEquivalent) { // Arrange - var array = new JsonArray($"{specialSymbol}"); + var array = new JsonArray(specialSymbol); // Act var result = ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: "%20"); @@ -120,4 +121,23 @@ public void ItShouldEncodeSpecialSymbolsInDelimitedParameterValues(string specia Assert.EndsWith(encodedEquivalent, result, StringComparison.Ordinal); } + + [Theory] + [InlineData(":", ":")] + [InlineData("/", "/")] + [InlineData("?", "?")] + [InlineData("#", "#")] + public void ItShouldNotEncodeSpecialSymbolsInDelimitedParameterValuesIfEncodingDisabled(string specialSymbol, string expectedValue) + { + // Arrange + var array = new JsonArray(specialSymbol); + + // Act + var result = ArrayParameterValueSerializer.SerializeArrayAsDelimitedValues(array, delimiter: ",", encode: false); + + // Assert + Assert.NotNull(result); + + Assert.EndsWith(expectedValue, result, StringComparison.Ordinal); + } } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/FormStyleParametersSerializerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/FormStyleParametersSerializerTests.cs similarity index 81% rename from dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/FormStyleParametersSerializerTests.cs rename to dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/FormStyleParametersSerializerTests.cs index 4a5cd5120d0e..852a88c79b78 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/FormStyleParametersSerializerTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/FormStyleParametersSerializerTests.cs @@ -1,11 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. 
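The FormStyle hunks below switch the serializer from accepting a raw JSON string to accepting a JsonNode/JsonArray. For orientation, a hypothetical helper showing the "form" style semantics those tests assert (the tests' expand flag corresponds to OpenAPI's explode; URL-encoding is omitted here, whereas the real serializer encodes values) — this is not the SK FormStyleParameterSerializer:

using System.Linq;
using System.Text.Json.Nodes;

static string SerializeFormStyle(string name, JsonArray values, bool explode) =>
    explode
        ? string.Join("&", values.Select(v => $"{name}={v}"))               // id=1&id=2&id=3
        : $"{name}={string.Join(",", values.Select(v => v!.ToString()))}";  // id=1,2,3

// e.g. SerializeFormStyle("id", new JsonArray(1, 2, 3), explode: false) returns "id=1,2,3"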
using System; -using Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Plugins.OpenApi; using Xunit; -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Builders.Serialization; +namespace SemanticKernel.Functions.UnitTests.OpenApi.Serialization; public class FormStyleParametersSerializerTests { @@ -23,7 +23,7 @@ public void ItShouldCreateAmpersandSeparatedParameterPerArrayItem() arrayItemType: "integer"); // Act - var result = FormStyleParameterSerializer.Serialize(parameter, "[1,2,3]"); + var result = FormStyleParameterSerializer.Serialize(parameter, new JsonArray(1, 2, 3)); // Assert Assert.NotNull(result); @@ -45,7 +45,7 @@ public void ItShouldCreateParameterWithCommaSeparatedValuePerArrayItem() arrayItemType: "integer"); // Act - var result = FormStyleParameterSerializer.Serialize(parameter, "[1,2,3]"); + var result = FormStyleParameterSerializer.Serialize(parameter, new JsonArray(1, 2, 3)); // Assert Assert.NotNull(result); @@ -74,6 +74,27 @@ public void ItShouldCreateParameterForPrimitiveValue() Assert.Equal("id=28", result); } + [Fact] + public void ItShouldCreateParameterForStringValue() + { + // Arrange + var parameter = new RestApiOperationParameter( + name: "id", + type: "string", + isRequired: true, + expand: false, + location: RestApiOperationParameterLocation.Query, + style: RestApiOperationParameterStyle.Form); + + // Act + var result = FormStyleParameterSerializer.Serialize(parameter, JsonValue.Create(new DateTime(2023, 12, 06, 11, 53, 36, DateTimeKind.Utc))); + + // Assert + Assert.NotNull(result); + + Assert.Equal("id=2023-12-06T11%3a53%3a36Z", result); + } + [Theory] [InlineData(":", "%3a")] [InlineData("/", "%2f")] @@ -104,7 +125,7 @@ public void ItShouldEncodeSpecialSymbolsInAmpersandSeparatedParameterValues(stri var parameter = new RestApiOperationParameter("id", "array", false, true, RestApiOperationParameterLocation.Query, RestApiOperationParameterStyle.Form); // Act - var result = FormStyleParameterSerializer.Serialize(parameter, $"[\"{specialSymbol}\"]"); + var result = FormStyleParameterSerializer.Serialize(parameter, new JsonArray($"{specialSymbol}")); // Assert Assert.NotNull(result); @@ -123,7 +144,7 @@ public void ItShouldEncodeSpecialSymbolsInCommaSeparatedParameterValues(string s var parameter = new RestApiOperationParameter("id", "array", false, false, RestApiOperationParameterLocation.Query, RestApiOperationParameterStyle.Form); // Act - var result = FormStyleParameterSerializer.Serialize(parameter, $"[\"{specialSymbol}\"]"); + var result = FormStyleParameterSerializer.Serialize(parameter, new JsonArray($"{specialSymbol}")); // Assert Assert.NotNull(result); diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs new file mode 100644 index 000000000000..9331bb0b55a2 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/OpenApiTypeConverterTests.cs @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft. All rights reserved. 
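The OpenApiTypeConverterTests that follow pin down the JsonNode shapes produced for each OpenAPI type ("number", "integer", "boolean", "array"). For reference, a BCL-only illustration of those shapes, assuming only System.Text.Json — this is not the converter's implementation:

using System.Text.Json.Nodes;

JsonNode asNumber  = JsonValue.Create(10.5);        // ToString() => "10.5"
JsonNode asInteger = JsonValue.Create(10);          // ToString() => "10"
JsonNode asBoolean = JsonValue.Create(true);        // ToString() => "true"
JsonNode asArray   = JsonNode.Parse("[1, 2, 3]")!;  // ToJsonString() => "[1,2,3]"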
+ +using System; +using System.Collections.Generic; +using System.Globalization; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Microsoft.VisualBasic; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi.Serialization; + +public class OpenApiTypeConverterTests +{ + [Fact] + public void ItShouldConvertString() + { + // Arrange + object? value = "test"; + + // Act + var result = OpenApiTypeConverter.Convert("id", "string", value); + + // Assert + Assert.Equal("\"test\"", result.ToString()); + } + + [Fact] + public void ItShouldConvertNumber() + { + // Act & Assert + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", (byte)10).ToString()); + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", (sbyte)10).ToString()); + + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", (short)10).ToString()); + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", (ushort)10).ToString()); + + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", (int)10).ToString()); + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", (uint)10).ToString()); + + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", (long)10).ToString()); + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", (ulong)10).ToString()); + + Assert.Equal("10.5", OpenApiTypeConverter.Convert("id", "number", (float)10.5).ToString()); + Assert.Equal("10.5", OpenApiTypeConverter.Convert("id", "number", (double)10.5).ToString()); + Assert.Equal("10.5", OpenApiTypeConverter.Convert("id", "number", (decimal)10.5).ToString()); + + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "number", "10").ToString()); + Assert.Equal("10.5", OpenApiTypeConverter.Convert("id", "number", "10.5").ToString()); + } + + [Fact] + public void ItShouldConvertInteger() + { + // Act & Assert + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", (byte)10).ToString()); + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", (sbyte)10).ToString()); + + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", (short)10).ToString()); + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", (ushort)10).ToString()); + + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", (int)10).ToString()); + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", (uint)10).ToString()); + + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", (long)10).ToString()); + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", (ulong)10).ToString()); + + Assert.Equal("10", OpenApiTypeConverter.Convert("id", "integer", "10").ToString()); + } + + [Fact] + public void ItShouldConvertBoolean() + { + // Act & Assert + Assert.Equal("true", OpenApiTypeConverter.Convert("id", "boolean", true).ToString()); + + Assert.Equal("false", OpenApiTypeConverter.Convert("id", "boolean", false).ToString()); + + Assert.Equal("true", OpenApiTypeConverter.Convert("id", "boolean", "true").ToString()); + + Assert.Equal("false", OpenApiTypeConverter.Convert("id", "boolean", "false").ToString()); + } + + [Fact] + public void ItShouldConvertDateTime() + { + // Arrange + var dateTime = DateTime.ParseExact("06.12.2023 11:53:36+02:00", "dd.MM.yyyy HH:mm:sszzz", CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal); + + // Act & Assert + Assert.Equal("\"2023-12-06T09:53:36Z\"", OpenApiTypeConverter.Convert("id", "string", dateTime).ToString()); + } + + [Fact] + public void ItShouldConvertDateTimeOffset() + { + // Arrange 
+ var offset = DateTimeOffset.ParseExact("06.12.2023 11:53:36 +02:00", "dd.MM.yyyy HH:mm:ss zzz", CultureInfo.InvariantCulture); + + // Act & Assert + Assert.Equal("\"2023-12-06T11:53:36+02:00\"", OpenApiTypeConverter.Convert("id", "string", offset).ToString()); + } + + [Fact] + public void ItShouldConvertCollections() + { + // Act & Assert + Assert.Equal("[1,2,3]", OpenApiTypeConverter.Convert("id", "array", new[] { 1, 2, 3 }).ToJsonString()); + + Assert.Equal("[1,2,3]", OpenApiTypeConverter.Convert("id", "array", new List { 1, 2, 3 }).ToJsonString()); + + Assert.Equal("[1,2,3]", OpenApiTypeConverter.Convert("id", "array", new Collection() { 1, 2, 3 }).ToJsonString()); + + Assert.Equal("[1,2,3]", OpenApiTypeConverter.Convert("id", "array", "[1, 2, 3]").ToJsonString()); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/PipeDelimitedStyleParametersSerializerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/PipeDelimitedStyleParametersSerializerTests.cs similarity index 84% rename from dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/PipeDelimitedStyleParametersSerializerTests.cs rename to dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/PipeDelimitedStyleParametersSerializerTests.cs index b499221a5ef5..05fb125924ac 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/PipeDelimitedStyleParametersSerializerTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/PipeDelimitedStyleParametersSerializerTests.cs @@ -1,23 +1,22 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Plugins.OpenApi; using Xunit; -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Builders.Serialization; +namespace SemanticKernel.Functions.UnitTests.OpenApi.Serialization; public class PipeDelimitedStyleParametersSerializerTests { [Fact] public void ItShouldThrowExceptionForUnsupportedParameterStyle() { - //Arrange + // Arrange var parameter = new RestApiOperationParameter(name: "p1", type: "string", isRequired: false, expand: false, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.Form); - //Act & Assert - Assert.Throws(() => PipeDelimitedStyleParameterSerializer.Serialize(parameter, "fake-argument")); + // Act & Assert + Assert.Throws(() => PipeDelimitedStyleParameterSerializer.Serialize(parameter, "fake-argument")); } [Theory] @@ -28,11 +27,11 @@ public void ItShouldThrowExceptionForUnsupportedParameterStyle() [InlineData("object")] public void ItShouldThrowExceptionIfParameterTypeIsNotArray(string parameterType) { - //Arrange + // Arrange var parameter = new RestApiOperationParameter(name: "p1", type: parameterType, isRequired: false, expand: false, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.PipeDelimited); - //Act & Assert - Assert.Throws(() => PipeDelimitedStyleParameterSerializer.Serialize(parameter, "fake-argument")); + // Act & Assert + Assert.Throws(() => PipeDelimitedStyleParameterSerializer.Serialize(parameter, "fake-argument")); } [Fact] @@ -49,7 +48,7 @@ public void ItShouldCreateAmpersandSeparatedParameterPerArrayItem() arrayItemType: "integer"); // Act - var result = 
PipeDelimitedStyleParameterSerializer.Serialize(parameter, "[1,2,3]"); + var result = PipeDelimitedStyleParameterSerializer.Serialize(parameter, new JsonArray(1, 2, 3)); // Assert Assert.NotNull(result); @@ -71,7 +70,7 @@ public void ItShouldCreateParameterWithPipeSeparatedValuePerArrayItem() arrayItemType: "integer"); // Act - var result = PipeDelimitedStyleParameterSerializer.Serialize(parameter, "[1,2,3]"); + var result = PipeDelimitedStyleParameterSerializer.Serialize(parameter, new JsonArray("1", "2", "3")); // Assert Assert.NotNull(result); @@ -90,7 +89,7 @@ public void ItShouldEncodeSpecialSymbolsInPipeDelimitedParameterValues(string sp var parameter = new RestApiOperationParameter(name: "id", type: "array", isRequired: false, expand: false, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.PipeDelimited); // Act - var result = PipeDelimitedStyleParameterSerializer.Serialize(parameter, $"[\"{specialSymbol}\"]"); + var result = PipeDelimitedStyleParameterSerializer.Serialize(parameter, new JsonArray(specialSymbol)); // Assert Assert.NotNull(result); @@ -109,7 +108,7 @@ public void ItShouldEncodeSpecialSymbolsInAmpersandDelimitedParameterValues(stri var parameter = new RestApiOperationParameter(name: "id", type: "array", isRequired: false, expand: true, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.PipeDelimited); // Act - var result = PipeDelimitedStyleParameterSerializer.Serialize(parameter, $"[\"{specialSymbol}\"]"); + var result = PipeDelimitedStyleParameterSerializer.Serialize(parameter, new JsonArray(specialSymbol)); // Assert Assert.NotNull(result); diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/SimpleStyleParametersSerializerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/SimpleStyleParametersSerializerTests.cs new file mode 100644 index 000000000000..c29d2353c2d1 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/SimpleStyleParametersSerializerTests.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft. All rights reserved. 
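The simple-style tests that follow expect array arguments to be joined with commas and primitive values to be passed through as-is. A hypothetical helper mirroring that behaviour (encoding concerns are out of scope for this sketch; it is not the SK SimpleStyleParameterSerializer):

using System.Linq;
using System.Text.Json.Nodes;

static string SerializeSimpleStyle(JsonNode argument) =>
    argument is JsonArray array
        ? string.Join(",", array.Select(item => item!.ToString()))  // new JsonArray(1, 2, 3) => "1,2,3"
        : argument.ToString();                                       // "28" => "28"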
+ +using System; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Plugins.OpenApi; +using Xunit; + +namespace SemanticKernel.Functions.UnitTests.OpenApi.Serialization; + +public class SimpleStyleParametersSerializerTests +{ + [Fact] + public void ItShouldCreateParameterWithCommaSeparatedValuePerArrayItem() + { + // Arrange + var parameter = new RestApiOperationParameter(name: "id", type: "array", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple, arrayItemType: "integer"); + + // Act + var result = SimpleStyleParameterSerializer.Serialize(parameter, new JsonArray(1, 2, 3)); + + // Assert + Assert.NotNull(result); + + Assert.Equal("1,2,3", result); + } + + [Fact] + public void ItShouldCreateParameterWithCommaSeparatedValuePerArrayStringItem() + { + // Arrange + var parameter = new RestApiOperationParameter(name: "id", type: "array", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple, arrayItemType: "integer"); + + // Act + var result = SimpleStyleParameterSerializer.Serialize(parameter, new JsonArray("1", "2", "3")); + + // Assert + Assert.NotNull(result); + + Assert.Equal("1,2,3", result); + } + + [Fact] + public void ItShouldCreateParameterForPrimitiveValue() + { + // Arrange + var parameter = new RestApiOperationParameter(name: "id", type: "integer", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple); + + // Act + var result = SimpleStyleParameterSerializer.Serialize(parameter, "28"); + + // Assert + Assert.NotNull(result); + + Assert.Equal("28", result); + } + + [Theory] + [InlineData(":", ":")] + [InlineData("/", "/")] + [InlineData("?", "?")] + [InlineData("#", "#")] + public void ItShouldNotEncodeSpecialSymbolsInPrimitiveParameterValues(string specialSymbol, string expectedSymbol) + { + // Arrange + var parameter = new RestApiOperationParameter(name: "id", type: "string", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple); + + // Act + var result = SimpleStyleParameterSerializer.Serialize(parameter, $"fake_query_param_value{specialSymbol}"); + + // Assert + Assert.NotNull(result); + + Assert.EndsWith(expectedSymbol, result, StringComparison.Ordinal); + } + + [Theory] + [InlineData(":", ":")] + [InlineData("/", "/")] + [InlineData("?", "?")] + [InlineData("#", "#")] + public void ItShouldEncodeSpecialSymbolsInCommaSeparatedParameterValues(string specialSymbol, string expectedSymbol) + { + // Arrange + var parameter = new RestApiOperationParameter(name: "id", type: "array", isRequired: true, expand: false, location: RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple); + + // Act + var result = SimpleStyleParameterSerializer.Serialize(parameter, new JsonArray(specialSymbol)); + + // Assert + Assert.NotNull(result); + + Assert.EndsWith(expectedSymbol, result, StringComparison.Ordinal); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/SpaceDelimitedStyleParametersSerializerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/SpaceDelimitedStyleParametersSerializerTests.cs similarity index 84% rename from dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/SpaceDelimitedStyleParametersSerializerTests.cs rename to 
dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/SpaceDelimitedStyleParametersSerializerTests.cs index 8bba5a4dba73..be37663b2ed6 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/Builders/Serialization/SpaceDelimitedStyleParametersSerializerTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/SpaceDelimitedStyleParametersSerializerTests.cs @@ -1,22 +1,22 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Builders.Serialization; -using Microsoft.SemanticKernel.Functions.OpenAPI.Model; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Plugins.OpenApi; using Xunit; -namespace SemanticKernel.Functions.UnitTests.OpenAPI.Builders.Serialization; +namespace SemanticKernel.Functions.UnitTests.OpenApi.Serialization; + public class SpaceDelimitedStyleParametersSerializerTests { [Fact] public void ItShouldThrowExceptionForUnsupportedParameterStyle() { - //Arrange + // Arrange var parameter = new RestApiOperationParameter(name: "p1", type: "string", isRequired: false, expand: false, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.Label); - //Act & Assert - Assert.Throws(() => SpaceDelimitedStyleParameterSerializer.Serialize(parameter, "fake-argument")); + // Act & Assert + Assert.Throws(() => SpaceDelimitedStyleParameterSerializer.Serialize(parameter, "fake-argument")); } [Theory] @@ -27,11 +27,11 @@ public void ItShouldThrowExceptionForUnsupportedParameterStyle() [InlineData("object")] public void ItShouldThrowExceptionIfParameterTypeIsNotArray(string parameterType) { - //Arrange + // Arrange var parameter = new RestApiOperationParameter(name: "p1", type: parameterType, isRequired: false, expand: false, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.SpaceDelimited); - //Act & Assert - Assert.Throws(() => SpaceDelimitedStyleParameterSerializer.Serialize(parameter, "fake-argument")); + // Act & Assert + Assert.Throws(() => SpaceDelimitedStyleParameterSerializer.Serialize(parameter, "fake-argument")); } [Fact] @@ -48,7 +48,7 @@ public void ItShouldCreateAmpersandSeparatedParameterPerArrayItem() arrayItemType: "integer"); // Act - var result = SpaceDelimitedStyleParameterSerializer.Serialize(parameter, "[1,2,3]"); + var result = SpaceDelimitedStyleParameterSerializer.Serialize(parameter, new JsonArray("1", "2", "3")); // Assert Assert.NotNull(result); @@ -70,7 +70,7 @@ public void ItShouldCreateParameterWithSpaceSeparatedValuePerArrayItem() arrayItemType: "integer"); // Act - var result = SpaceDelimitedStyleParameterSerializer.Serialize(parameter, "[1,2,3]"); + var result = SpaceDelimitedStyleParameterSerializer.Serialize(parameter, new JsonArray(1, 2, 3)); // Assert Assert.NotNull(result); @@ -89,7 +89,7 @@ public void ItShouldEncodeSpecialSymbolsInSpaceDelimitedParameterValues(string s var parameter = new RestApiOperationParameter(name: "id", type: "array", isRequired: false, expand: false, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.SpaceDelimited); // Act - var result = SpaceDelimitedStyleParameterSerializer.Serialize(parameter, $"[\"{specialSymbol}\"]"); + var result = SpaceDelimitedStyleParameterSerializer.Serialize(parameter, new JsonArray(specialSymbol)); // Assert Assert.NotNull(result); @@ -108,7 +108,7 @@ public void ItShouldEncodeSpecialSymbolsInAmpersandDelimitedParameterValues(stri var 
parameter = new RestApiOperationParameter(name: "id", type: "array", isRequired: false, expand: true, location: RestApiOperationParameterLocation.Query, style: RestApiOperationParameterStyle.SpaceDelimited); // Act - var result = SpaceDelimitedStyleParameterSerializer.Serialize(parameter, $"[\"{specialSymbol}\"]"); + var result = SpaceDelimitedStyleParameterSerializer.Serialize(parameter, new JsonArray(specialSymbol)); // Assert Assert.NotNull(result); diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ResourcePluginsProvider.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ResourcePluginsProvider.cs new file mode 100644 index 000000000000..ed3480ca1e9e --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ResourcePluginsProvider.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Resources; + +namespace SemanticKernel.Functions.UnitTests.OpenApi.TestPlugins; + +internal static class ResourcePluginsProvider +{ + /// + /// Loads OpenAPI document from assembly resource. + /// + /// The resource name. + /// The OpenAPI document resource stream. + public static Stream LoadFromResource(string resourceName) + { + var type = typeof(ResourcePluginsProvider); + + var stream = type.Assembly.GetManifestResourceStream(type, resourceName); + if (stream == null) + { + throw new MissingManifestResourceException($"Unable to load OpenAPI plugin from assembly resource '{resourceName}'."); + } + + return stream; + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ai-plugin.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ai-plugin.json new file mode 100644 index 000000000000..c72d1d4064fc --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ai-plugin.json @@ -0,0 +1,24 @@ +{ + "schema_version": "v1", + "name_for_human": "AzureKeyVault", + "name_for_model": "AzureKeyVault", + "description_for_human": "Query and interact with Azure Key Vault", + "description_for_model": "Query and interact with Azure Key Vault", + "auth": { + "type": "oauth", + "client_url": "https://login.microsoftonline.com/common/oauth2/v2.0/authorize", + "scope": "https://vault.azure.net/.default", + "authorization_url": "https://login.microsoftonline.com/common/oauth2/v2.0/token", + "authorization_content_type": "application/x-www-form-urlencoded", + "verification_tokens": { + "openai": "00000000000000000000000000000000" + } + }, + "api": { + "type": "openapi", + "url": "http://localhost:3001/openapi.json" + }, + "logo_url": "https://contoso.com/logo.png", + "contact_email": "contact@contoso.com", + "legal_info_url": "https://privacy.microsoft.com/en-US/privacystatement" +} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ai-plugin2.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ai-plugin2.json new file mode 100644 index 000000000000..4510e2e5402a --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/ai-plugin2.json @@ -0,0 +1,20 @@ +{ + "schema_version": "v1", + "name_for_model": "WebSearcher", + "name_for_human": "WebSearcher", + "description_for_model": "Searches the web", + "description_for_human": "Searches the web", + "auth": { + "type": "user_http", + "authorization_type": "bearer", + "verification_tokens": { "openAI": "" } + }, + "api": { + "type": "openapi", + "url": "https://localhost:443/swagger.json" + }, + "logo_url": 
"https://localhost:443/.well-known/icon", + "contact_email": "", + "legal_info_url": "", + "httpAuthorizationType": "" +} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV2_0.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV2_0.json new file mode 100644 index 000000000000..4c323deb97a8 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV2_0.json @@ -0,0 +1,348 @@ +{ + "basePath": "/", + "consumes": [], + "definitions": {}, + "host": "my-key-vault.vault.azure.net", + "info": { + "description": "A sample connector for the Azure Key Vault service. This connector is built for the Azure Key Vault REST API. You can see the details of the API here: https://docs.microsoft.com/rest/api/keyvault/.", + "title": "Azure Key Vault [Sample]", + "version": "1.0" + }, + "parameters": {}, + "paths": { + "/secrets/{secret-name}": { + "get": { + "description": "Get a specified secret from a given key vault. For details, see: https://docs.microsoft.com/rest/api/keyvault/getsecret/getsecret.", + "operationId": "GetSecret", + "parameters": [ + { + "in": "path", + "name": "secret-name", + "required": true, + "type": "string" + }, + { + "default": "7.0", + "in": "query", + "name": "api-version", + "required": true, + "type": "string", + "x-ms-visibility": "internal" + } + ], + "responses": { + "200": { + "description": "default", + "schema": { + "properties": { + "attributes": { + "description": "attributes", + "properties": { + "created": { + "description": "created", + "format": "int32", + "type": "integer" + }, + "enabled": { + "description": "enabled", + "type": "boolean" + }, + "recoverylevel": { + "description": "recoverylevel", + "type": "string" + }, + "updated": { + "description": "updated", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "id": { + "description": "id", + "type": "string" + }, + "value": { + "description": "value", + "format": "byte", + "type": "string" + } + }, + "type": "object" + } + } + }, + "summary": "Get secret" + }, + "put": { + "description": "Sets a secret in a specified key vault.", + "operationId": "SetSecret", + "parameters": [ + { + "in": "path", + "name": "secret-name", + "required": true, + "type": "string" + }, + { + "default": "7.0", + "in": "query", + "name": "api-version", + "required": true, + "type": "string", + "x-ms-visibility": "internal" + }, + { + "in": "body", + "name": "body", + "required": true, + "schema": { + "properties": { + "attributes": { + "description": "attributes", + "properties": { + "enabled": { + "description": "Determines whether the object is enabled.", + "type": "boolean" + } + }, + "type": "object" + }, + "value": { + "description": "The value of the secret.", + "type": "string" + } + }, + "required": [ + "value" + ], + "type": "object" + } + }, + { + "name": "Accept", + "in": "header", + "required": false, + "description": "Indicates which content types, expressed as MIME types, the client is able to understand.", + "type": "string", + "default": "application/json", + "x-ms-visibility": "internal" + }, + { + "name": "X-API-Version", + "in": "header", + "description": "Requested API version.", + "required": true, + "type": "integer", + "default": 10, + "x-ms-visibility": "internal", + "x-ms-summary": "X-API-Version" + }, + { + "collectionFormat": "csv", + "description": "The comma separated list of operation ids.", + "in": "header", + "items": { + "type": "string" + }, + "name": 
"X-Operation-Csv-Ids", + "required": false, + "type": "array", + "x-ms-summary": "Ids", + "x-ms-visibility": "advanced" + } + ], + "responses": { + "200": { + "description": "default", + "schema": { + "properties": { + "attributes": { + "description": "attributes", + "properties": { + "created": { + "description": "created", + "format": "int32", + "type": "integer" + }, + "enabled": { + "description": "enabled", + "type": "boolean" + }, + "recoverylevel": { + "description": "recoverylevel", + "type": "string" + }, + "updated": { + "description": "updated", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "id": { + "description": "id", + "type": "string" + }, + "value": { + "description": "value", + "type": "string" + } + }, + "type": "object" + } + } + }, + "summary": "Create or update secret value" + } + }, + "/FunPlugin/Excuses": { + "post": { + "summary": "Turn a scenario into a creative or humorous excuse to send your boss", + "operationId": "Excuses", + "consumes": [ + "text/plain" + ], + "produces": [ + "text/plain" + ], + "parameters": [ + { + "in": "body", + "name": "body", + "description": "excuse event", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "The OK response", + "schema": { + "type": "string" + } + } + } + } + }, + "/test-default-values/{string-parameter}": { + "put": { + "description": "Operation to test default parameter values.", + "operationId": "TestDefaultValues", + "parameters": [ + { + "in": "path", + "name": "string-parameter", + "default": "string-value", + "required": true, + "type": "string" + }, + { + "in": "query", + "name": "boolean-parameter", + "type": "boolean", + "default": true + }, + { + "in": "header", + "name": "integer-parameter", + "type": "integer", + "format": "int32", + "default": 281 + }, + { + "in": "header", + "name": "long-parameter", + "type": "integer", + "format": "int64", + "default": -2814 + }, + { + "in": "body", + "name": "body", + "required": true, + "schema": { + "properties": { + "attributes": { + "description": "attributes", + "properties": { + "double-parameter": { + "type": "number", + "format": "double", + "default": -12.01 + } + }, + "type": "object" + }, + "float-parameter": { + "type": "number", + "format": "float", + "default": 12.01 + }, + "encoded-characters-parameter": { + "type": "string", + "format": "byte", + "default": "AQIDBAU=" + }, + "binary-data-parameter": { + "type": "string", + "format": "binary", + "default": "23456" + }, + "date-parameter": { + "type": "string", + "format": "date", + "default": "2017-07-21" + }, + "date-time-parameter": { + "type": "string", + "format": "date-time", + "default": "2017-07-21T17:32:28Z" + }, + "password-parameter": { + "type": "string", + "format": "password", + "default": "password-value" + } + }, + "type": "object" + } + } + ], + "responses": { + "200": { + "description": "The OK response", + "schema": { + "type": "string" + } + } + }, + "summary": "Get secret" + } + } + }, + "produces": [], + "responses": {}, + "schemes": [ + "https" + ], + "security": [ + { + "oauth2_auth": [] + } + ], + "securityDefinitions": { + "oauth2_auth": { + "authorizationUrl": "https://login.windows.net/common/oauth2/authorize", + "flow": "accessCode", + "scopes": {}, + "tokenUrl": "https://login.windows.net/common/oauth2/authorize", + "type": "oauth2" + } + }, + "swagger": "2.0", + "tags": [] +} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV3_0.json 
b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV3_0.json new file mode 100644 index 000000000000..ace59229a42d --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV3_0.json @@ -0,0 +1,328 @@ +{ + "openapi": "3.0.1", + "info": { + "title": "Azure Key Vault [Sample]", + "description": "A sample connector for the Azure Key Vault service. This connector is built for the Azure Key Vault REST API. You can see the details of the API here: https://docs.microsoft.com/rest/api/keyvault/.", + "version": "1.0" + }, + "servers": [ + { + "url": "https://my-key-vault.vault.azure.net" + } + ], + "paths": { + "/secrets/{secret-name}": { + "get": { + "summary": "Get secret", + "description": "Get a specified secret from a given key vault. For details, see: https://docs.microsoft.com/rest/api/keyvault/getsecret/getsecret.", + "operationId": "GetSecret", + "parameters": [ + { + "name": "secret-name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "api-version", + "in": "query", + "required": true, + "schema": { + "type": "string", + "default": "7.0" + }, + "x-ms-visibility": "internal" + }, + { + "name": "nonExplodeFormParam", + "in": "query", + "style": "form", + "explode": false, + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + }, + { + "name": "explodeFormParam", + "in": "query", + "style": "form", + "explode": true, + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + }, + { + "name": "anotherExplodeFormParam", + "in": "query", + "schema": { + "type": "array", + "items": { + "type": "integer" + } + } + } + ], + "responses": { + "200": { + "description": "default" + } + } + }, + "put": { + "summary": "Create or update secret value", + "description": "Sets a secret in a specified key vault.", + "operationId": "SetSecret", + "parameters": [ + { + "name": "secret-name", + "in": "path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "api-version", + "in": "query", + "required": true, + "schema": { + "type": "string", + "default": "7.0" + }, + "x-ms-visibility": "internal" + }, + { + "name": "Accept", + "in": "header", + "description": "Indicates which content types, expressed as MIME types, the client is able to understand.", + "schema": { + "type": "string", + "default": "application/json" + }, + "x-ms-visibility": "internal" + }, + { + "name": "X-API-Version", + "in": "header", + "description": "Requested API version.", + "required": true, + "schema": { + "type": "integer", + "default": 10 + }, + "x-ms-visibility": "internal", + "x-ms-summary": "X-API-Version" + }, + { + "name": "X-Operation-Csv-Ids", + "in": "header", + "description": "The comma separated list of operation ids.", + "style": "simple", + "schema": { + "type": "array", + "items": { + "type": "string" + } + }, + "x-ms-summary": "Ids", + "x-ms-visibility": "advanced" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "required": [ + "value" + ], + "type": "object", + "properties": { + "attributes": { + "type": "object", + "properties": { + "enabled": { + "type": "boolean", + "description": "Determines whether the object is enabled." + } + }, + "description": "attributes" + }, + "value": { + "type": "string", + "description": "The value of the secret." 
+ } + } + } + } + }, + "required": true, + "x-bodyName": "body" + }, + "responses": { + "200": { + "description": "default" + } + } + } + }, + "/FunPlugin/Excuses": { + "post": { + "summary": "Turn a scenario into a creative or humorous excuse to send your boss", + "operationId": "Excuses", + "requestBody": { + "description": "excuse event", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + }, + "x-bodyName": "body" + }, + "responses": { + "200": { + "description": "The OK response", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + } + } + } + } + }, + "/test-default-values/{string-parameter}": { + "put": { + "summary": "Operation to test default parameter values.", + "description": "Operation to test default parameter values.", + "operationId": "TestDefaultValues", + "parameters": [ + { + "name": "string-parameter", + "in": "path", + "required": true, + "schema": { + "type": "string", + "default": "string-value" + } + }, + { + "name": "boolean-parameter", + "in": "query", + "schema": { + "type": "boolean", + "default": true + } + }, + { + "name": "integer-parameter", + "in": "header", + "schema": { + "type": "integer", + "format": "int32", + "default": 281 + } + }, + { + "name": "long-parameter", + "in": "header", + "schema": { + "type": "integer", + "format": "int64", + "default": -2814 + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "attributes": { + "type": "object", + "properties": { + "double-parameter": { + "type": "number", + "format": "double", + "default": -12.01 + } + }, + "description": "attributes" + }, + "float-parameter": { + "type": "number", + "format": "float", + "default": 12.01 + }, + "encoded-characters-parameter": { + "type": "string", + "format": "byte", + "default": "AQIDBAU=" + }, + "binary-data-parameter": { + "type": "string", + "format": "binary", + "default": "23456" + }, + "date-parameter": { + "type": "string", + "format": "date", + "default": "2017-07-21" + }, + "date-time-parameter": { + "type": "string", + "format": "date-time", + "default": "2017-07-21T17:32:28.0000000+00:00" + }, + "password-parameter": { + "type": "string", + "format": "password", + "default": "password-value" + } + } + } + } + }, + "required": true, + "x-bodyName": "body" + }, + "responses": { + "200": { + "description": "The OK response" + } + } + } + } + }, + "components": { + "securitySchemes": { + "oauth2_auth": { + "type": "oauth2", + "flows": { + "authorizationCode": { + "authorizationUrl": "https://login.windows.net/common/oauth2/authorize", + "tokenUrl": "https://login.windows.net/common/oauth2/authorize", + "scopes": { } + } + } + } + } + }, + "security": [ + { + "oauth2_auth": [ ] + } + ] +} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV3_1.yaml b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV3_1.yaml new file mode 100644 index 000000000000..3dba0c595748 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/documentV3_1.yaml @@ -0,0 +1,217 @@ +openapi: 3.1.0 +info: + title: 'Azure Key Vault [Sample]' + description: 'A sample connector for the Azure Key Vault service. This connector is built for the Azure Key Vault REST API. You can see the details of the API here: https://docs.microsoft.com/rest/api/keyvault/.' 
+ version: '1.0' +servers: + - url: https://my-key-vault.vault.azure.net +paths: + '/secrets/{secret-name}': + get: + summary: Get secret + description: 'Get a specified secret from a given key vault. For details, see: https://docs.microsoft.com/rest/api/keyvault/getsecret/getsecret.' + operationId: GetSecret + parameters: + - name: secret-name + in: path + required: true + schema: + type: string + - name: api-version + in: query + required: true + schema: + type: string + default: '7.0' + x-ms-visibility: internal + - name: nonExplodeFormParam + in: query + style: form + explode: false + schema: + type: array + items: + type: string + - name: explodeFormParam + in: query + style: form + schema: + type: array + items: + type: string + - name: anotherExplodeFormParam + in: query + schema: + type: array + items: + type: integer + responses: + '200': + description: default + put: + summary: Create or update secret value + description: Sets a secret in a specified key vault. + operationId: SetSecret + parameters: + - name: secret-name + in: path + required: true + schema: + type: string + - name: api-version + in: query + required: true + schema: + type: string + default: '7.0' + x-ms-visibility: internal + - name: Accept + in: header + description: 'Indicates which content types, expressed as MIME types, the client is able to understand.' + schema: + type: string + default: application/json + x-ms-visibility: internal + - name: X-API-Version + in: header + description: Requested API version. + required: true + schema: + type: integer + default: 10 + x-ms-visibility: internal + x-ms-summary: X-API-Version + - name: X-Operation-Csv-Ids + in: header + description: The comma separated list of operation ids. + style: simple + schema: + type: array + items: + type: string + x-ms-summary: Ids + x-ms-visibility: advanced + requestBody: + content: + application/json: + schema: + required: + - value + type: object + properties: + attributes: + type: object + properties: + enabled: + type: boolean + description: Determines whether the object is enabled. + description: attributes + value: + type: string + description: The value of the secret. + required: true + x-bodyName: body + responses: + '200': + description: default + /FunPlugin/Excuses: + post: + summary: Turn a scenario into a creative or humorous excuse to send your boss + operationId: Excuses + requestBody: + description: excuse event + content: + text/plain: + schema: + type: string + x-bodyName: body + responses: + '200': + description: The OK response + content: + text/plain: + schema: + type: string + '/test-default-values/{string-parameter}': + put: + summary: Operation to test default parameter values. + description: Operation to test default parameter values. 
+ operationId: TestDefaultValues + parameters: + - name: string-parameter + in: path + required: true + schema: + type: string + default: string-value + - name: boolean-parameter + in: query + schema: + type: boolean + default: true + - name: integer-parameter + in: header + schema: + type: integer + format: int32 + default: 281 + - name: long-parameter + in: header + schema: + type: integer + format: int64 + default: -2814 + requestBody: + content: + application/json: + schema: + type: object + properties: + attributes: + type: object + properties: + double-parameter: + type: number + format: double + default: -12.01 + description: attributes + float-parameter: + type: number + format: float + default: 12.01 + encoded-characters-parameter: + type: string + format: byte + default: AQIDBAU= + binary-data-parameter: + type: string + format: binary + default: '23456' + date-parameter: + type: string + format: date + default: '2017-07-21' + date-time-parameter: + type: string + format: date-time + default: '2017-07-21T17:32:28.0000000+00:00' + password-parameter: + type: string + format: password + default: password-value + required: true + x-bodyName: body + responses: + '200': + description: The OK response +components: + securitySchemes: + oauth2_auth: + type: oauth2 + flows: + authorizationCode: + authorizationUrl: https://login.windows.net/common/oauth2/authorize + tokenUrl: https://login.windows.net/common/oauth2/authorize + scopes: { } +security: + - oauth2_auth: [ ] diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/nonCompliant_documentV3_0.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/nonCompliant_documentV3_0.json similarity index 100% rename from dotnet/src/Functions/Functions.UnitTests/OpenAPI/TestPlugins/nonCompliant_documentV3_0.json rename to dotnet/src/Functions/Functions.UnitTests/OpenApi/TestPlugins/nonCompliant_documentV3_0.json diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/200FakeResponseSchema.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/200FakeResponseSchema.json new file mode 100644 index 000000000000..27a5f7460332 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/200FakeResponseSchema.json @@ -0,0 +1,18 @@ +{ + "title": "FakeResponse200", + "type": "object", + "properties": { + "fakeItems": { + "type": "array", + "items": { + "title": "Item", + "type": "object", + "properties": { + "attributes": { "type": "array", "itemName": { "type": "string" } }, + "name": { "type": "string" } + } + } + } + }, + "additionalProperties": false +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/2XXFakeResponseSchema.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/2XXFakeResponseSchema.json new file mode 100644 index 000000000000..0124af1b44a3 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/2XXFakeResponseSchema.json @@ -0,0 +1,18 @@ +{ + "title": "FakeResponse2xx", + "type": "object", + "properties": { + "fakeItems": { + "type": "array", + "items": { + "title": "Item", + "type": "object", + "properties": { + "attributes": { "type": "array", "itemName": { "type": "string" } }, + "name": { "type": "string" } + } + } + } + }, + "additionalProperties": false +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/DefaultResponseSchema.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/DefaultResponseSchema.json new file mode 100644 
index 000000000000..9f5b42dd6fe1 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/DefaultResponseSchema.json @@ -0,0 +1,9 @@ +{ + "title": "DefaultResponse", + "type": "object", + "properties": { + "code": { "type": "integer", "format": "int32" }, + "message": { "type": "string" } + }, + "additionalProperties": false +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/FakeResponseSchema.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/FakeResponseSchema.json new file mode 100644 index 000000000000..2fc256a7edca --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/FakeResponseSchema.json @@ -0,0 +1,18 @@ +{ + "title": "FakeResponse", + "type": "object", + "properties": { + "fakeItems": { + "type": "array", + "items": { + "title": "Item", + "type": "object", + "properties": { + "attributes": { "type": "array", "itemName": { "type": "string" } }, + "name": { "type": "string" } + } + } + } + }, + "additionalProperties": false +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/InvalidProductContent.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/InvalidProductContent.json new file mode 100644 index 000000000000..3d6f3416db94 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/InvalidProductContent.json @@ -0,0 +1 @@ +{"products": [{"id": "1234", "name": "Laptop"} \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/NotProductContent.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/NotProductContent.json new file mode 100644 index 000000000000..fc01cdaa3427 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/NotProductContent.json @@ -0,0 +1 @@ +{ "p": [{ "id": "1234", "name": "Laptop" }] } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ObjectResponseSchema.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ObjectResponseSchema.json new file mode 100644 index 000000000000..bba55dc66838 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ObjectResponseSchema.json @@ -0,0 +1 @@ +{ "type": "object" } diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ProductResponseSchema.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ProductResponseSchema.json new file mode 100644 index 000000000000..eb367f8ddc83 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ProductResponseSchema.json @@ -0,0 +1,20 @@ +{ + "title": "ProductResponse", + "type": "object", + "properties": { + "products": { + "type": "array", + "items": { + "title": "Product", + "type": "object", + "properties": { + "attributes": { "type": "array", "items": { "type": "string" } }, + "name": { "type": "string" }, + "price": { "type": "string" }, + "url": { "type": "string" } + } + } + } + }, + "additionalProperties": false +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ResourceResponseProvider.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ResourceResponseProvider.cs new file mode 100644 index 000000000000..68210678f2a0 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ResourceResponseProvider.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft. All rights reserved. 
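+// Illustrative usage sketch (assumption, not taken from the original change): the provider below
+// resolves resource names relative to its own namespace via Assembly.GetManifestResourceStream(Type, string),
+// so the JSON files under TestResponses are expected to be embedded resources of the test project, e.g.:
+//
+//     string schema = ResourceResponseProvider.LoadFromResource("ProductResponseSchema.json");
+//     // expected to read "SemanticKernel.Functions.UnitTests.OpenApi.TestResponses.ProductResponseSchema.json"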
+ +using System.IO; +using System.Resources; + +namespace SemanticKernel.Functions.UnitTests.OpenApi.TestResponses; + +internal static class ResourceResponseProvider +{ + /// + /// Loads OpenAPI response schema and content from assembly resource. + /// + /// The resource name. + /// The OpenAPI response schema or content resource stream. + public static string LoadFromResource(string resourceName) + { + var type = typeof(ResourceResponseProvider); + + var stream = type.Assembly.GetManifestResourceStream(type, resourceName); + if (stream == null) + { + throw new MissingManifestResourceException($"Unable to load OpenAPI response from assembly resource '{resourceName}'."); + } + + using var reader = new StreamReader(stream); + return reader.ReadToEnd(); + } +} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ValidProductContent.json b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ValidProductContent.json new file mode 100644 index 000000000000..834309686afd --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/TestResponses/ValidProductContent.json @@ -0,0 +1 @@ +{ "products": [{ "id": "1234", "name": "Laptop" }] } diff --git a/dotnet/src/Functions/Functions.UnitTests/Yaml/Functions/KernelFunctionYamlTests.cs b/dotnet/src/Functions/Functions.UnitTests/Yaml/Functions/KernelFunctionYamlTests.cs new file mode 100644 index 000000000000..d2ef5c294779 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/Yaml/Functions/KernelFunctionYamlTests.cs @@ -0,0 +1,199 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace SemanticKernel.Functions.UnitTests.Yaml; + +public class KernelFunctionYamlTests +{ + private readonly ISerializer _serializer; + + public KernelFunctionYamlTests() + { + this._serializer = new SerializerBuilder() + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .Build(); + } + + [Fact] + public void ItShouldCreateFunctionFromPromptYamlWithNoExecutionSettings() + { + // Arrange + // Act + var function = KernelFunctionYaml.FromPromptYaml(this._yamlNoExecutionSettings); + + // Assert + Assert.NotNull(function); + Assert.Equal("SayHello", function.Name); + Assert.Equal("Say hello to the specified person using the specified language", function.Description); + Assert.Equal(2, function.Metadata.Parameters.Count); + //Assert.Equal(0, function.ExecutionSettings.Count); + } + + [Fact] + public void ItShouldCreateFunctionFromPromptYaml() + { + // Arrange + // Act + var function = KernelFunctionYaml.FromPromptYaml(this._yaml); + + // Assert + Assert.NotNull(function); + Assert.Equal("SayHello", function.Name); + Assert.Equal("Say hello to the specified person using the specified language", function.Description); + } + + [Fact] + public void ItShouldCreateFunctionFromPromptYamlWithCustomExecutionSettings() + { + // Arrange + // Act + var function = KernelFunctionYaml.FromPromptYaml(this._yamlWithCustomSettings); + + // Assert + Assert.NotNull(function); + Assert.Equal("SayHello", function.Name); + Assert.Equal("Say hello to the specified person using the specified language", function.Description); + Assert.Equal(2, function.Metadata.Parameters.Count); + } + + [Fact] + public void ItShouldSupportCreatingOpenAIExecutionSettings() + { + // Arrange + var deserializer = new DeserializerBuilder() + 
.WithNamingConvention(UnderscoredNamingConvention.Instance) + .WithNodeDeserializer(new PromptExecutionSettingsNodeDeserializer()) + .Build(); + var promptFunctionModel = deserializer.Deserialize(this._yaml); + + // Act + var executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(promptFunctionModel.ExecutionSettings["service1"]); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal("gpt-4", executionSettings.ModelId); + Assert.Equal(1.0, executionSettings.Temperature); + Assert.Equal(0.0, executionSettings.TopP); + } + + [Fact] + public void ItShouldCreateFunctionWithDefaultValueOfStringType() + { + // Act + var function = KernelFunctionYaml.FromPromptYaml(this._yamlWithCustomSettings); + + // Assert + Assert.NotNull(function?.Metadata?.Parameters); + Assert.Equal("John", function?.Metadata?.Parameters[0].DefaultValue); + Assert.Equal("English", function?.Metadata?.Parameters[1].DefaultValue); + } + + [Fact] + // This test checks that the logic of imposing a temporary limitation on the default value being a string is in place and works as expected. + public void ItShouldThrowExceptionWhileCreatingFunctionWithDefaultValueOtherThanString() + { + string CreateYaml(object defaultValue) + { + var obj = new + { + description = "function description", + input_variables = new[] + { + new + { + name = "name", + description = "description", + @default = defaultValue, + isRequired = true + } + } + }; + + return this._serializer.Serialize(obj); + } + + // Act + Assert.Throws(() => KernelFunctionYaml.FromPromptYaml(CreateYaml(new { p1 = "v1" }))); + } + + private readonly string _yamlNoExecutionSettings = @" + template_format: semantic-kernel + template: Say hello world to {{$name}} in {{$language}} + description: Say hello to the specified person using the specified language + name: SayHello + input_variables: + - name: name + description: The name of the person to greet + default: John + - name: language + description: The language to generate the greeting in + default: English + "; + + private readonly string _yaml = @" + template_format: semantic-kernel + template: Say hello world to {{$name}} in {{$language}} + description: Say hello to the specified person using the specified language + name: SayHello + input_variables: + - name: name + description: The name of the person to greet + default: John + - name: language + description: The language to generate the greeting in + default: English + execution_settings: + service1: + model_id: gpt-4 + temperature: 1.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [] + service2: + model_id: gpt-3.5 + temperature: 1.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [ ""foo"", ""bar"", ""baz"" ] + "; + + private readonly string _yamlWithCustomSettings = @" + template_format: semantic-kernel + template: Say hello world to {{$name}} in {{$language}} + description: Say hello to the specified person using the specified language + name: SayHello + input_variables: + - name: name + description: The name of the person to greet + default: John + - name: language + description: The language to generate the greeting in + default: English + execution_settings: + service1: + model_id: gpt-4 + temperature: 1.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [] + service2: + model_id: random-model + temperaturex: 1.0 + top_q: 0.0 + rando_penalty: 0.0 + max_token_count: 256 + stop_sequences: [ 
""foo"", ""bar"", ""baz"" ] + "; +} diff --git a/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsNodeDeserializerTests.cs b/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsNodeDeserializerTests.cs new file mode 100644 index 000000000000..618cadc6a7f0 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsNodeDeserializerTests.cs @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Xunit; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace SemanticKernel.Functions.UnitTests.Yaml; + +/// +/// Tests for . +/// +public sealed class PromptExecutionSettingsNodeDeserializerTests +{ + [Fact] + public void ItShouldCreatePromptFunctionFromYamlWithCustomModelSettings() + { + // Arrange + var deserializer = new DeserializerBuilder() + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .WithNodeDeserializer(new PromptExecutionSettingsNodeDeserializer()) + .Build(); + + // Act + var semanticFunctionConfig = deserializer.Deserialize(this._yaml); + + // Assert + Assert.NotNull(semanticFunctionConfig); + Assert.Equal("SayHello", semanticFunctionConfig.Name); + Assert.Equal("Say hello to the specified person using the specified language", semanticFunctionConfig.Description); + Assert.Equal(2, semanticFunctionConfig.InputVariables.Count); + Assert.Equal("language", semanticFunctionConfig.InputVariables[1].Name); + Assert.Equal(2, semanticFunctionConfig.ExecutionSettings.Count); + Assert.Equal("gpt-4", semanticFunctionConfig.ExecutionSettings["service1"].ModelId); + Assert.Equal("gpt-3.5", semanticFunctionConfig.ExecutionSettings["service2"].ModelId); + } + + private readonly string _yaml = @" + template_format: semantic-kernel + template: Say hello world to {{$name}} in {{$language}} + description: Say hello to the specified person using the specified language + name: SayHello + input_variables: + - name: name + description: The name of the person to greet + default: John + - name: language + description: The language to generate the greeting in + default: English + execution_settings: + service1: + model_id: gpt-4 + temperature: 1.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [] + service2: + model_id: gpt-3.5 + temperature: 1.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [ ""foo"", ""bar"", ""baz"" ] +"; +} diff --git a/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj b/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj new file mode 100644 index 000000000000..cb78aea8f4fe --- /dev/null +++ b/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj @@ -0,0 +1,31 @@ + + + + + Microsoft.SemanticKernel.Yaml + $(AssemblyName) + netstandard2.0 + true + + + + + + + Semantic Kernel - Support for Yaml Function Definitions + Semantic Kernel Yaml Functions + + + + + + + + + + + + + + + diff --git a/dotnet/src/Functions/Functions.Yaml/KernelFunctionYaml.cs b/dotnet/src/Functions/Functions.Yaml/KernelFunctionYaml.cs new file mode 100644 index 000000000000..0c7039c5530f --- /dev/null +++ b/dotnet/src/Functions/Functions.Yaml/KernelFunctionYaml.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Microsoft.Extensions.Logging; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace Microsoft.SemanticKernel; + +/// +/// Factory methods for creating instances. +/// +public static class KernelFunctionYaml +{ + /// + /// Creates a instance for a prompt function using the specified markdown text. + /// + /// YAML representation of the to use to create the prompt function. + /// + /// The to use when interpreting the prompt template configuration into a . + /// If null, a default factory will be used. + /// + /// The to use for logging. If null, no logging will be performed. + /// The created . + public static KernelFunction FromPromptYaml( + string text, + IPromptTemplateFactory? promptTemplateFactory = null, + ILoggerFactory? loggerFactory = null) + { + PromptTemplateConfig promptTemplateConfig = ToPromptTemplateConfig(text); + + // Prevent the default value from being any type other than a string. + // It's a temporary limitation that helps shape the public API surface + // (changing the type of the Default property to object) now, before the release. + // This helps avoid a breaking change while a proper solution for + // dealing with the different deserialization outputs of JSON/YAML prompt configurations is being evaluated. + foreach (var inputVariable in promptTemplateConfig.InputVariables) + { + if (inputVariable.Default is not null && inputVariable.Default is not string) + { + throw new NotSupportedException($"Default value for input variable '{inputVariable.Name}' must be a string. " + + $"This is a temporary limitation; future updates are expected to remove this constraint. Prompt function - '{promptTemplateConfig.Name ?? promptTemplateConfig.Description}'."); + } + } + + return KernelFunctionFactory.CreateFromPrompt( + promptTemplateConfig, + promptTemplateFactory, + loggerFactory); + } + + /// + /// Convert the given YAML text to a model. + /// + /// YAML representation of the to use to create the prompt function. + public static PromptTemplateConfig ToPromptTemplateConfig(string text) + { + var deserializer = new DeserializerBuilder() + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .WithNodeDeserializer(new PromptExecutionSettingsNodeDeserializer()) + .Build(); + + return deserializer.Deserialize(text); + } +} diff --git a/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsNodeDeserializer.cs b/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsNodeDeserializer.cs new file mode 100644 index 000000000000..5e8269eae7e0 --- /dev/null +++ b/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsNodeDeserializer.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using YamlDotNet.Core; +using YamlDotNet.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Deserializer for . +/// +internal sealed class PromptExecutionSettingsNodeDeserializer : INodeDeserializer +{ + /// + public bool Deserialize(IParser reader, Type expectedType, Func nestedObjectDeserializer, out object? value) + { + if (expectedType != typeof(PromptExecutionSettings)) + { + value = null; + return false; + } + + var dictionary = nestedObjectDeserializer.Invoke(reader, typeof(Dictionary)); + var modelSettings = new PromptExecutionSettings(); + foreach (var kv in (Dictionary)dictionary!) 
+ { + switch (kv.Key) + { + case "model_id": + modelSettings.ModelId = (string)kv.Value; + break; + + default: + (modelSettings.ExtensionData ??= new()).Add(kv.Key, kv.Value); + break; + } + } + + value = modelSettings; + return true; + } +} diff --git a/dotnet/src/Functions/Functions.Yaml/PromptYamlKernelExtensions.cs b/dotnet/src/Functions/Functions.Yaml/PromptYamlKernelExtensions.cs new file mode 100644 index 000000000000..c290e3e01964 --- /dev/null +++ b/dotnet/src/Functions/Functions.Yaml/PromptYamlKernelExtensions.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// Class for extensions methods to define functions using prompt YAML format. +/// +public static class PromptYamlKernelExtensions +{ + /// + /// Creates a instance for a prompt function using the specified YAML. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// YAML representation of the to use to create the prompt function + /// + /// The to use when interpreting the prompt template configuration into a . + /// If null, a default factory will be used. + /// + /// The created . + public static KernelFunction CreateFunctionFromPromptYaml( + this Kernel kernel, + string text, + IPromptTemplateFactory? promptTemplateFactory = null) + { + return KernelFunctionYaml.FromPromptYaml(text, promptTemplateFactory, kernel.LoggerFactory); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs deleted file mode 100644 index d6f6275b562e..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextCompletion/HuggingFaceTextCompletionTests.cs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.HuggingFace.TextCompletion; -using Xunit; - -namespace SemanticKernel.IntegrationTests.Connectors.HuggingFace.TextCompletion; - -/// -/// Integration tests for . 
-/// -public sealed class HuggingFaceTextCompletionTests -{ - private const string Endpoint = "http://localhost:5000/completions"; - private const string Model = "gpt2"; - - private readonly IConfigurationRoot _configuration; - - public HuggingFaceTextCompletionTests() - { - // Load configuration - this._configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .Build(); - } - - [Fact(Skip = "This test is for manual verification.")] - public async Task HuggingFaceLocalAndRemoteTextCompletionAsync() - { - // Arrange - const string Input = "This is test"; - - var huggingFaceLocal = new HuggingFaceTextCompletion(Model, endpoint: Endpoint); - var huggingFaceRemote = new HuggingFaceTextCompletion(Model, apiKey: this.GetApiKey()); - - // Act - var localResponse = await huggingFaceLocal.CompleteAsync(Input); - var remoteResponse = await huggingFaceRemote.CompleteAsync(Input); - - // Assert - Assert.NotNull(localResponse); - Assert.NotNull(remoteResponse); - - Assert.StartsWith(Input, localResponse, StringComparison.Ordinal); - Assert.StartsWith(Input, remoteResponse, StringComparison.Ordinal); - } - - [Fact(Skip = "This test is for manual verification.")] - public async Task RemoteHuggingFaceTextCompletionWithCustomHttpClientAsync() - { - // Arrange - const string Input = "This is test"; - - using var httpClient = new HttpClient(); - httpClient.BaseAddress = new Uri("https://api-inference.huggingface.co/models"); - - var huggingFaceRemote = new HuggingFaceTextCompletion(Model, apiKey: this.GetApiKey(), httpClient: httpClient); - - // Act - var remoteResponse = await huggingFaceRemote.CompleteAsync(Input); - - // Assert - Assert.NotNull(remoteResponse); - - Assert.StartsWith(Input, remoteResponse, StringComparison.Ordinal); - } - - private string GetApiKey() - { - return this._configuration.GetSection("HuggingFace:ApiKey").Get()!; - } -} diff --git a/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextGeneration/HuggingFaceTextGenerationTests.cs b/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextGeneration/HuggingFaceTextGenerationTests.cs new file mode 100644 index 000000000000..2bd87423ff66 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/HuggingFace/TextGeneration/HuggingFaceTextGenerationTests.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.Connectors.HuggingFace; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.HuggingFace.TextGeneration; + +/// +/// Integration tests for . 
+/// +public sealed class HuggingFaceTextGenerationTests +{ + private const string Endpoint = "http://localhost:5000/completions"; + private const string Model = "gpt2"; + + private readonly IConfigurationRoot _configuration; + + public HuggingFaceTextGenerationTests() + { + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .Build(); + } + + [Fact(Skip = "This test is for manual verification.")] + public async Task HuggingFaceLocalAndRemoteTextGenerationAsync() + { + // Arrange + const string Input = "This is test"; + + var huggingFaceLocal = new HuggingFaceTextGenerationService(Model, endpoint: Endpoint); + var huggingFaceRemote = new HuggingFaceTextGenerationService(Model, apiKey: this.GetApiKey()); + + // Act + var localResponse = await huggingFaceLocal.GetTextContentAsync(Input); + var remoteResponse = await huggingFaceRemote.GetTextContentAsync(Input); + + // Assert + Assert.NotNull(localResponse.Text); + Assert.NotNull(remoteResponse.Text); + + Assert.StartsWith(Input, localResponse.Text, StringComparison.Ordinal); + Assert.StartsWith(Input, remoteResponse.Text, StringComparison.Ordinal); + } + + [Fact(Skip = "This test is for manual verification.")] + public async Task RemoteHuggingFaceTextGenerationWithCustomHttpClientAsync() + { + // Arrange + const string Input = "This is test"; + + using var httpClient = new HttpClient(); + httpClient.BaseAddress = new Uri("https://api-inference.huggingface.co/models"); + + var huggingFaceRemote = new HuggingFaceTextGenerationService(Model, apiKey: this.GetApiKey(), httpClient: httpClient); + + // Act + var remoteResponse = await huggingFaceRemote.GetTextContentAsync(Input); + + // Assert + Assert.NotNull(remoteResponse.Text); + + Assert.StartsWith(Input, remoteResponse.Text, StringComparison.Ordinal); + } + + private string GetApiKey() + { + return this._configuration.GetSection("HuggingFace:ApiKey").Get()!; + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs index 3b7895ddb733..1d22b75c7194 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Chroma/ChromaMemoryStoreTests.cs @@ -5,12 +5,12 @@ using System.Linq; using System.Net.Http; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Chroma; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Chroma; using Microsoft.SemanticKernel.Memory; using Xunit; -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Chroma; +namespace SemanticKernel.IntegrationTests.Connectors.Chroma; /// /// Integration tests for class. 
@@ -118,7 +118,7 @@ public async Task ItThrowsExceptionOnNonExistentCollectionDeletionAsync() var exception = await Record.ExceptionAsync(() => this._chromaMemoryStore.DeleteCollectionAsync(collectionName)); // Assert - Assert.IsType(exception); + Assert.IsType(exception); Assert.Contains( $"Cannot delete non-existent collection {collectionName}", exception.Message, diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/DataHelper.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/DataHelper.cs new file mode 100644 index 000000000000..646cfc27c588 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/DataHelper.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using System.Numerics.Tensors; +using Microsoft.SemanticKernel.Memory; + +namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; + +internal static class DataHelper +{ + public static MemoryRecord[] VectorSearchExpectedResults { get; } + public static MemoryRecord[] VectorSearchTestRecords { get; } + public static float[] VectorSearchTestEmbedding { get; } + + static DataHelper() + { + VectorSearchTestRecords = CreateBatchRecords(8); + VectorSearchTestEmbedding = new[] { 1, 0.699f, 0.701f }; + VectorSearchExpectedResults = VectorSearchTestRecords + .OrderByDescending(r => TensorPrimitives.CosineSimilarity(r.Embedding.Span, VectorSearchTestEmbedding)) + .ToArray(); + } + + public static MemoryRecord CreateRecord(string id) => + MemoryRecord.LocalRecord( + id: id, + text: $"text_{id}", + description: $"description_{id}", + embedding: new[] { 1.1f, 2.2f, 3.3f }, + timestamp: GetDateTime()); + + public static MemoryRecord[] CreateBatchRecords(int count) => + Enumerable + .Range(0, count) + .Select(i => MemoryRecord.LocalRecord( + id: $"test_{i}", + text: $"text_{i}", + description: $"description_{i}", + embedding: new[] { 1, (float)Math.Cos(Math.PI * i / count), (float)Math.Sin(Math.PI * i / count) }, + timestamp: GetDateTime())) + .ToArray(); + + private static DateTime GetDateTime() => + new(TimeSpan.TicksPerMillisecond * (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond), DateTimeKind.Local); +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTests.cs new file mode 100644 index 000000000000..f692c3cedd13 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTests.cs @@ -0,0 +1,430 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.MongoDB; +using Microsoft.SemanticKernel.Memory; +using MongoDB.Driver; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; + +/// +/// Integration tests of . +/// +public class MongoDBMemoryStoreTests : IClassFixture +{ + // If null, all tests will be enabled + private const string? 
SkipReason = "MongoDB Atlas cluster is required"; + + private readonly MongoDBMemoryStoreTestsFixture _fixture; + + public MongoDBMemoryStoreTests(MongoDBMemoryStoreTestsFixture fixture) + { + this._fixture = fixture; + } + + [Fact(Skip = SkipReason)] + public async Task ItCanCreateAndGetCollectionAsync() + { + // Arrange + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + var collectionNames = memoryStore.GetCollectionsAsync(); + + // Assert + Assert.True(await collectionNames.ContainsAsync(collectionName)); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanCheckIfCollectionExistsAsync() + { + // Arrange + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + + // Assert + Assert.True(await memoryStore.DoesCollectionExistAsync(collectionName)); + Assert.False(await memoryStore.DoesCollectionExistAsync($"{collectionName}_1")); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanDeleteCollectionsAsync() + { + // Arrange + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + var collectionExistsAfterCreation = await memoryStore.DoesCollectionExistAsync(collectionName); + await memoryStore.DeleteCollectionAsync(collectionName); + + // Assert + Assert.True(collectionExistsAfterCreation); + Assert.False(await memoryStore.DoesCollectionExistAsync(collectionName)); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanTryDeleteNonExistingCollectionAsync() + { + // Arrange + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + + // Act + await memoryStore.DeleteCollectionAsync(collectionName); + + // Assert + Assert.False(await memoryStore.DoesCollectionExistAsync(collectionName)); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanBatchGetAsync() + { + // Arrange + const string Id = "test"; + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + + var testRecord = DataHelper.CreateRecord(Id); + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + var upsertedId = await memoryStore.UpsertAsync(collectionName, testRecord); + + var actualNoEmbedding = await memoryStore.GetAsync(collectionName, upsertedId); + var actualWithEmbedding = await memoryStore.GetAsync(collectionName, upsertedId, true); + + // Assert + Assert.NotNull(actualNoEmbedding); + Assert.NotNull(actualWithEmbedding); + AssertMemoryRecordEqualWithoutEmbedding(testRecord, actualNoEmbedding); + AssertMemoryRecordEqual(testRecord, actualWithEmbedding); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanGetEmptyEmbeddingAsync() + { + // Arrange + const string Id = "test"; + const string CollectionName = "test_collection"; + var memoryStore = this._fixture.MemoryStore; + + var testRecord = DataHelper.CreateRecord(Id); + + // Act + await memoryStore.CreateCollectionAsync(CollectionName); + var upsertedId = await memoryStore.UpsertAsync(CollectionName, testRecord); + + var actualNoEmbedding = await memoryStore.GetAsync(CollectionName, upsertedId); + var actualWithEmbedding = await memoryStore.GetAsync(CollectionName, upsertedId, true); + + // Assert + Assert.NotNull(actualNoEmbedding); + Assert.NotNull(actualWithEmbedding); + AssertMemoryRecordEqualWithoutEmbedding(testRecord, actualNoEmbedding); + 
AssertMemoryRecordEqual(testRecord, actualWithEmbedding); + } + + [Theory(Skip = SkipReason)] + [InlineData(true)] + [InlineData(false)] + public async Task ItCanBatchUpsertRecordsAsync(bool withEmbeddings) + { + // Arrange + const int Count = 10; + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + var records = DataHelper.CreateBatchRecords(Count); + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + var keys = await memoryStore.UpsertBatchAsync(collectionName, records).ToListAsync(); + var actualRecords = await memoryStore.GetBatchAsync(collectionName, keys, withEmbeddings: withEmbeddings).ToListAsync(); + + // Assert + Assert.NotNull(keys); + Assert.NotNull(actualRecords); + Assert.Equal(Count, keys.Count); + Assert.Equal(Count, actualRecords.Count); + + var actualRecordsOrdered = actualRecords.OrderBy(r => r.Key).ToArray(); + for (int i = 0; i < Count; i++) + { + AssertMemoryRecordEqual(records[i], actualRecordsOrdered[i], assertEmbeddingEqual: withEmbeddings); + } + } + + [Fact(Skip = SkipReason)] + public async Task ItCanUpsertDifferentMemoryRecordsWithSameKeyAsync() + { + // Arrange + const string Id = "test"; + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + + var testRecord1 = MemoryRecord.LocalRecord( + id: Id, + text: "text1", + description: "description", + embedding: new float[] { 1, 2, 3 }); + var testRecord2 = MemoryRecord.LocalRecord( + id: Id, + text: "text2", + description: "description new", + embedding: new float[] { 1, 2, 4 }); + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + var upsertedId1 = await memoryStore.UpsertAsync(collectionName, testRecord1); + var actual1 = await memoryStore.GetAsync(collectionName, Id, true); + + var upsertedId2 = await memoryStore.UpsertAsync(collectionName, testRecord2); + var actual2 = await memoryStore.GetAsync(collectionName, Id, true); + + // Assert + Assert.NotNull(actual1); + Assert.NotNull(actual2); + AssertMemoryRecordEqual(testRecord1, actual1); + AssertMemoryRecordEqual(testRecord2, actual2); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanRemoveRecordAsync() + { + // Arrange + const string Id = "test"; + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + + var testRecord = DataHelper.CreateRecord(Id); + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + await memoryStore.UpsertAsync(collectionName, testRecord); + await memoryStore.RemoveAsync(collectionName, Id); + + // Assert + var actual = await memoryStore.GetAsync(collectionName, Id); + Assert.Null(actual); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanTryRemovingNonExistingRecordAsync() + { + // Arrange + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + await memoryStore.RemoveAsync(collectionName, "key"); + + var actual = await memoryStore.GetAsync(collectionName, "key"); + + // Assert + Assert.Null(actual); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanBatchRemoveRecordsAsync() + { + // Arrange + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + var testRecords = DataHelper.CreateBatchRecords(10); + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + var ids = await memoryStore.UpsertBatchAsync(collectionName, testRecords).ToListAsync(); + await memoryStore.RemoveBatchAsync(collectionName, ids); + + // 
Assert + var actual = await memoryStore.GetBatchAsync(collectionName, ids).ToListAsync(); + Assert.Empty(actual); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanTryBatchRemovingNonExistingRecordsAsync() + { + // Arrange + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + var ids = new[] { "a", "b", "c" }; + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + await memoryStore.RemoveBatchAsync(collectionName, ids); + + // Assert + var actual = await memoryStore.GetBatchAsync(collectionName, ids).ToListAsync(); + Assert.Empty(actual); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanTryBatchRemovingMixedExistingAndNonExistingRecordsAsync() + { + // Arrange + var collectionName = GetRandomName(); + var memoryStore = this._fixture.MemoryStore; + var testRecords = DataHelper.CreateBatchRecords(10); + var ids = testRecords.Select(t => t.Metadata.Id).Concat(new[] { "a", "b", "c" }).ToArray(); + + // Act + await memoryStore.CreateCollectionAsync(collectionName); + await memoryStore.RemoveBatchAsync(collectionName, ids); + + // Assert + var actual = await memoryStore.GetBatchAsync(collectionName, ids).ToListAsync(); + Assert.Empty(actual); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanListAllDatabaseCollectionsAsync() + { + // Arrange + var memoryStore = this._fixture.ListCollectionsMemoryStore; + var testCollections = new[] { "collection1", "collection2", "collection3" }; + foreach (var collection in testCollections) + { + await memoryStore.CreateCollectionAsync(collection); + } + + // Act + var actualCollections = await memoryStore.GetCollectionsAsync().ToListAsync(); + actualCollections?.Sort(); + + // Assert + foreach (var collection in testCollections) + { + Assert.True(await memoryStore.DoesCollectionExistAsync(collection)); + } + + Assert.NotNull(actualCollections); + Assert.True(testCollections.SequenceEqual(actualCollections)); + } + + [Theory(Skip = SkipReason)] + [InlineData(true)] + [InlineData(false)] + public async Task ItCanGetNearestMatchAsync(bool withEmbedding) + { + // Arrange + var collectionName = this._fixture.VectorSearchCollectionName; + var memoryStore = this._fixture.VectorSearchMemoryStore; + var searchEmbedding = DataHelper.VectorSearchTestEmbedding; + var nearestMatchExpected = DataHelper.VectorSearchExpectedResults[0]; + + // Act + var nearestMatch = await memoryStore.GetNearestMatchAsync(collectionName, searchEmbedding, withEmbedding: withEmbedding); + + // Assert + Assert.NotNull(nearestMatch); + + var actual = nearestMatch.Value.Item1; + Assert.NotNull(actual); + Assert.InRange(nearestMatch.Value.Item2, 0.9999, 1); + AssertMemoryRecordEqual(nearestMatchExpected, actual, assertEmbeddingEqual: withEmbedding); + } + + [Theory(Skip = SkipReason)] + [InlineData(1, false)] + [InlineData(1, true)] + [InlineData(5, false)] + [InlineData(8, false)] + public async Task ItCanGetNearestMatchesAsync(int limit, bool withEmbeddings) + { + // Arrange + var collectionName = this._fixture.VectorSearchCollectionName; + var memoryStore = this._fixture.VectorSearchMemoryStore; + var searchEmbedding = DataHelper.VectorSearchTestEmbedding; + var nearestMatchesExpected = DataHelper.VectorSearchExpectedResults; + + // Act + var nearestMatchesActual = await memoryStore.GetNearestMatchesAsync( + collectionName, + searchEmbedding, + limit, + withEmbeddings: withEmbeddings) + .ToListAsync(); + + // Assert + Assert.NotNull(nearestMatchesActual); + Assert.Equal(nearestMatchesActual.Count, limit); + + for 
(int i = 0; i < limit; i++) + { + AssertMemoryRecordEqual(nearestMatchesExpected[i], nearestMatchesActual[i].Item1, withEmbeddings); + } + } + + [Theory(Skip = SkipReason)] + [InlineData(0.999, 1, false)] + [InlineData(0.847, 5, false)] + [InlineData(0.847, 5, true)] + [InlineData(0.111, 8, false)] + public async Task ItCanGetNearestMatchesFilteredByMinScoreAsync(double minScore, int expectedCount, bool withEmbeddings) + { + // Arrange + var collectionName = this._fixture.VectorSearchCollectionName; + var memoryStore = this._fixture.VectorSearchMemoryStore; + var searchEmbedding = DataHelper.VectorSearchTestEmbedding; + var nearestMatchesExpected = DataHelper.VectorSearchExpectedResults; + + // Act + var nearestMatchesActual = await memoryStore.GetNearestMatchesAsync( + collectionName, + searchEmbedding, + 100, + minScore, + withEmbeddings: withEmbeddings) + .ToListAsync(); + + // Assert + Assert.NotNull(nearestMatchesActual); + Assert.Equal(nearestMatchesActual.Count, expectedCount); + + for (int i = 0; i < expectedCount; i++) + { + AssertMemoryRecordEqual(nearestMatchesExpected[i], nearestMatchesActual[i].Item1, withEmbeddings); + } + } + + #region private ================================================================================ + + private static void AssertMemoryRecordEqualWithoutEmbedding(MemoryRecord expectedRecord, MemoryRecord actualRecord) => + AssertMemoryRecordEqual(expectedRecord, actualRecord, false); + + private static void AssertMemoryRecordEqual(MemoryRecord expectedRecord, MemoryRecord actualRecord, bool assertEmbeddingEqual = true) + { + Assert.Equal(expectedRecord.Key, actualRecord.Key); + Assert.Equal(expectedRecord.Timestamp, actualRecord.Timestamp); + Assert.Equal(expectedRecord.Metadata.Id, actualRecord.Metadata.Id); + Assert.Equal(expectedRecord.Metadata.Text, actualRecord.Metadata.Text); + Assert.Equal(expectedRecord.Metadata.Description, actualRecord.Metadata.Description); + Assert.Equal(expectedRecord.Metadata.AdditionalMetadata, actualRecord.Metadata.AdditionalMetadata); + Assert.Equal(expectedRecord.Metadata.IsReference, actualRecord.Metadata.IsReference); + Assert.Equal(expectedRecord.Metadata.ExternalSourceName, actualRecord.Metadata.ExternalSourceName); + + if (assertEmbeddingEqual) + { + Assert.True(expectedRecord.Embedding.Span.SequenceEqual(actualRecord.Embedding.Span)); + } + else + { + Assert.True(actualRecord.Embedding.Span.IsEmpty); + } + } + + private static string GetRandomName() => $"test_{Guid.NewGuid():N}"; + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTestsFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTestsFixture.cs new file mode 100644 index 000000000000..b82bdb9fced4 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/MongoDB/MongoDBMemoryStoreTestsFixture.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.Connectors.MongoDB; +using MongoDB.Driver; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.MongoDB; + +public class MongoDBMemoryStoreTestsFixture : IAsyncLifetime +{ +#pragma warning disable CA1859 // Use concrete types when possible for improved performance + private readonly IMongoClient _mongoClient = null!; +#pragma warning restore CA1859 // Use concrete types when possible for improved performance + + public string DatabaseTestName { get; } + public string ListCollectionsDatabaseTestName { get; } + public string VectorSearchCollectionName { get; } + + public MongoDBMemoryStore MemoryStore { get; } + public MongoDBMemoryStore ListCollectionsMemoryStore { get; } + public MongoDBMemoryStore VectorSearchMemoryStore { get; } + + public MongoDBMemoryStoreTestsFixture() + { + // Load configuration + var configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .Build(); + + var connectionString = GetSetting(configuration, "ConnectionString"); + var vectorSearchCollection = GetSetting(configuration, "VectorSearchCollection"); + + var vectorSearchCollectionNamespace = CollectionNamespace.FromFullName(vectorSearchCollection); + this.VectorSearchCollectionName = vectorSearchCollectionNamespace.CollectionName; + + var mongoClientSettings = MongoClientSettings.FromConnectionString(connectionString); + mongoClientSettings.ApplicationName = GetRandomName(); + + this.DatabaseTestName = "dotnetMSKIntegrationTests1"; + this.ListCollectionsDatabaseTestName = "dotnetMSKIntegrationTests2"; + + this._mongoClient = new MongoClient(mongoClientSettings); + this.MemoryStore = new MongoDBMemoryStore(this._mongoClient, this.DatabaseTestName); + this.VectorSearchMemoryStore = new MongoDBMemoryStore(this._mongoClient, vectorSearchCollectionNamespace.DatabaseNamespace.DatabaseName); + this.ListCollectionsMemoryStore = new MongoDBMemoryStore(this._mongoClient, this.ListCollectionsDatabaseTestName); + } + + public async Task InitializeAsync() + { + await this.VectorSearchMemoryStore.UpsertBatchAsync(this.VectorSearchCollectionName, DataHelper.VectorSearchTestRecords).ToListAsync(); + } + + public async Task DisposeAsync() + { + await this.DropAllCollectionsAsync(this.DatabaseTestName); + await this.DropAllCollectionsAsync(this.ListCollectionsDatabaseTestName); + + this.MemoryStore.Dispose(); + this.ListCollectionsMemoryStore.Dispose(); + this.VectorSearchMemoryStore.Dispose(); + } + + #region private ================================================================================ + + private async Task DropAllCollectionsAsync(string databaseName) + { + var database = this._mongoClient.GetDatabase(databaseName); + var allCollectionCursor = await database.ListCollectionNamesAsync(); + var allCollectionNames = await allCollectionCursor.ToListAsync(); + + foreach (var collectionName in allCollectionNames) + { + await database.DropCollectionAsync(collectionName); + } + } + + private static string GetSetting(IConfigurationRoot configuration, string settingName) + { + var settingValue = configuration[$"MongoDB:{settingName}"]; + if (string.IsNullOrWhiteSpace(settingValue)) + { + throw new ArgumentNullException(settingName, $"{settingName} setting is not configured"); + } + + return settingValue; + } + + private static string
GetRandomName() => $"test_{Guid.NewGuid():N}"; + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresMemoryStoreTests.cs index 916e7e44f0c1..6435dc67da69 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Postgres/PostgresMemoryStoreTests.cs @@ -6,13 +6,12 @@ using System.Linq; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; -using Microsoft.SemanticKernel.Connectors.Memory.Postgres; +using Microsoft.SemanticKernel.Connectors.Postgres; using Microsoft.SemanticKernel.Memory; using Npgsql; -using Pgvector.Npgsql; using Xunit; -namespace SemanticKernel.IntegrationTests.Connectors.Memory.Postgres; +namespace SemanticKernel.IntegrationTests.Connectors.Postgres; /// /// Integration tests of . diff --git a/dotnet/src/IntegrationTests/Connectors/Milvus/MilvusMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Milvus/MilvusMemoryStoreTests.cs index db4c1d121c03..af3479fb8c9d 100644 --- a/dotnet/src/IntegrationTests/Connectors/Milvus/MilvusMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Milvus/MilvusMemoryStoreTests.cs @@ -4,11 +4,11 @@ using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Milvus; +using Microsoft.SemanticKernel.Connectors.Milvus; using Microsoft.SemanticKernel.Memory; using Xunit; -namespace SemanticKernel.IntegrationTests.Connectors.Milvus; +namespace SemanticKernel.IntegrationTests.Milvus; public class MilvusMemoryStoreTests : IAsyncLifetime { diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/AzureOpenAICompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/AzureOpenAICompletionTests.cs deleted file mode 100644 index 21eff7a0f286..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/AzureOpenAICompletionTests.cs +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Configuration; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Reliability.Basic; -using SemanticKernel.IntegrationTests.TestSettings; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; - -public sealed class AzureOpenAICompletionTests : IDisposable -{ - private readonly IConfigurationRoot _configuration; - private readonly XunitLogger _logger; - private readonly RedirectOutput _testOutputHelper; - - public AzureOpenAICompletionTests(ITestOutputHelper output) - { - this._logger = new XunitLogger(output); - this._testOutputHelper = new RedirectOutput(output); - Console.SetOut(this._testOutputHelper); - - // Load configuration - this._configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); - } - - [Theory] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?")] - public async Task AzureOpenAIChatNoHttpRetryPolicyTestShouldThrowAsync(string prompt) - { - // Arrange - var configuration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(configuration); - - var httpRetryConfig = new BasicRetryConfig { MaxRetryCount = 0 }; - BasicHttpRetryHandlerFactory defaultHttpRetryHandlerFactory = new(httpRetryConfig); - - var target = new KernelBuilder() - .WithLoggerFactory(this._logger) - .WithAzureChatCompletionService(configuration.ChatDeploymentName!, configuration.Endpoint, configuration.ApiKey) - .WithHttpHandlerFactory(defaultHttpRetryHandlerFactory) - .Build(); - - // Act - var func = target.CreateSemanticFunction(prompt); - - var exception = await Assert.ThrowsAsync(() => func.InvokeAsync(string.Empty, target, requestSettings: new OpenAIRequestSettings() { MaxTokens = 1000000, Temperature = 0.5, TopP = 0.5 })); - - // Assert - Assert.NotNull(exception); - } - - [Theory] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?")] - public async Task AzureOpenAIChatNoHttpRetryPolicyCustomClientShouldThrowAsync(string prompt) - { - // Arrange - var configuration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(configuration); - - var clientOptions = new OpenAIClientOptions(); - clientOptions.Retry.MaxRetries = 0; - clientOptions.Retry.NetworkTimeout = TimeSpan.FromSeconds(10); - - var openAIClient = new OpenAIClient(new Uri(configuration.Endpoint), new AzureKeyCredential(configuration.ApiKey), clientOptions); - - var target = new KernelBuilder() - .WithLoggerFactory(this._logger) - .WithAzureChatCompletionService(configuration.ChatDeploymentName!, openAIClient) - .Build(); - - // Act - var func = target.CreateSemanticFunction(prompt); - - var exception = await Assert.ThrowsAsync(() => func.InvokeAsync(string.Empty, target, requestSettings: new OpenAIRequestSettings() { MaxTokens = 1000000, Temperature = 0.5, TopP = 0.5 })); - - // Assert - Assert.NotNull(exception); - } - - public void Dispose() - { - this.Dispose(true); - GC.SuppressFinalize(this); - } - - ~AzureOpenAICompletionTests() - { - this.Dispose(false); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - this._logger.Dispose(); - 
this._testOutputHelper.Dispose(); - } - } -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs new file mode 100644 index 000000000000..220fea717fef --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs @@ -0,0 +1,162 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Linq; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +public sealed class ChatHistoryTests : IDisposable +{ + private readonly IKernelBuilder _kernelBuilder; + private readonly XunitLogger<Kernel> _logger; + private readonly RedirectOutput _testOutputHelper; + private readonly IConfigurationRoot _configuration; + private static readonly JsonSerializerOptions s_jsonOptionsCache = new() { WriteIndented = true }; + public ChatHistoryTests(ITestOutputHelper output) + { + this._logger = new XunitLogger<Kernel>(output); + this._testOutputHelper = new RedirectOutput(output); + Console.SetOut(this._testOutputHelper); + + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets<ChatHistoryTests>() + .Build(); + + this._kernelBuilder = Kernel.CreateBuilder(); + } + + [Fact] + public async Task ItSerializesAndDeserializesChatHistoryAsync() + { + // Arrange + this._kernelBuilder.Services.AddSingleton<ILoggerFactory>(this._logger); + var builder = this._kernelBuilder; + this.ConfigureAzureOpenAIChatAsText(builder); + builder.Plugins.AddFromType<FakePlugin>(); + var kernel = builder.Build(); + + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + ChatHistory history = new(); + + // Act + history.AddUserMessage("Make me a special poem"); + var historyBeforeJson = JsonSerializer.Serialize(history.ToList(), s_jsonOptionsCache); + var service = kernel.GetRequiredService<IChatCompletionService>(); + ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); + history.AddUserMessage("Ok thank you"); + + ChatMessageContent resultOriginalWorking = await service.GetChatMessageContentAsync(history, settings, kernel); + var historyJson = JsonSerializer.Serialize(history, s_jsonOptionsCache); + var historyAfterSerialization = JsonSerializer.Deserialize<ChatHistory>(historyJson); + var exception = await Record.ExceptionAsync(() => service.GetChatMessageContentAsync(historyAfterSerialization!, settings, kernel)); + + // Assert + Assert.Null(exception); + } + + [Fact] + public async Task ItUsesChatSystemPromptFromSettingsAsync() + { + // Arrange + this._kernelBuilder.Services.AddSingleton<ILoggerFactory>(this._logger); + var builder = this._kernelBuilder; + this.ConfigureAzureOpenAIChatAsText(builder); + builder.Plugins.AddFromType<FakePlugin>(); + var kernel = builder.Build(); + + string systemPrompt = "You are batman. 
If asked who you are, say 'I am Batman!'"; + + OpenAIPromptExecutionSettings settings = new() { ChatSystemPrompt = systemPrompt }; + ChatHistory history = new(); + + // Act + history.AddUserMessage("Who are you?"); + var service = kernel.GetRequiredService<IChatCompletionService>(); + ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); + + // Assert + Assert.Contains("Batman", result.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ItUsesChatSystemPromptFromChatHistoryAsync() + { + // Arrange + this._kernelBuilder.Services.AddSingleton<ILoggerFactory>(this._logger); + var builder = this._kernelBuilder; + this.ConfigureAzureOpenAIChatAsText(builder); + builder.Plugins.AddFromType<FakePlugin>(); + var kernel = builder.Build(); + + string systemPrompt = "You are batman. If asked who you are, say 'I am Batman!'"; + + OpenAIPromptExecutionSettings settings = new(); + ChatHistory history = new(systemPrompt); + + // Act + history.AddUserMessage("Who are you?"); + var service = kernel.GetRequiredService<IChatCompletionService>(); + ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); + + // Assert + Assert.Contains("Batman", result.ToString(), StringComparison.OrdinalIgnoreCase); + } + + private void ConfigureAzureOpenAIChatAsText(IKernelBuilder kernelBuilder) + { + var azureOpenAIConfiguration = this._configuration.GetSection("Planners:AzureOpenAI").Get<AzureOpenAIConfiguration>(); + + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.ApiKey); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + Assert.NotNull(azureOpenAIConfiguration.ServiceId); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey, + serviceId: azureOpenAIConfiguration.ServiceId); + } + + public class FakePlugin + { + [KernelFunction, Description("creates a special poem")] + public string CreateSpecialPoem() + { + return "ABCDE"; + } + } + + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + this._logger.Dispose(); + this._testOutputHelper.Dispose(); + } + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs index f0a6f607f565..8cf10ece9c88 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs @@ -4,14 +4,15 @@ using System.Collections.Generic; using System.Linq; using System.Net; +using System.Text; +using System.Text.Json; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; +using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Reliability.Basic; -using Microsoft.SemanticKernel.TemplateEngine; +using Microsoft.SemanticKernel.Connectors.OpenAI; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; using Xunit.Abstractions; @@ -22,7 +23,8 @@ namespace 
SemanticKernel.IntegrationTests.Connectors.OpenAI; public sealed class OpenAICompletionTests : IDisposable { - private readonly KernelBuilder _kernelBuilder; + private const string InputParameterName = "input"; + private readonly IKernelBuilder _kernelBuilder; private readonly IConfigurationRoot _configuration; public OpenAICompletionTests(ITestOutputHelper output) @@ -39,8 +41,7 @@ public OpenAICompletionTests(ITestOutputHelper output) .AddUserSecrets() .Build(); - this._kernelBuilder = new KernelBuilder(); - this._kernelBuilder.WithRetryBasic(); + this._kernelBuilder = Kernel.CreateBuilder(); } [Theory(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] @@ -51,19 +52,18 @@ public async Task OpenAITestAsync(string prompt, string expectedAnswerContains) var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); Assert.NotNull(openAIConfiguration); - IKernel target = this._kernelBuilder - .WithLoggerFactory(this._logger) - .WithOpenAITextCompletionService( + this._kernelBuilder.Services.AddSingleton(this._logger); + Kernel target = this._kernelBuilder + .AddOpenAITextGeneration( serviceId: openAIConfiguration.ServiceId, modelId: openAIConfiguration.ModelId, - apiKey: openAIConfiguration.ApiKey, - setAsDefault: true) + apiKey: openAIConfiguration.ApiKey) .Build(); - IDictionary plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); // Act - KernelResult actual = await target.RunAsync(prompt, plugins["Chat"]); + FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); // Assert Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); @@ -74,48 +74,84 @@ public async Task OpenAITestAsync(string prompt, string expectedAnswerContains) public async Task OpenAIChatAsTextTestAsync(string prompt, string expectedAnswerContains) { // Arrange - KernelBuilder builder = this._kernelBuilder.WithLoggerFactory(this._logger); + this._kernelBuilder.Services.AddSingleton(this._logger); + IKernelBuilder builder = this._kernelBuilder; this.ConfigureChatOpenAI(builder); - IKernel target = builder.Build(); + Kernel target = builder.Build(); - IDictionary plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); // Act - KernelResult actual = await target.RunAsync(prompt, plugins["Chat"]); + FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); // Assert Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); } [Fact(Skip = "Skipping while we investigate issue with GitHub actions.")] - public async Task CanUseOpenAiChatForTextCompletionAsync() + public async Task CanUseOpenAiChatForTextGenerationAsync() { - // Note: we use OpenAi Chat Completion and GPT 3.5 Turbo - KernelBuilder builder = this._kernelBuilder.WithLoggerFactory(this._logger); + // Note: we use OpenAI Chat Completion and GPT 3.5 Turbo + this._kernelBuilder.Services.AddSingleton(this._logger); + IKernelBuilder builder = this._kernelBuilder; this.ConfigureChatOpenAI(builder); - IKernel target = builder.Build(); + Kernel target = builder.Build(); - var func = target.CreateSemanticFunction( + var func = target.CreateFunctionFromPrompt( "List the two planets after '{{$input}}', excluding 
moons, using bullet points.", - new OpenAIRequestSettings()); + new OpenAIPromptExecutionSettings()); - var result = await func.InvokeAsync("Jupiter", target); + var result = await func.InvokeAsync(target, new() { [InputParameterName] = "Jupiter" }); Assert.NotNull(result); Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); } + [Theory] + [InlineData(false, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] + [InlineData(true, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] + public async Task AzureOpenAIStreamingTestAsync(bool useChatModel, string prompt, string expectedAnswerContains) + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + + if (useChatModel) + { + this.ConfigureAzureOpenAIChatAsText(builder); + } + else + { + this.ConfigureAzureOpenAI(builder); + } + + Kernel target = builder.Build(); + + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + + StringBuilder fullResult = new(); + // Act + await foreach (var content in target.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) + { + fullResult.Append(content); + }; + + // Assert + Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase); + } + [Theory] [InlineData(false, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] [InlineData(true, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] public async Task AzureOpenAITestAsync(bool useChatModel, string prompt, string expectedAnswerContains) { // Arrange - var builder = this._kernelBuilder.WithLoggerFactory(this._logger); + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; if (useChatModel) { @@ -126,12 +162,12 @@ public async Task AzureOpenAITestAsync(bool useChatModel, string prompt, string this.ConfigureAzureOpenAI(builder); } - IKernel target = builder.Build(); + Kernel target = builder.Build(); - IDictionary plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); // Act - KernelResult actual = await target.RunAsync(prompt, plugins["Chat"]); + FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); // Assert Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); @@ -139,30 +175,32 @@ public async Task AzureOpenAITestAsync(bool useChatModel, string prompt, string // If the test fails, please note that SK retry logic may not be fully integrated into the underlying code using Azure SDK [Theory] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", - "Error executing action [attempt 1 of 1]. Reason: Unauthorized. Will retry after 2000ms")] + [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Resilience event occurred")] public async Task OpenAIHttpRetryPolicyTestAsync(string prompt, string expectedOutput) { - // Arrange - var retryConfig = new BasicRetryConfig(); - retryConfig.RetryableStatusCodes.Add(HttpStatusCode.Unauthorized); - OpenAIConfiguration? 
openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); Assert.NotNull(openAIConfiguration); - IKernel target = this._kernelBuilder - .WithLoggerFactory(this._testOutputHelper) - .WithRetryBasic(retryConfig) - .WithOpenAITextCompletionService( + this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); + this._kernelBuilder + .AddOpenAITextGeneration( serviceId: openAIConfiguration.ServiceId, modelId: openAIConfiguration.ModelId, - apiKey: "INVALID_KEY") // Use an invalid API key to force a 401 Unauthorized response - .Build(); + apiKey: "INVALID_KEY"); // Use an invalid API key to force a 401 Unauthorized response + this._kernelBuilder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); + }); + }); + Kernel target = this._kernelBuilder.Build(); - IDictionary plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); // Act - await Assert.ThrowsAsync(() => target.RunAsync(prompt, plugins["Summarize"])); + await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); // Assert Assert.Contains(expectedOutput, this._testOutputHelper.GetLogs(), StringComparison.OrdinalIgnoreCase); @@ -170,38 +208,80 @@ public async Task OpenAIHttpRetryPolicyTestAsync(string prompt, string expectedO // If the test fails, please note that SK retry logic may not be fully integrated into the underlying code using Azure SDK [Theory] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", - "Error executing action [attempt 1 of 1]. Reason: Unauthorized. 
Will retry after 2000ms")] + [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Resilience event occurred")] public async Task AzureOpenAIHttpRetryPolicyTestAsync(string prompt, string expectedOutput) { - // Arrange - var retryConfig = new BasicRetryConfig(); - retryConfig.RetryableStatusCodes.Add(HttpStatusCode.Unauthorized); - - KernelBuilder builder = this._kernelBuilder - .WithLoggerFactory(this._testOutputHelper) - .WithRetryBasic(retryConfig); + this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); + IKernelBuilder builder = this._kernelBuilder; var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); // Use an invalid API key to force a 401 Unauthorized response - builder.WithAzureTextCompletionService( + builder.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, + modelId: azureOpenAIConfiguration.ModelId, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: "INVALID_KEY"); - IKernel target = builder.Build(); + builder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); + }); + }); - IDictionary plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + Kernel target = builder.Build(); + + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); // Act - await Assert.ThrowsAsync(() => target.RunAsync(prompt, plugins["Summarize"])); + await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); // Assert Assert.Contains(expectedOutput, this._testOutputHelper.GetLogs(), StringComparison.OrdinalIgnoreCase); } + [Theory] + [InlineData(false)] + [InlineData(true)] + public async Task AzureOpenAIShouldReturnTokenUsageInMetadataAsync(bool useChatModel) + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + + if (useChatModel) + { + this.ConfigureAzureOpenAIChatAsText(builder); + } + else + { + this.ConfigureAzureOpenAI(builder); + } + + Kernel target = builder.Build(); + + IReadOnlyKernelPluginCollection plugin = TestHelpers.ImportSamplePlugins(target, "FunPlugin"); + + // Act and Assert + FunctionResult result = await target.InvokeAsync(plugin["FunPlugin"]["Limerick"]); + + Assert.NotNull(result.Metadata); + Assert.True(result.Metadata.TryGetValue("Usage", out object? 
usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("PromptTokens", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + Assert.True(jsonObject.TryGetProperty("CompletionTokens", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + } + [Fact] public async Task OpenAIHttpInvalidKeyShouldReturnErrorDetailAsync() { @@ -210,17 +290,18 @@ public async Task OpenAIHttpInvalidKeyShouldReturnErrorDetailAsync() Assert.NotNull(openAIConfiguration); // Use an invalid API key to force a 401 Unauthorized response - IKernel target = this._kernelBuilder - .WithOpenAITextCompletionService( + this._kernelBuilder.Services.AddSingleton(this._logger); + Kernel target = this._kernelBuilder + .AddOpenAITextGeneration( modelId: openAIConfiguration.ModelId, apiKey: "INVALID_KEY", serviceId: openAIConfiguration.ServiceId) .Build(); - IDictionary plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); // Act and Assert - var ex = await Assert.ThrowsAsync(() => target.RunAsync("Any", plugins["Summarize"])); + var ex = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = "Any" })); Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)ex).StatusCode); } @@ -232,19 +313,20 @@ public async Task AzureOpenAIHttpInvalidKeyShouldReturnErrorDetailAsync() var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); - IKernel target = this._kernelBuilder - .WithLoggerFactory(this._testOutputHelper) - .WithAzureTextCompletionService( + this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); + Kernel target = this._kernelBuilder + .AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, + modelId: azureOpenAIConfiguration.ModelId, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: "INVALID_KEY", serviceId: azureOpenAIConfiguration.ServiceId) .Build(); - IDictionary plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); // Act and Assert - var ex = await Assert.ThrowsAsync(() => target.RunAsync("Any", plugins["Summarize"])); + var ex = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = "Any" })); Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)ex).StatusCode); } @@ -256,20 +338,21 @@ public async Task AzureOpenAIHttpExceededMaxTokensShouldReturnErrorDetailAsync() Assert.NotNull(azureOpenAIConfiguration); // Arrange - IKernel target = this._kernelBuilder - .WithLoggerFactory(this._testOutputHelper) - .WithAzureTextCompletionService( + this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); + Kernel target = this._kernelBuilder + .AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, + modelId: azureOpenAIConfiguration.ModelId, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey, serviceId: azureOpenAIConfiguration.ServiceId) .Build(); - IDictionary plugins = 
TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); // Act // Assert - await Assert.ThrowsAsync(() => plugins["Summarize"].InvokeAsync(string.Join('.', Enumerable.Range(1, 40000)), target)); + await Assert.ThrowsAsync(() => plugins["SummarizePlugin"]["Summarize"].InvokeAsync(target, new() { [InputParameterName] = string.Join('.', Enumerable.Range(1, 40000)) })); } [Theory(Skip = "This test is for manual verification.")] @@ -287,14 +370,15 @@ public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding, AIS const string ExpectedAnswerContains = "John"; - IKernel target = this._kernelBuilder.WithLoggerFactory(this._logger).Build(); + this._kernelBuilder.Services.AddSingleton(this._logger); + Kernel target = this._kernelBuilder.Build(); this._serviceConfiguration[service](target); - IDictionary plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); // Act - KernelResult actual = await target.RunAsync(prompt, plugins["Chat"]); + FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); // Assert Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); @@ -304,14 +388,15 @@ public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding, AIS public async Task AzureOpenAIInvokePromptTestAsync() { // Arrange - var builder = this._kernelBuilder.WithLoggerFactory(this._logger); + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; this.ConfigureAzureOpenAI(builder); - IKernel target = builder.Build(); + Kernel target = builder.Build(); var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; // Act - KernelResult actual = await target.InvokeSemanticFunctionAsync(prompt, new OpenAIRequestSettings() { MaxTokens = 150 }); + FunctionResult actual = await target.InvokePromptAsync(prompt, new(new OpenAIPromptExecutionSettings() { MaxTokens = 150 })); // Assert Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase); @@ -321,14 +406,15 @@ public async Task AzureOpenAIInvokePromptTestAsync() public async Task AzureOpenAIDefaultValueTestAsync() { // Arrange - var builder = this._kernelBuilder.WithLoggerFactory(this._logger); + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; this.ConfigureAzureOpenAI(builder); - IKernel target = builder.Build(); + Kernel target = builder.Build(); - IDictionary plugin = TestHelpers.ImportSamplePlugins(target, "FunPlugin"); + IReadOnlyKernelPluginCollection plugin = TestHelpers.ImportSamplePlugins(target, "FunPlugin"); // Act - KernelResult actual = await target.RunAsync(plugin["Limerick"]); + FunctionResult actual = await target.InvokeAsync(plugin["FunPlugin"]["Limerick"]); // Assert Assert.Contains("Bob", actual.GetValue(), StringComparison.OrdinalIgnoreCase); @@ -338,47 +424,62 @@ public async Task AzureOpenAIDefaultValueTestAsync() public async Task MultipleServiceLoadPromptConfigTestAsync() { // Arrange - var builder = this._kernelBuilder.WithLoggerFactory(this._logger); + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; this.ConfigureAzureOpenAI(builder); this.ConfigureInvalidAzureOpenAI(builder); - IKernel target = builder.Build(); + Kernel 
target = builder.Build(); var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; - var defaultConfig = new PromptTemplateConfig(); - var azureConfig = PromptTemplateConfig.FromJson( - @" - { - ""completion"": { - ""max_tokens"": 256, - ""service_id"": ""azure-text-davinci-003"" + var defaultPromptModel = new PromptTemplateConfig(prompt) { Name = "FishMarket1" }; + var azurePromptModel = PromptTemplateConfig.FromJson( + @"{ + ""name"": ""FishMarket2"", + ""execution_settings"": { + ""azure-text-davinci-003"": { + ""max_tokens"": 256 + } } }"); + azurePromptModel.Template = prompt; - var defaultFunc = target.RegisterSemanticFunction( - "WherePlugin", "FishMarket1", - defaultConfig, - new PromptTemplate(prompt, defaultConfig, target.PromptTemplateEngine)); - var azureFunc = target.RegisterSemanticFunction( - "WherePlugin", "FishMarket2", - azureConfig, - new PromptTemplate(prompt, azureConfig, target.PromptTemplateEngine)); + var defaultFunc = target.CreateFunctionFromPrompt(defaultPromptModel); + var azureFunc = target.CreateFunctionFromPrompt(azurePromptModel); // Act - await Assert.ThrowsAsync(() => target.RunAsync(defaultFunc)); + await Assert.ThrowsAsync(() => target.InvokeAsync(defaultFunc)); - KernelResult azureResult = await target.RunAsync(azureFunc); + FunctionResult azureResult = await target.InvokeAsync(azureFunc); // Assert Assert.Contains("Pike Place", azureResult.GetValue(), StringComparison.OrdinalIgnoreCase); } + [Fact] + public async Task ChatSystemPromptIsNotIgnoredAsync() + { + // Arrange + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + this.ConfigureAzureOpenAIChatAsText(builder); + Kernel target = builder.Build(); + + // Act + var result = await target.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?", new(settings)); + + // Assert + Assert.Contains("I don't know", result.ToString(), StringComparison.OrdinalIgnoreCase); + } + #region internals private readonly XunitLogger _logger; private readonly RedirectOutput _testOutputHelper; - private readonly Dictionary> _serviceConfiguration = new(); + private readonly Dictionary> _serviceConfiguration = new(); public void Dispose() { @@ -400,7 +501,7 @@ private void Dispose(bool disposing) } } - private void ConfigureChatOpenAI(KernelBuilder kernelBuilder) + private void ConfigureChatOpenAI(IKernelBuilder kernelBuilder) { var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); @@ -409,14 +510,13 @@ private void ConfigureChatOpenAI(KernelBuilder kernelBuilder) Assert.NotNull(openAIConfiguration.ApiKey); Assert.NotNull(openAIConfiguration.ServiceId); - kernelBuilder.WithOpenAIChatCompletionService( + kernelBuilder.AddOpenAIChatCompletion( modelId: openAIConfiguration.ChatModelId, apiKey: openAIConfiguration.ApiKey, - serviceId: openAIConfiguration.ServiceId, - setAsDefault: true); + serviceId: openAIConfiguration.ServiceId); } - private void ConfigureAzureOpenAI(KernelBuilder kernelBuilder) + private void ConfigureAzureOpenAI(IKernelBuilder kernelBuilder) { var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); @@ -426,14 +526,14 @@ private void ConfigureAzureOpenAI(KernelBuilder kernelBuilder) Assert.NotNull(azureOpenAIConfiguration.ApiKey); Assert.NotNull(azureOpenAIConfiguration.ServiceId); - kernelBuilder.WithAzureTextCompletionService( + 
kernelBuilder.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, + modelId: azureOpenAIConfiguration.ModelId, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId, - setAsDefault: true); + serviceId: azureOpenAIConfiguration.ServiceId); } - private void ConfigureInvalidAzureOpenAI(KernelBuilder kernelBuilder) + private void ConfigureInvalidAzureOpenAI(IKernelBuilder kernelBuilder) { var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); @@ -441,15 +541,15 @@ private void ConfigureInvalidAzureOpenAI(KernelBuilder kernelBuilder) Assert.NotNull(azureOpenAIConfiguration.DeploymentName); Assert.NotNull(azureOpenAIConfiguration.Endpoint); - kernelBuilder.WithAzureTextCompletionService( + kernelBuilder.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, + modelId: azureOpenAIConfiguration.ModelId, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: "invalid-api-key", - serviceId: $"invalid-{azureOpenAIConfiguration.ServiceId}", - setAsDefault: true); + serviceId: $"invalid-{azureOpenAIConfiguration.ServiceId}"); } - private void ConfigureAzureOpenAIChatAsText(KernelBuilder kernelBuilder) + private void ConfigureAzureOpenAIChatAsText(IKernelBuilder kernelBuilder) { var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); @@ -459,8 +559,9 @@ private void ConfigureAzureOpenAIChatAsText(KernelBuilder kernelBuilder) Assert.NotNull(azureOpenAIConfiguration.Endpoint); Assert.NotNull(azureOpenAIConfiguration.ServiceId); - kernelBuilder.WithAzureChatCompletionService( + kernelBuilder.AddAzureOpenAIChatCompletion( deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey, serviceId: azureOpenAIConfiguration.ServiceId); diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs index 83fb12cf4163..f325dcef3a92 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs @@ -4,8 +4,8 @@ using System.Collections.Generic; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; using Xunit.Abstractions; @@ -39,7 +39,7 @@ public async Task OpenAITestAsync(string testInputString) OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAIEmbeddings").Get(); Assert.NotNull(openAIConfiguration); - var embeddingGenerator = new OpenAITextEmbeddingGeneration(openAIConfiguration.ModelId, openAIConfiguration.ApiKey); + var embeddingGenerator = new OpenAITextEmbeddingGenerationService(openAIConfiguration.ModelId, openAIConfiguration.ApiKey); // Act var singleResult = await embeddingGenerator.GenerateEmbeddingAsync(testInputString); @@ -58,7 +58,7 @@ public async Task AzureOpenAITestAsync(string testInputString) AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); Assert.NotNull(azureOpenAIConfiguration); - var embeddingGenerator = new AzureTextEmbeddingGeneration(azureOpenAIConfiguration.DeploymentName, + var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService(azureOpenAIConfiguration.DeploymentName, azureOpenAIConfiguration.Endpoint, azureOpenAIConfiguration.ApiKey); diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs new file mode 100644 index 000000000000..c94c8c554498 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs @@ -0,0 +1,204 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.Planners.Stepwise; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +public sealed class OpenAIToolsTests : IDisposable +{ + public OpenAIToolsTests(ITestOutputHelper output) + { + this._testOutputHelper = new RedirectOutput(output); + + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsAsync() + { + // Arrange + Kernel kernel = this.InitializeKernel(); + kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + +#pragma warning disable CS0618 // Events are deprecated + void MyInvokingHandler(object? sender, FunctionInvokingEventArgs e) + { + invokedFunctions.Add(e.Function.Name); + } + + kernel.FunctionInvoking += MyInvokingHandler; +#pragma warning restore CS0618 // Events are deprecated + + // Act + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var result = await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("GetCurrentUtcTime", invokedFunctions); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsStreamingAsync() + { + // Arrange + Kernel kernel = this.InitializeKernel(); + kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + +#pragma warning disable CS0618 // Events are deprecated + void MyInvokingHandler(object? sender, FunctionInvokingEventArgs e) + { + invokedFunctions.Add($"{e.Function.Name}({string.Join(", ", e.Arguments)})"); + } + + kernel.FunctionInvoking += MyInvokingHandler; +#pragma warning restore CS0618 // Events are deprecated + + // Act + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + string result = ""; + await foreach (string c in kernel.InvokePromptStreamingAsync( + $"How much older is John than Jim? 
Compute that value and pass it to the {nameof(TimeInformation)}.{nameof(TimeInformation.InterpretValue)} function, then respond only with its result.", + new(settings))) + { + result += c; + } + + // Assert + Assert.Contains("6", result, StringComparison.InvariantCulture); + Assert.Contains("GetAge([personName, John])", invokedFunctions); + Assert.Contains("GetAge([personName, Jim])", invokedFunctions); + Assert.Contains("InterpretValue([value, 3])", invokedFunctions); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsWithComplexTypeParametersAsync() + { + // Arrange + Kernel kernel = this.InitializeKernel(); + kernel.ImportPluginFromType(); + + // Act + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var result = await kernel.InvokePromptAsync("What is the current temperature in Dublin, Ireland, in Fahrenheit?", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("42.8", result.GetValue(), StringComparison.InvariantCulture); // The WeatherPlugin always returns 42.8 for Dublin, Ireland. + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsWithPrimitiveTypeParametersAsync() + { + // Arrange + Kernel kernel = this.InitializeKernel(); + kernel.ImportPluginFromType(); + + // Act + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var result = await kernel.InvokePromptAsync("Convert 50 degrees Fahrenheit to Celsius.", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("10", result.GetValue(), StringComparison.InvariantCulture); + } + + private Kernel InitializeKernel() + { + OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("Planners:OpenAI").Get(); + Assert.NotNull(openAIConfiguration); + + IKernelBuilder builder = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: openAIConfiguration.ModelId, + apiKey: openAIConfiguration.ApiKey); + + var kernel = builder.Build(); + + return kernel; + } + + private readonly RedirectOutput _testOutputHelper; + private readonly IConfigurationRoot _configuration; + + public void Dispose() => this._testOutputHelper.Dispose(); + + /// + /// A plugin that returns the current time. + /// + public class TimeInformation + { + [KernelFunction] + [Description("Retrieves the current time in UTC.")] + public string GetCurrentUtcTime() => DateTime.UtcNow.ToString("R"); + + [KernelFunction] + [Description("Gets the age of the specified person.")] + public int GetAge(string personName) + { + if ("John".Equals(personName, StringComparison.OrdinalIgnoreCase)) + { + return 33; + } + + if ("Jim".Equals(personName, StringComparison.OrdinalIgnoreCase)) + { + return 30; + } + + return -1; + } + + [KernelFunction] + public int InterpretValue(int value) => value * 2; + } + + public class WeatherPlugin + { + [KernelFunction, Description("Get current temperature.")] + public Task GetCurrentTemperatureAsync(WeatherParameters parameters) + { + if (parameters.City.Name == "Dublin" && (parameters.City.Country == "Ireland" || parameters.City.Country == "IE")) + { + return Task.FromResult(42.8); // 42.8 Fahrenheit. 
+ } + + throw new NotSupportedException($"Weather in {parameters.City.Name} ({parameters.City.Country}) is not supported."); + } + + [KernelFunction, Description("Convert temperature from Fahrenheit to Celsius.")] + public Task ConvertTemperatureAsync(double temperatureInFahrenheit) + { + double temperatureInCelsius = (temperatureInFahrenheit - 32) * 5 / 9; + return Task.FromResult(temperatureInCelsius); + } + } + + public record WeatherParameters(City City); + + public class City + { + public string Name { get; set; } = string.Empty; + public string Country { get; set; } = string.Empty; + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs index 1a2fb7e9d5ea..8976c841f844 100644 --- a/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs @@ -4,8 +4,8 @@ using System.Linq; using System.Net.Http; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.Memory.Weaviate; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Weaviate; using Microsoft.SemanticKernel.Memory; using Xunit; @@ -59,7 +59,7 @@ public async Task EnsureConflictingCollectionNamesAreHandledForDoesExistAsync() Assert.True(await this._weaviateMemoryStore.DoesCollectionExistAsync(collectionName)); var conflictingCollectionName = $"___{collectionName}"; - await Assert.ThrowsAsync(async () => + await Assert.ThrowsAsync(async () => await this._weaviateMemoryStore.DoesCollectionExistAsync(conflictingCollectionName)); } @@ -72,7 +72,7 @@ public async Task EnsureConflictingCollectionNamesAreHandledForDeleteAsync() Assert.True(await this._weaviateMemoryStore.DoesCollectionExistAsync(collectionName)); var conflictingCollectionName = $"___{collectionName}"; - await Assert.ThrowsAsync(async () => + await Assert.ThrowsAsync(async () => await this._weaviateMemoryStore.DeleteCollectionAsync(conflictingCollectionName)); } diff --git a/dotnet/src/IntegrationTests/Extensions/KernelFunctionExtensionsTests.cs b/dotnet/src/IntegrationTests/Extensions/KernelFunctionExtensionsTests.cs new file mode 100644 index 000000000000..fa75469cb3e0 --- /dev/null +++ b/dotnet/src/IntegrationTests/Extensions/KernelFunctionExtensionsTests.cs @@ -0,0 +1,127 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Microsoft.SemanticKernel.TextGeneration; +using SemanticKernel.IntegrationTests.Fakes; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests; + +public sealed class KernelFunctionExtensionsTests : IDisposable +{ + public KernelFunctionExtensionsTests(ITestOutputHelper output) + { + this._logger = new RedirectOutput(output); + } + + [Fact] + public async Task ItSupportsFunctionCallsAsync() + { + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(this._logger); + builder.Services.AddSingleton(new RedirectTextGenerationService()); + builder.Plugins.AddFromType(); + Kernel target = builder.Build(); + + var prompt = $"Hey {{{{{nameof(EmailPluginFake)}.GetEmailAddress}}}}"; + + // Act + FunctionResult actual = await target.InvokePromptAsync(prompt, new(new OpenAIPromptExecutionSettings() { MaxTokens = 150 })); + + // Assert + Assert.Equal("Hey johndoe1234@example.com", actual.GetValue()); + } + + [Fact] + public async Task ItSupportsFunctionCallsWithInputAsync() + { + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(this._logger); + builder.Services.AddSingleton(new RedirectTextGenerationService()); + builder.Plugins.AddFromType(); + Kernel target = builder.Build(); + + var prompt = $"Hey {{{{{nameof(EmailPluginFake)}.GetEmailAddress \"a person\"}}}}"; + + // Act + FunctionResult actual = await target.InvokePromptAsync(prompt, new(new OpenAIPromptExecutionSettings() { MaxTokens = 150 })); + + // Assert + Assert.Equal("Hey a person@example.com", actual.GetValue()); + } + + [Fact] + public async Task ItSupportsInvokePromptWithHandlebarsAsync() + { + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(this._logger); + builder.Services.AddSingleton(new RedirectTextGenerationService()); + builder.Plugins.AddFromType(); + Kernel target = builder.Build(); + + var prompt = $"Hey {{{{{nameof(EmailPluginFake)}-GetEmailAddress}}}}"; + + // Act + FunctionResult actual = await target.InvokePromptAsync( + prompt, + new(new OpenAIPromptExecutionSettings() { MaxTokens = 150 }), + templateFormat: "handlebars", + promptTemplateFactory: new HandlebarsPromptTemplateFactory()); + + // Assert + Assert.Equal("Hey johndoe1234@example.com", actual.GetValue()); + } + + [Fact] + public async Task ItSupportsInvokeHandlebarsPromptAsync() + { + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(this._logger); + builder.Services.AddSingleton(new RedirectTextGenerationService()); + builder.Plugins.AddFromType(); + Kernel target = builder.Build(); + + var prompt = $"Hey {{{{{nameof(EmailPluginFake)}-GetEmailAddress}}}}"; + + // Act + FunctionResult actual = await target.InvokeHandlebarsPromptAsync( + prompt, + new(new OpenAIPromptExecutionSettings() { MaxTokens = 150 })); + + // Assert + Assert.Equal("Hey johndoe1234@example.com", actual.GetValue()); + } + + private readonly RedirectOutput _logger; + + public void Dispose() + { + this._logger.Dispose(); + } + + private sealed class RedirectTextGenerationService : ITextGenerationService + { + public string? 
ModelId => null; + + public IReadOnlyDictionary<string, object?> Attributes => new Dictionary<string, object?>(); + + public Task<IReadOnlyList<TextContent>> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings, Kernel? kernel, CancellationToken cancellationToken) + { + return Task.FromResult<IReadOnlyList<TextContent>>(new List<TextContent> { new(prompt) }); + } + + public IAsyncEnumerable<StreamingTextContent> GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } + } +} diff --git a/dotnet/src/IntegrationTests/Extensions/KernelSemanticFunctionExtensionsTests.cs b/dotnet/src/IntegrationTests/Extensions/KernelSemanticFunctionExtensionsTests.cs deleted file mode 100644 index f118965bfa26..000000000000 --- a/dotnet/src/IntegrationTests/Extensions/KernelSemanticFunctionExtensionsTests.cs +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using SemanticKernel.IntegrationTests.Fakes; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.IntegrationTests.Extensions; - -public sealed class KernelSemanticFunctionExtensionsTests : IDisposable -{ - public KernelSemanticFunctionExtensionsTests(ITestOutputHelper output) - { - this._logger = new RedirectOutput(output); - this._target = new BasicPromptTemplateEngine(); - } - - [Fact] - public async Task ItSupportsFunctionCallsAsync() - { - var builder = new KernelBuilder() - .WithAIService<ITextCompletion>(null, new RedirectTextCompletion(), true) - .WithLoggerFactory(this._logger); - IKernel target = builder.Build(); - - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); - - var prompt = $"Hey {{{{{FunctionCollection.GlobalFunctionsPluginName}.GetEmailAddress}}}}"; - - // Act - KernelResult actual = await target.InvokeSemanticFunctionAsync(prompt, new OpenAIRequestSettings() { MaxTokens = 150 }); - - // Assert - Assert.Equal("Hey johndoe1234@example.com", actual.GetValue<string>()); - } - - [Fact] - public async Task ItSupportsFunctionCallsWithInputAsync() - { - var builder = new KernelBuilder() - .WithAIService<ITextCompletion>(null, new RedirectTextCompletion(), true) - .WithLoggerFactory(this._logger); - IKernel target = builder.Build(); - - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); - - var prompt = $"Hey {{{{{FunctionCollection.GlobalFunctionsPluginName}.GetEmailAddress \"a person\"}}}}"; - - // Act - KernelResult actual = await target.InvokeSemanticFunctionAsync(prompt, new OpenAIRequestSettings() { MaxTokens = 150 }); - - // Assert - Assert.Equal("Hey a person@example.com", actual.GetValue<string>()); - } - - private readonly RedirectOutput _logger; - private readonly BasicPromptTemplateEngine _target; - - public void Dispose() - { - this._logger.Dispose(); - } - - private sealed class RedirectTextCompletion : ITextCompletion - { - Task<IReadOnlyList<ITextResult>> ITextCompletion.GetCompletionsAsync(string text, AIRequestSettings? requestSettings, CancellationToken cancellationToken) - { - return Task.FromResult<IReadOnlyList<ITextResult>>(new List<ITextResult> { new RedirectTextCompletionResult(text) }); - } - - IAsyncEnumerable<ITextStreamingResult> ITextCompletion.GetStreamingCompletionsAsync(string text, AIRequestSettings?
requestSettings, CancellationToken cancellationToken) - { - throw new NotImplementedException(); // TODO - } - } - - internal sealed class RedirectTextCompletionResult : ITextResult - { - private readonly string _completion; - - public RedirectTextCompletionResult(string completion) - { - this._completion = completion; - } - - public ModelResult ModelResult => new(this._completion); - - public Task GetCompletionAsync(CancellationToken cancellationToken = default) - { - return Task.FromResult(this._completion); - } - } -} diff --git a/dotnet/src/IntegrationTests/Fakes/EmailPluginFake.cs b/dotnet/src/IntegrationTests/Fakes/EmailPluginFake.cs index 0629097f8d22..4890661355ba 100644 --- a/dotnet/src/IntegrationTests/Fakes/EmailPluginFake.cs +++ b/dotnet/src/IntegrationTests/Fakes/EmailPluginFake.cs @@ -5,11 +5,13 @@ using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; +#pragma warning disable CA1812 // Uninstantiated internal types + namespace SemanticKernel.IntegrationTests.Fakes; internal sealed class EmailPluginFake { - [SKFunction, Description("Given an email address and message body, send an email")] + [KernelFunction, Description("Given an email address and message body, send an email")] public Task SendEmailAsync( [Description("The body of the email message to send.")] string input = "", [Description("The email address to send email to.")] string? email_address = "default@email.com") @@ -18,7 +20,7 @@ public Task SendEmailAsync( return Task.FromResult($"Sent email to: {email_address}. Body: {input}"); } - [SKFunction, Description("Lookup an email address for a person given a name")] + [KernelFunction, Description("Lookup an email address for a person given a name")] public Task GetEmailAddressAsync( ILogger logger, [Description("The name of the person to email.")] string? input = null) @@ -33,7 +35,7 @@ public Task GetEmailAddressAsync( return Task.FromResult($"{input}@example.com"); } - [SKFunction, Description("Write a short poem for an e-mail")] + [KernelFunction, Description("Write a short poem for an e-mail")] public Task WritePoemAsync( [Description("The topic of the poem.")] string input) { diff --git a/dotnet/src/IntegrationTests/Fakes/ThrowingEmailPluginFake.cs b/dotnet/src/IntegrationTests/Fakes/ThrowingEmailPluginFake.cs new file mode 100644 index 000000000000..752e48b0b506 --- /dev/null +++ b/dotnet/src/IntegrationTests/Fakes/ThrowingEmailPluginFake.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; + +#pragma warning disable CA1812 // Uninstantiated internal types + +namespace SemanticKernel.IntegrationTests.Fakes; + +internal sealed class ThrowingEmailPluginFake +{ + [KernelFunction, Description("Given an email address and message body, send an email")] + public Task SendEmailAsync( + [Description("The body of the email message to send.")] string input = "", + [Description("The email address to send email to.")] string? 
email_address = "default@email.com") + { + // Throw a non-critical exception for testing + throw new ArgumentException($"Failed to send email to {email_address}"); + } + + [KernelFunction, Description("Write a short poem for an e-mail")] + public Task WritePoemAsync( + [Description("The topic of the poem.")] string input) + { + return Task.FromResult($"Roses are red, violets are blue, {input} is hard, so is this test."); + } + + [KernelFunction, Description("Write a joke for an e-mail")] + public Task WriteJokeAsync() + { + // Throw a critical exception for testing + throw new InvalidProgramException(); + } +} diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 7b2e9a85a212..a86e60cf1cb9 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -6,16 +6,35 @@ LatestMajor true false - CA2007,VSTHRD111 + CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0010,SKEXP0011,SKEXP0012,SKEXP0020,SKEXP0021,SKEXP0022,SKEXP0023,SKEXP0024,SKEXP0025,SKEXP0026,SKEXP0027,SKEXP0028,SKEXP0029,SKEXP0030,SKEXP0031,SKEXP0032,SKEXP0042,SKEXP0050,SKEXP0054,SKEXP0060,SKEXP0061 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 + + + + + + + + + + + + + + + + + + + @@ -26,27 +45,32 @@ runtime; build; native; contentfiles; analyzers; buildtransitive all - - - - + + + - - - + + + - + - + + + + + + + Always @@ -57,8 +81,23 @@ Always - + + + + Always - + + + Always + + + + + + Always + + + Always + \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlanTests.cs b/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlanTests.cs new file mode 100644 index 000000000000..c5099dbc5b26 --- /dev/null +++ b/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlanTests.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Threading.Tasks; +using HandlebarsDotNet; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Planning.Handlebars; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Planners.Handlebars; + +public sealed class HandlebarsPlanTests +{ + public HandlebarsPlanTests() + { + this._kernel = new(); + this._arguments = new() { ["input"] = Guid.NewGuid().ToString("X") }; + } + + private const string PlanTemplate = + @"{{!-- Step 1: Call Bar function --}} +{{set ""barResult"" (Foo-Bar)}} + +{{!-- Step 2: Call BazAsync function --}} +{{set ""bazAsyncResult"" (Foo-Baz)}} + +{{!-- Step 3: Call Combine function with two words --}} +{{set ""combinedWords"" (Foo-Combine x=""Hello"" y=""World"")}} + +{{!-- Step 4: Call StringifyInt function with an integer --}} +{{set ""stringifiedInt"" (Foo-StringifyInt x=42)}} + +{{!-- Step 5: Output the results --}} +{{concat barResult bazAsyncResult combinedWords stringifiedInt}}"; + + [Fact] + public async Task InvokeValidPlanAsync() + { + // Arrange & Act + var result = await this.InvokePlanAsync(PlanTemplate); + + // Assert + Assert.Equal("BarBazWorldHello42", result); + } + + [Fact] + public async Task InvokePlanWithHallucinatedFunctionAsync() + { + // Arrange + var planWithInvalidHelper = PlanTemplate.Replace("Foo-Combine", "Foo-HallucinatedHelper", StringComparison.CurrentCulture); + + // Act & Assert + var exception = await Assert.ThrowsAsync(async () => await this.InvokePlanAsync(planWithInvalidHelper)); + Assert.IsType(exception.InnerException); + Assert.Contains("Template references a helper that cannot be resolved.", exception.InnerException.Message, StringComparison.CurrentCultureIgnoreCase); + } + + #region private + + private readonly Kernel _kernel; + private readonly KernelArguments _arguments; + + private async Task InvokePlanAsync(string planTemplate) + { + // Arrange + this._kernel.ImportPluginFromObject(new Foo()); + var plan = new HandlebarsPlan(planTemplate); + + // Act + return await plan.InvokeAsync(this._kernel, this._arguments); + } + + private sealed class Foo + { + [KernelFunction, Description("Return Bar")] + public string Bar() => "Bar"; + + [KernelFunction, Description("Return Baz")] + public async Task BazAsync() + { + await Task.Delay(1000); + return await Task.FromResult("Baz"); + } + + [KernelFunction, Description("Return words concatenated")] + public string Combine([Description("First word")] string x, [Description("Second word")] string y) => y + x; + + [KernelFunction, Description("Return number as string")] + public string StringifyInt([Description("Number to stringify")] int x) => x.ToString(CultureInfo.InvariantCulture); + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs b/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs new file mode 100644 index 000000000000..ae30ff196f2c --- /dev/null +++ b/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs @@ -0,0 +1,191 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ComponentModel; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Planning.Handlebars; +using SemanticKernel.IntegrationTests.Fakes; +using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Planners.Handlebars; + +public sealed class HandlebarsPlannerTests : IDisposable +{ + public HandlebarsPlannerTests(ITestOutputHelper output) + { + this._testOutputHelper = new RedirectOutput(output); + + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + } + + [Theory] + [InlineData(true, "Write a joke and send it in an e-mail to Kai.", "SendEmail", "test")] + public async Task CreatePlanFunctionFlowAsync(bool useChatModel, string goal, string expectedFunction, string expectedPlugin) + { + // Arrange + bool useEmbeddings = false; + var kernel = this.InitializeKernel(useEmbeddings, useChatModel); + kernel.ImportPluginFromType(expectedPlugin); + TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); + + // Act + var plan = await new HandlebarsPlanner(s_defaultPlannerOptions).CreatePlanAsync(kernel, goal); + + // Assert expected function + Assert.Contains( + $"{expectedPlugin}-{expectedFunction}", + plan.ToString(), + StringComparison.CurrentCulture + ); + } + + [RetryTheory] + [InlineData("Write a novel about software development that is 3 chapters long.", "NovelChapter", "WriterPlugin")] + public async Task CreatePlanWithDefaultsAsync(string goal, string expectedFunction, string expectedPlugin) + { + // Arrange + Kernel kernel = this.InitializeKernel(); + TestHelpers.ImportSamplePlugins(kernel, "WriterPlugin", "MiscPlugin"); + + // Act + var plan = await new HandlebarsPlanner(s_defaultPlannerOptions).CreatePlanAsync(kernel, goal); + + // Assert + Assert.Contains( + $"{expectedPlugin}-{expectedFunction}", + plan.ToString(), + StringComparison.CurrentCulture + ); + } + + [Theory] + [InlineData(true, "List each property of the default Qux object.", "## Complex types", @"### Qux: +{ + ""type"": ""Object"", + ""properties"": { + ""Bar"": { + ""type"": ""String"", + }, + ""Baz"": { + ""type"": ""Int32"", + }, + } +}", "GetDefaultQux", "Foo")] + public async Task CreatePlanWithComplexTypesDefinitionsAsync(bool useChatModel, string goal, string expectedSectionHeader, string expectedTypeHeader, string expectedFunction, string expectedPlugin) + { + // Arrange + bool useEmbeddings = false; + var kernel = this.InitializeKernel(useEmbeddings, useChatModel); + kernel.ImportPluginFromObject(new Foo()); + + // Act + var plan = await new HandlebarsPlanner(s_defaultPlannerOptions).CreatePlanAsync(kernel, goal); + + // Assert expected section header for Complex Types in prompt + Assert.Contains( + expectedSectionHeader, + plan.Prompt, + StringComparison.CurrentCulture + ); + + // Assert expected complex parameter type in prompt + Assert.Contains( + expectedTypeHeader, + plan.Prompt, + StringComparison.CurrentCulture + ); + + // Assert expected function in plan + Assert.Contains( + $"{expectedPlugin}-{expectedFunction}", + plan.ToString(), + StringComparison.CurrentCulture + ); + } + + private Kernel 
InitializeKernel(bool useEmbeddings = false, bool useChatModel = true) + { + AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + AzureOpenAIConfiguration? azureOpenAIEmbeddingsConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); + Assert.NotNull(azureOpenAIEmbeddingsConfiguration); + + IKernelBuilder builder = Kernel.CreateBuilder(); + + if (useChatModel) + { + builder.Services.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, + modelId: azureOpenAIConfiguration.ChatModelId!, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey); + } + else + { + builder.Services.AddAzureOpenAITextGeneration( + deploymentName: azureOpenAIConfiguration.DeploymentName, + modelId: azureOpenAIConfiguration.ModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey); + } + + if (useEmbeddings) + { + builder.Services.AddAzureOpenAITextEmbeddingGeneration( + deploymentName: azureOpenAIEmbeddingsConfiguration.DeploymentName, + modelId: azureOpenAIEmbeddingsConfiguration.EmbeddingModelId!, + endpoint: azureOpenAIEmbeddingsConfiguration.Endpoint, + apiKey: azureOpenAIEmbeddingsConfiguration.ApiKey); + } + + return builder.Build(); + } + + private readonly RedirectOutput _testOutputHelper; + private readonly IConfigurationRoot _configuration; + + private static readonly HandlebarsPlannerOptions s_defaultPlannerOptions = new() + { + ExecutionSettings = new OpenAIPromptExecutionSettings() + { + Temperature = 0.0, + TopP = 0.1, + } + }; + + private sealed class Foo + { + public sealed class Qux + { + public string Bar { get; set; } = string.Empty; + public int Baz { get; set; } + + public Qux(string bar, int baz) + { + this.Bar = bar; + this.Baz = baz; + } + } + + [KernelFunction, Description("Returns default Qux object.")] + public Qux GetDefaultQux() => new("bar", 42); + } + + public void Dispose() + { + this._testOutputHelper.Dispose(); + } +} diff --git a/dotnet/src/IntegrationTests/Planners/PlanTests.cs b/dotnet/src/IntegrationTests/Planners/PlanTests.cs index 4cbd03ab7388..df329d068085 100644 --- a/dotnet/src/IntegrationTests/Planners/PlanTests.cs +++ b/dotnet/src/IntegrationTests/Planners/PlanTests.cs @@ -4,23 +4,21 @@ using System.Collections.Generic; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; +using Microsoft.SemanticKernel.AI; +using Microsoft.SemanticKernel.Events; using Microsoft.SemanticKernel.Planning; using SemanticKernel.IntegrationTests.Fakes; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; using Xunit.Abstractions; -namespace SemanticKernel.IntegrationTests.Planners; +namespace SemanticKernel.IntegrationTests.Planning; public sealed class PlanTests : IDisposable { public PlanTests(ITestOutputHelper output) { - this._loggerFactory = NullLoggerFactory.Instance; this._testOutputHelper = new RedirectOutput(output); // Load configuration @@ -53,8 +51,8 @@ public void CreatePlan(string prompt) public async Task CanExecuteRunSimpleAsync(string inputToEmail, string expectedEmail) { // Arrange - IKernel target = this.InitializeKernel(); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + Kernel kernel = this.InitializeKernel(); + var 
emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; var expectedBody = $"Sent email to: {expectedEmail}. Body: {inputToEmail}".Trim(); var plan = new Plan(emailFunctions["SendEmail"]); @@ -63,7 +61,7 @@ public async Task CanExecuteRunSimpleAsync(string inputToEmail, string expectedE var cv = new ContextVariables(); cv.Update(inputToEmail); cv.Set("email_address", expectedEmail); - var result = await target.RunAsync(cv, plan); + var result = await plan.InvokeAsync(kernel, cv); // Assert Assert.Equal(expectedBody, result.GetValue()); @@ -74,9 +72,9 @@ public async Task CanExecuteRunSimpleAsync(string inputToEmail, string expectedE public async Task CanExecuteAsChatAsync(string inputToEmail, string expectedEmail) { // Arrange - IKernel target = this.InitializeKernel(false, true); + Kernel kernel = this.InitializeKernel(false, true); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; var expectedBody = $"Sent email to: {expectedEmail}. Body: {inputToEmail}".Trim(); var plan = new Plan(emailFunctions["SendEmail"]); @@ -85,7 +83,7 @@ public async Task CanExecuteAsChatAsync(string inputToEmail, string expectedEmai var cv = new ContextVariables(); cv.Update(inputToEmail); cv.Set("email_address", expectedEmail); - var result = await target.RunAsync(cv, plan); + var result = await plan.InvokeAsync(kernel, cv); // Assert Assert.Equal(expectedBody, result.GetValue()); @@ -96,20 +94,20 @@ public async Task CanExecuteAsChatAsync(string inputToEmail, string expectedEmai public async Task CanExecuteRunSimpleStepsAsync(string goal, string inputToTranslate, string language, string expectedEmail) { // Arrange - IKernel target = this.InitializeKernel(); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); - var writerPlugin = TestHelpers.ImportSamplePlugins(target, "WriterPlugin"); + Kernel kernel = this.InitializeKernel(); + var emailPlugin = kernel.Plugins[nameof(EmailPluginFake)]; + var writerPlugin = kernel.Plugins["WriterPlugin"]; var expectedBody = $"Sent email to: {expectedEmail}. 
Body:".Trim(); var plan = new Plan(goal); - plan.AddSteps(writerPlugin["Translate"], emailFunctions["SendEmail"]); + plan.AddSteps(writerPlugin["Translate"], emailPlugin["SendEmail"]); // Act var cv = new ContextVariables(); cv.Update(inputToTranslate); cv.Set("email_address", expectedEmail); cv.Set("language", language); - var result = (await target.RunAsync(cv, plan)).GetValue(); + var result = (await plan.InvokeAsync(kernel, cv)).GetValue(); // Assert Assert.NotNull(result); @@ -121,22 +119,20 @@ public async Task CanExecuteRunSimpleStepsAsync(string goal, string inputToTrans public async Task CanExecutePlanWithTreeStepsAsync() { // Arrange - IKernel target = this.InitializeKernel(); + Kernel kernel = this.InitializeKernel(); var goal = "Write a poem or joke and send it in an e-mail to Kai."; var plan = new Plan(goal); var subPlan = new Plan("Write a poem or joke"); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; // Arrange - var returnContext = target.CreateNewContext(); - subPlan.AddSteps(emailFunctions["WritePoem"], emailFunctions["WritePoem"], emailFunctions["WritePoem"]); - plan.AddSteps(subPlan, emailFunctions["SendEmail"]); + plan.AddSteps(subPlan, new Plan(emailFunctions["SendEmail"])); plan.State.Set("email_address", "something@email.com"); // Act - var result = await target.RunAsync("PlanInput", plan); + var result = await plan.InvokeAsync(kernel, "PlanInput"); // Assert Assert.NotNull(result); @@ -145,14 +141,75 @@ public async Task CanExecutePlanWithTreeStepsAsync() result.GetValue()); } + [Fact] + public async Task ConPlanStepsTriggerKernelEventsAsync() + { + // Arrange + Kernel kernel = this.InitializeKernel(); + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + var subPlan = new Plan("Write a poem or joke"); + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; + var expectedInvocations = 4; + // 1 - Outer Plan - Write poem and send email goal + // 2 - Inner Plan - Write poem or joke goal + // 3 - Inner Plan - Step 1 - WritePoem + // 4 - Inner Plan - Step 2 - WritePoem + // 5 - Inner Plan - Step 3 - WritePoem + // 6 - Outer Plan - Step 1 - SendEmail + + subPlan.AddSteps(emailFunctions["WritePoem"], emailFunctions["WritePoem"], emailFunctions["WritePoem"]); + plan.AddSteps(subPlan, new Plan(emailFunctions["SendEmail"])); + plan.State.Set("email_address", "something@email.com"); + + var invokingCalls = 0; + var invokedCalls = 0; + var invokingListFunctions = new List(); + var invokedListFunctions = new List(); + void FunctionInvoking(object? sender, FunctionInvokingEventArgs e) + { + invokingListFunctions.Add(e.Function.Metadata); + invokingCalls++; + } + + void FunctionInvoked(object? 
sender, FunctionInvokedEventArgs e) + { + invokedListFunctions.Add(e.Function.Metadata); + invokedCalls++; + } + + kernel.FunctionInvoking += FunctionInvoking; + kernel.FunctionInvoked += FunctionInvoked; + + // Act + var result = await plan.InvokeAsync(kernel, "PlanInput"); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedInvocations, invokingCalls); + Assert.Equal(expectedInvocations, invokedCalls); + + // Expected invoking sequence + Assert.Equal(invokingListFunctions[0].Name, emailFunctions["WritePoem"].Name); + Assert.Equal(invokingListFunctions[1].Name, emailFunctions["WritePoem"].Name); + Assert.Equal(invokingListFunctions[2].Name, emailFunctions["WritePoem"].Name); + Assert.Equal(invokingListFunctions[3].Name, emailFunctions["SendEmail"].Name); + + // Expected invoked sequence + Assert.Equal(invokedListFunctions[0].Name, emailFunctions["WritePoem"].Name); + Assert.Equal(invokedListFunctions[1].Name, emailFunctions["WritePoem"].Name); + Assert.Equal(invokedListFunctions[2].Name, emailFunctions["WritePoem"].Name); + Assert.Equal(invokedListFunctions[3].Name, emailFunctions["SendEmail"].Name); + } + [Theory] [InlineData("", "Write a poem or joke and send it in an e-mail to Kai.", "")] [InlineData("Hello World!", "Write a poem or joke and send it in an e-mail to Kai.", "some_email@email.com")] public async Task CanExecuteRunPlanSimpleManualStateAsync(string input, string goal, string email) { // Arrange - IKernel target = this.InitializeKernel(); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + Kernel kernel = this.InitializeKernel(); + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; // Create the input mapping from parent (plan) plan state to child plan (sendEmailPlan) state. var cv = new ContextVariables(); @@ -167,7 +224,7 @@ public async Task CanExecuteRunPlanSimpleManualStateAsync(string input, string g plan.State.Set("TheEmailFromState", email); // manually prepare the state // Act - var result = await target.StepAsync(input, plan); + var result = await kernel.StepAsync(input, plan); // Assert var expectedBody = string.IsNullOrEmpty(input) ? goal : input; @@ -184,8 +241,8 @@ public async Task CanExecuteRunPlanSimpleManualStateAsync(string input, string g public async Task CanExecuteRunPlanSimpleManualStateNoVariableAsync(string input, string goal, string email) { // Arrange - IKernel target = this.InitializeKernel(); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + Kernel kernel = this.InitializeKernel(); + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; // Create the input mapping from parent (plan) plan state to child plan (sendEmailPlan) state. var cv = new ContextVariables(); @@ -200,7 +257,7 @@ public async Task CanExecuteRunPlanSimpleManualStateNoVariableAsync(string input plan.State.Set("email_address", email); // manually prepare the state // Act - var result = await target.StepAsync(input, plan); + var result = await kernel.StepAsync(input, plan); // Assert var expectedBody = string.IsNullOrEmpty(input) ? 
goal : input; @@ -217,8 +274,8 @@ public async Task CanExecuteRunPlanSimpleManualStateNoVariableAsync(string input public async Task CanExecuteRunPlanManualStateAsync(string input, string goal, string email) { // Arrange - IKernel target = this.InitializeKernel(); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + Kernel kernel = this.InitializeKernel(); + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; // Create the input mapping from parent (plan) plan state to child plan (sendEmailPlan) state. var cv = new ContextVariables(); @@ -233,7 +290,7 @@ public async Task CanExecuteRunPlanManualStateAsync(string input, string goal, s plan.State.Set("TheEmailFromState", email); // manually prepare the state // Act - var result = await target.StepAsync(input, plan); + var result = await kernel.StepAsync(input, plan); // Assert var expectedBody = string.IsNullOrEmpty(input) ? goal : input; @@ -247,11 +304,11 @@ public async Task CanExecuteRunPlanManualStateAsync(string input, string goal, s public async Task CanExecuteRunPlanAsync(string goal, string inputToSummarize, string inputLanguage, string inputName, string expectedEmail) { // Arrange - IKernel target = this.InitializeKernel(); + Kernel kernel = this.InitializeKernel(); - var summarizePlugin = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - var writerPlugin = TestHelpers.ImportSamplePlugins(target, "WriterPlugin"); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + var summarizePlugin = kernel.Plugins["SummarizePlugin"]; + var writerPlugin = kernel.Plugins["WriterPlugin"]; + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; var expectedBody = $"Sent email to: {expectedEmail}. Body:".Trim(); @@ -293,19 +350,19 @@ public async Task CanExecuteRunPlanAsync(string goal, string inputToSummarize, s plan.AddSteps(summarizePlan, translatePlan, getEmailPlan, sendEmailPlan); // Act - var result = await target.StepAsync(inputToSummarize, plan); + var result = await kernel.StepAsync(inputToSummarize, plan); Assert.Equal(4, result.Steps.Count); Assert.Equal(1, result.NextStepIndex); Assert.True(result.HasNextStep); - result = await target.StepAsync(result); + result = await kernel.StepAsync(result); Assert.Equal(4, result.Steps.Count); Assert.Equal(2, result.NextStepIndex); Assert.True(result.HasNextStep); - result = await target.StepAsync(result); + result = await kernel.StepAsync(result); Assert.Equal(4, result.Steps.Count); Assert.Equal(3, result.NextStepIndex); Assert.True(result.HasNextStep); - result = await target.StepAsync(result); + result = await kernel.StepAsync(result); // Assert Assert.Equal(4, result.Steps.Count); @@ -321,10 +378,10 @@ public async Task CanExecuteRunPlanAsync(string goal, string inputToSummarize, s public async Task CanExecuteRunSequentialAsync(string goal, string inputToSummarize, string inputLanguage, string inputName, string expectedEmail) { // Arrange - IKernel target = this.InitializeKernel(); - var summarizePlugin = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - var writerPlugin = TestHelpers.ImportSamplePlugins(target, "WriterPlugin"); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + Kernel kernel = this.InitializeKernel(); + var summarizePlugin = kernel.Plugins["SummarizePlugin"]; + var writerPlugin = kernel.Plugins["WriterPlugin"]; + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; var expectedBody = $"Sent email to: {expectedEmail}. 
Body:".Trim(); @@ -367,7 +424,7 @@ public async Task CanExecuteRunSequentialAsync(string goal, string inputToSummar plan.AddSteps(summarizePlan, translatePlan, getEmailPlan, sendEmailPlan); // Act - var result = (await target.RunAsync(inputToSummarize, plan)).GetValue(); + var result = (await plan.InvokeAsync(kernel, inputToSummarize)).GetValue(); // Assert Assert.NotNull(result); @@ -381,14 +438,14 @@ public async Task CanExecuteRunSequentialOnDeserializedPlanAsync(string goal, st string expectedEmail) { // Arrange - IKernel target = this.InitializeKernel(); - var summarizePlugin = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - var writerPlugin = TestHelpers.ImportSamplePlugins(target, "WriterPlugin"); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + Kernel kernel = this.InitializeKernel(); + var summarizePlugins = kernel.Plugins["SummarizePlugin"]; + var writerPlugin = kernel.Plugins["WriterPlugin"]; + var emailFunction = kernel.Plugins[nameof(EmailPluginFake)]; var expectedBody = $"Sent email to: {expectedEmail}. Body:".Trim(); - var summarizePlan = new Plan(summarizePlugin["Summarize"]); + var summarizePlan = new Plan(summarizePlugins["Summarize"]); var cv = new ContextVariables(); cv.Set("language", inputLanguage); @@ -409,7 +466,7 @@ public async Task CanExecuteRunSequentialOnDeserializedPlanAsync(string goal, st { "TheEmailFromState" }; - var getEmailPlan = new Plan(emailFunctions["GetEmailAddress"]) + var getEmailPlan = new Plan(emailFunction["GetEmailAddress"]) { Parameters = cv, Outputs = outputs, @@ -418,7 +475,7 @@ public async Task CanExecuteRunSequentialOnDeserializedPlanAsync(string goal, st cv = new ContextVariables(); cv.Set("email_address", "$TheEmailFromState"); cv.Set("input", "$TRANSLATED_SUMMARY"); - var sendEmailPlan = new Plan(emailFunctions["SendEmail"]) + var sendEmailPlan = new Plan(emailFunction["SendEmail"]) { Parameters = cv }; @@ -428,8 +485,8 @@ public async Task CanExecuteRunSequentialOnDeserializedPlanAsync(string goal, st // Act var serializedPlan = plan.ToJson(); - var deserializedPlan = Plan.FromJson(serializedPlan, target.Functions); - var result = (await target.RunAsync(inputToSummarize, deserializedPlan)).GetValue(); + var deserializedPlan = Plan.FromJson(serializedPlan, kernel.Plugins); + var result = (await deserializedPlan.InvokeAsync(kernel, inputToSummarize)).GetValue(); // Assert Assert.NotNull(result); @@ -442,11 +499,11 @@ public async Task CanExecuteRunSequentialOnDeserializedPlanAsync(string goal, st public async Task CanExecuteRunSequentialFunctionsAsync(string goal, string inputToSummarize, string inputLanguage, string expectedEmail) { // Arrange - IKernel target = this.InitializeKernel(); + Kernel kernel = this.InitializeKernel(); - var summarizePlugin = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - var writerPlugin = TestHelpers.ImportSamplePlugins(target, "WriterPlugin"); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + var summarizePlugin = kernel.Plugins["SummarizePlugin"]; + var writerPlugin = kernel.Plugins["WriterPlugin"]; + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; var expectedBody = $"Sent email to: {expectedEmail}. 
Body:".Trim(); @@ -462,7 +519,7 @@ public async Task CanExecuteRunSequentialFunctionsAsync(string goal, string inpu cv.Update(inputToSummarize); cv.Set("email_address", expectedEmail); cv.Set("language", inputLanguage); - var result = await target.RunAsync(cv, plan); + var result = await plan.InvokeAsync(kernel, cv); // Assert Assert.Contains(expectedBody, result.GetValue<string>(), StringComparison.OrdinalIgnoreCase); @@ -470,11 +527,11 @@ public async Task CanExecuteRunSequentialFunctionsAsync(string goal, string inpu [Theory] [InlineData("computers")] - public async Task CanImportAndRunPlanAsync(string input) + public async Task CanRunPlanAsync(string input) { // Arrange - IKernel target = this.InitializeKernel(); - var emailFunctions = target.ImportFunctions(new EmailPluginFake()); + Kernel kernel = this.InitializeKernel(); + var emailFunctions = kernel.Plugins[nameof(EmailPluginFake)]; var plan = new Plan("Write a poem about a topic and send in an email."); @@ -490,16 +547,14 @@ public async Task CanImportAndRunPlanAsync(string input) plan.Outputs.Add("EMAIL_RESULT"); //Act - var t = target.ImportPlan(plan); - - var result = await t.InvokeAsync(input, target); + var result = await plan.InvokeAsync(kernel, input); // Assert Assert.NotNull(result); Assert.Equal($"Sent email to: default@email.com. Body: Roses are red, violets are blue, {input} is hard, so is this test.", result.GetValue<string>()); } - private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = false) + private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = false) { AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get<AzureOpenAIConfiguration>(); Assert.NotNull(azureOpenAIConfiguration); @@ -507,20 +562,18 @@ private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = AzureOpenAIConfiguration? azureOpenAIEmbeddingsConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get<AzureOpenAIConfiguration>(); Assert.NotNull(azureOpenAIEmbeddingsConfiguration); - var builder = new KernelBuilder() - .WithLoggerFactory(this._loggerFactory) - .WithRetryBasic(); + IKernelBuilder builder = Kernel.CreateBuilder(); if (useChatModel) { - builder.WithAzureChatCompletionService( + builder.Services.AddAzureOpenAIChatCompletion( deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey); } else { - builder.WithAzureTextCompletionService( + builder.Services.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey); @@ -528,47 +581,26 @@ private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = if (useEmbeddings) { - builder - .WithAzureTextEmbeddingGenerationService( - deploymentName: azureOpenAIEmbeddingsConfiguration.DeploymentName, - endpoint: azureOpenAIEmbeddingsConfiguration.Endpoint, - apiKey: azureOpenAIEmbeddingsConfiguration.ApiKey); + builder.Services.AddAzureOpenAITextEmbeddingGeneration( + deploymentName: azureOpenAIEmbeddingsConfiguration.DeploymentName, + endpoint: azureOpenAIEmbeddingsConfiguration.Endpoint, + apiKey: azureOpenAIEmbeddingsConfiguration.ApiKey); } - var kernel = builder.Build(); + Kernel kernel = builder.Build(); // Import all sample plugins available for demonstration purposes.
TestHelpers.ImportAllSamplePlugins(kernel); - kernel.ImportFunctions(new EmailPluginFake()); + kernel.ImportPluginFromType(); return kernel; } - private readonly ILoggerFactory _loggerFactory; private readonly RedirectOutput _testOutputHelper; private readonly IConfigurationRoot _configuration; public void Dispose() { - this.Dispose(true); - GC.SuppressFinalize(this); - } - - ~PlanTests() - { - this.Dispose(false); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - if (this._loggerFactory is IDisposable ld) - { - ld.Dispose(); - } - - this._testOutputHelper.Dispose(); - } + this._testOutputHelper.Dispose(); } } diff --git a/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlanParserTests.cs b/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlanParserTests.cs index b16fd1fe6eb1..da15ecf935ee 100644 --- a/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlanParserTests.cs +++ b/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlanParserTests.cs @@ -2,15 +2,13 @@ using Microsoft.Extensions.Configuration; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Planners; -using Microsoft.SemanticKernel.Planners.Sequential; using Microsoft.SemanticKernel.Planning; using SemanticKernel.IntegrationTests.Fakes; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; using Xunit.Abstractions; -namespace SemanticKernel.IntegrationTests.Planners.SequentialPlanner; +namespace SemanticKernel.IntegrationTests.Planners.Sequential; public class SequentialPlanParserTests { @@ -32,16 +30,14 @@ public void CanCallToPlanFromXml() AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); - IKernel kernel = new KernelBuilder() - .WithRetryBasic() - .WithAzureTextCompletionService( + Kernel kernel = Kernel.CreateBuilder() + .WithAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId, - setAsDefault: true) + serviceId: azureOpenAIConfiguration.ServiceId) .Build(); - kernel.ImportFunctions(new EmailPluginFake(), "email"); + kernel.ImportPluginFromType("email"); TestHelpers.ImportSamplePlugins(kernel, "SummarizePlugin", "WriterPlugin"); var planString = @@ -54,7 +50,7 @@ public void CanCallToPlanFromXml() var goal = "Summarize an input, translate to french, and e-mail to John Doe"; // Act - var plan = planString.ToPlanFromXml(goal, kernel.Functions.GetFunctionCallback()); + var plan = planString.ToPlanFromXml(goal, kernel.Plugins.GetFunctionCallback()); // Assert Assert.NotNull(plan); diff --git a/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlannerTests.cs b/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlannerTests.cs index facf6f5cfc81..7eef3864cf8c 100644 --- a/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlannerTests.cs +++ b/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlannerTests.cs @@ -3,12 +3,10 @@ using System; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.AI.Embeddings; using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Planners; +using Microsoft.SemanticKernel.Planning; using 
Microsoft.SemanticKernel.Plugins.Memory; using SemanticKernel.IntegrationTests.Fakes; using SemanticKernel.IntegrationTests.TestSettings; @@ -16,13 +14,12 @@ using Xunit; using Xunit.Abstractions; -namespace SemanticKernel.IntegrationTests.Planners.SequentialPlanner; +namespace SemanticKernel.IntegrationTests.Planners.Sequential; public sealed class SequentialPlannerTests : IDisposable { public SequentialPlannerTests(ITestOutputHelper output) { - this._logger = NullLoggerFactory.Instance; this._testOutputHelper = new RedirectOutput(output); // Load configuration @@ -35,17 +32,17 @@ public SequentialPlannerTests(ITestOutputHelper output) } [Theory] - [InlineData(false, "Write a joke and send it in an e-mail to Kai.", "SendEmail", FunctionCollection.GlobalFunctionsPluginName)] - [InlineData(true, "Write a joke and send it in an e-mail to Kai.", "SendEmail", FunctionCollection.GlobalFunctionsPluginName)] + [InlineData(false, "Write a joke and send it in an e-mail to Kai.", "SendEmail", "EmailPluginFake")] + [InlineData(true, "Write a joke and send it in an e-mail to Kai.", "SendEmail", "EmailPluginFake")] public async Task CreatePlanFunctionFlowAsync(bool useChatModel, string prompt, string expectedFunction, string expectedPlugin) { // Arrange bool useEmbeddings = false; - IKernel kernel = this.InitializeKernel(useEmbeddings, useChatModel); - kernel.ImportFunctions(new EmailPluginFake()); + Kernel kernel = this.InitializeKernel(useEmbeddings, useChatModel); + kernel.ImportPluginFromType(); TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); - var planner = new Microsoft.SemanticKernel.Planners.SequentialPlanner(kernel); + var planner = new SequentialPlanner(kernel); // Act var plan = await planner.CreatePlanAsync(prompt); @@ -63,10 +60,10 @@ public async Task CreatePlanFunctionFlowAsync(bool useChatModel, string prompt, public async Task CreatePlanWithDefaultsAsync(string prompt, string expectedFunction, string expectedPlugin, string expectedDefault) { // Arrange - IKernel kernel = this.InitializeKernel(); + Kernel kernel = this.InitializeKernel(); TestHelpers.ImportSamplePlugins(kernel, "WriterPlugin", "MiscPlugin"); - var planner = new Microsoft.SemanticKernel.Planners.SequentialPlanner(kernel); + var planner = new SequentialPlanner(kernel); // Act var plan = await planner.CreatePlanAsync(prompt); @@ -81,22 +78,22 @@ public async Task CreatePlanWithDefaultsAsync(string prompt, string expectedFunc } [RetryTheory] - [InlineData("Write a poem and a joke and send it in an e-mail to Kai.", "SendEmail", FunctionCollection.GlobalFunctionsPluginName)] + [InlineData("Write a poem and a joke and send it in an e-mail to Kai.", "SendEmail", "EmailPluginFake")] public async Task CreatePlanGoalRelevantAsync(string prompt, string expectedFunction, string expectedPlugin) { // Arrange bool useEmbeddings = true; - IKernel kernel = this.InitializeKernel(useEmbeddings); + Kernel kernel = this.InitializeKernel(useEmbeddings); ISemanticTextMemory memory = this.InitializeMemory(kernel.GetService()); - kernel.ImportFunctions(new EmailPluginFake()); + kernel.ImportPluginFromType(); // Import all sample plugins available for demonstration purposes. 
TestHelpers.ImportAllSamplePlugins(kernel); - var planner = new Microsoft.SemanticKernel.Planners.SequentialPlanner(kernel, - new SequentialPlannerConfig { SemanticMemoryConfig = new() { RelevancyThreshold = 0.65, MaxRelevantFunctions = 30, Memory = memory } }); + var planner = new SequentialPlanner(kernel, + new() { SemanticMemoryConfig = new() { RelevancyThreshold = 0.65, MaxRelevantFunctions = 30, Memory = memory } }); // Act var plan = await planner.CreatePlanAsync(prompt); @@ -109,7 +106,7 @@ public async Task CreatePlanGoalRelevantAsync(string prompt, string expectedFunc step.PluginName.Equals(expectedPlugin, StringComparison.OrdinalIgnoreCase)); } - private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = false) + private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = false) { AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); @@ -117,19 +114,18 @@ private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = AzureOpenAIConfiguration? azureOpenAIEmbeddingsConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); Assert.NotNull(azureOpenAIEmbeddingsConfiguration); - var builder = new KernelBuilder().WithLoggerFactory(this._logger); - builder.WithRetryBasic(); + IKernelBuilder builder = Kernel.CreateBuilder(); if (useChatModel) { - builder.WithAzureChatCompletionService( + builder.Services.AddAzureOpenAIChatCompletion( deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey); } else { - builder.WithAzureTextCompletionService( + builder.Services.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey); @@ -137,53 +133,30 @@ private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = if (useEmbeddings) { - builder.WithAzureTextEmbeddingGenerationService( + builder.Services.AddAzureOpenAITextEmbeddingGeneration( deploymentName: azureOpenAIEmbeddingsConfiguration.DeploymentName, endpoint: azureOpenAIEmbeddingsConfiguration.Endpoint, apiKey: azureOpenAIEmbeddingsConfiguration.ApiKey); } - var kernel = builder.Build(); - - return kernel; + return builder.Build(); } private ISemanticTextMemory InitializeMemory(ITextEmbeddingGeneration textEmbeddingGeneration) { var builder = new MemoryBuilder(); - builder.WithLoggerFactory(this._logger); builder.WithMemoryStore(new VolatileMemoryStore()); builder.WithTextEmbeddingGeneration(textEmbeddingGeneration); return builder.Build(); } - private readonly ILoggerFactory _logger; private readonly RedirectOutput _testOutputHelper; private readonly IConfigurationRoot _configuration; public void Dispose() { - this.Dispose(true); - GC.SuppressFinalize(this); - } - - ~SequentialPlannerTests() - { - this.Dispose(false); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - if (this._logger is IDisposable ld) - { - ld.Dispose(); - } - - this._testOutputHelper.Dispose(); - } + this._testOutputHelper.Dispose(); } } diff --git a/dotnet/src/IntegrationTests/Planners/Stepwise/FunctionCallingStepwisePlannerTests.cs b/dotnet/src/IntegrationTests/Planners/Stepwise/FunctionCallingStepwisePlannerTests.cs new file mode 100644 index 000000000000..e2215e152c05 --- /dev/null +++ 
b/dotnet/src/IntegrationTests/Planners/Stepwise/FunctionCallingStepwisePlannerTests.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Planning; +using Microsoft.SemanticKernel.Plugins.Core; +using Microsoft.SemanticKernel.Plugins.Web; +using Microsoft.SemanticKernel.Plugins.Web.Bing; +using SemanticKernel.IntegrationTests.Fakes; +using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Planners.Stepwise; +public sealed class FunctionCallingStepwisePlannerTests : IDisposable +{ + private readonly string _bingApiKey; + + public FunctionCallingStepwisePlannerTests(ITestOutputHelper output) + { + this._logger = new XunitLogger(output); + this._testOutputHelper = new RedirectOutput(output); + Console.SetOut(this._testOutputHelper); + + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + string? bingApiKeyCandidate = this._configuration["Bing:ApiKey"]; + Assert.NotNull(bingApiKeyCandidate); + this._bingApiKey = bingApiKeyCandidate; + } + + [Theory] + [InlineData("What is the tallest mountain on Earth? How tall is it?", new string[] { "WebSearch-Search" })] + [InlineData("What is the weather in Seattle?", new string[] { "WebSearch-Search" })] + [InlineData("What is the current hour number, plus 5?", new string[] { "Time-HourNumber", "Math-Add" })] + [InlineData("What is 387 minus 22? 
Email the solution to John and Mary.", new string[] { "Math-Subtract", "Email-GetEmailAddress", "Email-SendEmail" })] + public async Task CanExecuteStepwisePlanAsync(string prompt, string[] expectedFunctions) + { + // Arrange + Kernel kernel = this.InitializeKernel(); + var bingConnector = new BingConnector(this._bingApiKey); + var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); + kernel.ImportPluginFromObject(webSearchEnginePlugin, "WebSearch"); + kernel.ImportPluginFromType<TimePlugin>("Time"); + kernel.ImportPluginFromType<MathPlugin>("Math"); + kernel.ImportPluginFromType<EmailPluginFake>("Email"); + + var planner = new FunctionCallingStepwisePlanner( + new FunctionCallingStepwisePlannerOptions() { MaxIterations = 10 }); + + // Act + var planResult = await planner.ExecuteAsync(kernel, prompt); + + // Assert - should contain the expected answer & function calls within the maximum iterations + Assert.NotNull(planResult); + Assert.NotEqual(string.Empty, planResult.FinalAnswer); + Assert.True(planResult.Iterations > 0); + Assert.True(planResult.Iterations <= 10); + Assert.NotEmpty(planResult.FinalAnswer); + + string serializedChatHistory = JsonSerializer.Serialize(planResult.ChatHistory); + foreach (string expectedFunction in expectedFunctions) + { + Assert.Contains(expectedFunction, serializedChatHistory, StringComparison.InvariantCultureIgnoreCase); + } + } + + [RetryFact(typeof(HttpOperationException))] + public async Task DoesNotThrowWhenPluginFunctionThrowsNonCriticalExceptionAsync() + { + // Arrange + Kernel kernel = this.InitializeKernel(); + + var emailPluginFake = new ThrowingEmailPluginFake(); + kernel.Plugins.Add( + KernelPluginFactory.CreateFromFunctions( + "Email", + new[] { + KernelFunctionFactory.CreateFromMethod(emailPluginFake.WritePoemAsync), + KernelFunctionFactory.CreateFromMethod(emailPluginFake.SendEmailAsync), + })); + + var planner = new FunctionCallingStepwisePlanner( + new FunctionCallingStepwisePlannerOptions() { MaxIterations = 5 }); + + // Act + var planResult = await planner.ExecuteAsync(kernel, "Email a poem about cats to test@example.com"); + + // Assert - should contain the expected answer & function calls within the maximum iterations + Assert.NotNull(planResult); + Assert.True(planResult.Iterations > 0); + Assert.True(planResult.Iterations <= 5); + + string serializedChatHistory = JsonSerializer.Serialize(planResult.ChatHistory); + Assert.Contains("Email-WritePoem", serializedChatHistory, StringComparison.InvariantCultureIgnoreCase); + Assert.Contains("Email-SendEmail", serializedChatHistory, StringComparison.InvariantCultureIgnoreCase); + } + + [RetryFact(typeof(HttpOperationException))] + public async Task ThrowsWhenPluginFunctionThrowsCriticalExceptionAsync() + { + // Arrange + Kernel kernel = this.InitializeKernel(); + + var emailPluginFake = new ThrowingEmailPluginFake(); + kernel.Plugins.Add( + KernelPluginFactory.CreateFromFunctions( + "Email", + new[] { + KernelFunctionFactory.CreateFromMethod(emailPluginFake.WriteJokeAsync), + KernelFunctionFactory.CreateFromMethod(emailPluginFake.SendEmailAsync), + })); + + var planner = new FunctionCallingStepwisePlanner( + new FunctionCallingStepwisePlannerOptions() { MaxIterations = 5 }); + + // Act & Assert + // Planner should call ThrowingEmailPluginFake.WriteJokeAsync, which throws InvalidProgramException + await Assert.ThrowsAsync<InvalidProgramException>(async () => await planner.ExecuteAsync(kernel, "Email a joke to test@example.com")); + } + + private Kernel InitializeKernel() + { + OpenAIConfiguration?
openAIConfiguration = this._configuration.GetSection("Planners:OpenAI").Get(); + Assert.NotNull(openAIConfiguration); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(this._logger); + builder.AddOpenAIChatCompletion( + modelId: openAIConfiguration.ModelId, + apiKey: openAIConfiguration.ApiKey); + + var kernel = builder.Build(); + + return kernel; + } + + private readonly RedirectOutput _testOutputHelper; + private readonly IConfigurationRoot _configuration; + private readonly XunitLogger _logger; + + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + ~FunctionCallingStepwisePlannerTests() + { + this.Dispose(false); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + this._logger.Dispose(); + this._testOutputHelper.Dispose(); + } + } +} diff --git a/dotnet/src/IntegrationTests/Planners/StepwisePlanner/FunctionCallingStepwisePlannerTests.cs b/dotnet/src/IntegrationTests/Planners/StepwisePlanner/FunctionCallingStepwisePlannerTests.cs new file mode 100644 index 000000000000..f4e743fdb989 --- /dev/null +++ b/dotnet/src/IntegrationTests/Planners/StepwisePlanner/FunctionCallingStepwisePlannerTests.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Planning; +using Microsoft.SemanticKernel.Plugins.Core; +using Microsoft.SemanticKernel.Plugins.Web; +using Microsoft.SemanticKernel.Plugins.Web.Bing; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Planners.Stepwise; + +public sealed class FunctionCallingStepwisePlannerTests : IDisposable +{ + private readonly string _bingApiKey; + + public FunctionCallingStepwisePlannerTests(ITestOutputHelper output) + { + this._testOutputHelper = new RedirectOutput(output); + + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + string? bingApiKeyCandidate = this._configuration["Bing:ApiKey"]; + Assert.NotNull(bingApiKeyCandidate); + this._bingApiKey = bingApiKeyCandidate; + } + + [Theory(Skip = "Requires model deployment that supports function calling.")] + [InlineData("What is the tallest mountain on Earth? 
How tall is it?", "Everest")] + [InlineData("What is the weather in Seattle?", "Seattle")] + public async Task CanExecuteStepwisePlanAsync(string prompt, string partialExpectedAnswer) + { + // Arrange + bool useEmbeddings = false; + Kernel kernel = this.InitializeKernel(useEmbeddings); + var bingConnector = new BingConnector(this._bingApiKey); + var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); + kernel.ImportPluginFromObject(webSearchEnginePlugin, "WebSearch"); + kernel.ImportPluginFromType("time"); + + var planner = new FunctionCallingStepwisePlanner( + kernel, + new FunctionCallingStepwisePlannerConfig() { MaxIterations = 10 }); + + // Act + var planResult = await planner.ExecuteAsync(prompt); + + // Assert - should contain the expected answer + Assert.NotNull(planResult); + Assert.NotEqual(string.Empty, planResult.FinalAnswer); + Assert.Contains(partialExpectedAnswer, planResult.FinalAnswer, StringComparison.InvariantCultureIgnoreCase); + Assert.True(planResult.Iterations > 0); + Assert.True(planResult.Iterations <= 10); + } + + private Kernel InitializeKernel(bool useEmbeddings = false) + { + AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + AzureOpenAIConfiguration? azureOpenAIEmbeddingsConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); + Assert.NotNull(azureOpenAIEmbeddingsConfiguration); + + var builder = Kernel.CreateBuilder() + .WithAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey); + if (useEmbeddings) + { + builder.WithAzureOpenAITextEmbeddingGeneration( + deploymentName: azureOpenAIEmbeddingsConfiguration.DeploymentName, + endpoint: azureOpenAIEmbeddingsConfiguration.Endpoint, + apiKey: azureOpenAIEmbeddingsConfiguration.ApiKey); + } + + return builder.Build(); + } + + private readonly RedirectOutput _testOutputHelper; + private readonly IConfigurationRoot _configuration; + + public void Dispose() + { + this._testOutputHelper.Dispose(); + } +} diff --git a/dotnet/src/IntegrationTests/Planners/StepwisePlanner/StepwisePlannerTests.cs b/dotnet/src/IntegrationTests/Planners/StepwisePlanner/StepwisePlannerTests.cs index 79881ed0e79a..4a82dde5b7f2 100644 --- a/dotnet/src/IntegrationTests/Planners/StepwisePlanner/StepwisePlannerTests.cs +++ b/dotnet/src/IntegrationTests/Planners/StepwisePlanner/StepwisePlannerTests.cs @@ -3,12 +3,9 @@ using System; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; -using Microsoft.SemanticKernel.Planners; +using Microsoft.SemanticKernel.Plugins.OpenApi.OpenAI; +using Microsoft.SemanticKernel.Planning; using Microsoft.SemanticKernel.Plugins.Core; using Microsoft.SemanticKernel.Plugins.Web; using Microsoft.SemanticKernel.Plugins.Web.Bing; @@ -17,7 +14,7 @@ using Xunit; using Xunit.Abstractions; -namespace SemanticKernel.IntegrationTests.Planners.StepwisePlanner; +namespace SemanticKernel.IntegrationTests.Planners.Stepwise; public sealed class StepwisePlannerTests : IDisposable { @@ -25,7 +22,6 @@ public sealed class StepwisePlannerTests : IDisposable public StepwisePlannerTests(ITestOutputHelper output) { - this._loggerFactory = 
NullLoggerFactory.Instance; this._testOutputHelper = new RedirectOutput(output); // Load configuration @@ -44,17 +40,17 @@ public StepwisePlannerTests(ITestOutputHelper output) [Theory] [InlineData(false, "Who is the current president of the United States? What is his current age divided by 2", "ExecutePlan", "StepwisePlanner")] [InlineData(true, "Who is the current president of the United States? What is his current age divided by 2", "ExecutePlan", "StepwisePlanner")] - public void CanCreateStepwisePlan(bool useChatModel, string prompt, string expectedFunction, string expectedPlugin) + public void CanCreateStepwisePlanAsync(bool useChatModel, string prompt, string expectedFunction, string expectedPlugin) { // Arrange bool useEmbeddings = false; - IKernel kernel = this.InitializeKernel(useEmbeddings, useChatModel); + Kernel kernel = this.InitializeKernel(useEmbeddings, useChatModel); var bingConnector = new BingConnector(this._bingApiKey); var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); - kernel.ImportFunctions(webSearchEnginePlugin, "WebSearch"); - kernel.ImportFunctions(new TimePlugin(), "time"); + kernel.ImportPluginFromObject(webSearchEnginePlugin, "WebSearch"); + kernel.ImportPluginFromType("time"); - var planner = new Microsoft.SemanticKernel.Planners.StepwisePlanner(kernel, new StepwisePlannerConfig() { MaxIterations = 10 }); + var planner = new StepwisePlanner(kernel, new() { MaxIterations = 10 }); // Act var plan = planner.CreatePlan(prompt); @@ -74,13 +70,13 @@ public async Task CanExecuteStepwisePlanAsync(bool useChatModel, string prompt, { // Arrange bool useEmbeddings = false; - IKernel kernel = this.InitializeKernel(useEmbeddings, useChatModel); + Kernel kernel = this.InitializeKernel(useEmbeddings, useChatModel); var bingConnector = new BingConnector(this._bingApiKey); var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); - kernel.ImportFunctions(webSearchEnginePlugin, "WebSearch"); - kernel.ImportFunctions(new TimePlugin(), "time"); + kernel.ImportPluginFromObject(webSearchEnginePlugin, "WebSearch"); + kernel.ImportPluginFromType("time"); - var planner = new Microsoft.SemanticKernel.Planners.StepwisePlanner(kernel, new StepwisePlannerConfig() { MaxIterations = 10 }); + var planner = new StepwisePlanner(kernel, new() { MaxIterations = 10 }); // Act var plan = planner.CreatePlan(prompt); @@ -99,23 +95,23 @@ public async Task CanExecuteStepwisePlanAsync(bool useChatModel, string prompt, public async Task ExecutePlanFailsWithTooManyFunctionsAsync() { // Arrange - IKernel kernel = this.InitializeKernel(); + Kernel kernel = this.InitializeKernel(); var bingConnector = new BingConnector(this._bingApiKey); var webSearchEnginePlugin = new WebSearchEnginePlugin(bingConnector); - kernel.ImportFunctions(webSearchEnginePlugin, "WebSearch"); - kernel.ImportFunctions(new TextPlugin(), "text"); - kernel.ImportFunctions(new ConversationSummaryPlugin(kernel), "ConversationSummary"); - kernel.ImportFunctions(new MathPlugin(), "Math"); - kernel.ImportFunctions(new FileIOPlugin(), "FileIO"); - kernel.ImportFunctions(new HttpPlugin(), "Http"); + kernel.ImportPluginFromObject(webSearchEnginePlugin, "WebSearch"); + kernel.ImportPluginFromType("text"); + kernel.ImportPluginFromType("ConversationSummary"); + kernel.ImportPluginFromType("Math"); + kernel.ImportPluginFromType("FileIO"); + kernel.ImportPluginFromType("Http"); - var planner = new Microsoft.SemanticKernel.Planners.StepwisePlanner(kernel, new() { MaxTokens = 1000 }); + var planner = new 
StepwisePlanner(kernel, new() { MaxTokens = 1000 }); // Act var plan = planner.CreatePlan("I need to buy a new brush for my cat. Can you show me options?"); // Assert - var ex = await Assert.ThrowsAsync(async () => await kernel.RunAsync(plan)); + var ex = await Assert.ThrowsAsync(async () => await plan.InvokeAsync(kernel)); Assert.Equal("ChatHistory is too long to get a completion. Try reducing the available functions.", ex.Message); } @@ -123,23 +119,23 @@ public async Task ExecutePlanFailsWithTooManyFunctionsAsync() public async Task ExecutePlanSucceedsWithAlmostTooManyFunctionsAsync() { // Arrange - IKernel kernel = this.InitializeKernel(); + Kernel kernel = this.InitializeKernel(); - _ = await kernel.ImportPluginFunctionsAsync("Klarna", new Uri("https://www.klarna.com/.well-known/ai-plugin.json"), new OpenApiFunctionExecutionParameters(enableDynamicOperationPayload: true)); + _ = await kernel.ImportPluginFromOpenAIAsync("Klarna", new Uri("https://www.klarna.com/.well-known/ai-plugin.json"), new OpenAIFunctionExecutionParameters(enableDynamicOperationPayload: true)); - var planner = new Microsoft.SemanticKernel.Planners.StepwisePlanner(kernel); + var planner = new StepwisePlanner(kernel); // Act var plan = planner.CreatePlan("I need to buy a new brush for my cat. Can you show me options?"); - var kernelResult = await kernel.RunAsync(plan); - var result = kernelResult.GetValue(); + var functionResult = await plan.InvokeAsync(kernel); + var result = functionResult.GetValue(); // Assert - should contain results, for now just verify it didn't fail Assert.NotNull(result); Assert.DoesNotContain("Result not found, review 'stepsTaken' to see what happened", result, StringComparison.OrdinalIgnoreCase); } - private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = false) + private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = false) { AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); @@ -147,20 +143,18 @@ private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = AzureOpenAIConfiguration? 
azureOpenAIEmbeddingsConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); Assert.NotNull(azureOpenAIEmbeddingsConfiguration); - var builder = new KernelBuilder() - .WithLoggerFactory(this._loggerFactory) - .WithRetryBasic(); + IKernelBuilder builder = Kernel.CreateBuilder(); if (useChatModel) { - builder.WithAzureChatCompletionService( + builder.Services.AddAzureOpenAIChatCompletion( deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey); } else { - builder.WithAzureTextCompletionService( + builder.Services.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, apiKey: azureOpenAIConfiguration.ApiKey); @@ -168,42 +162,20 @@ private IKernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = if (useEmbeddings) { - builder.WithAzureTextEmbeddingGenerationService( - deploymentName: azureOpenAIEmbeddingsConfiguration.DeploymentName, - endpoint: azureOpenAIEmbeddingsConfiguration.Endpoint, - apiKey: azureOpenAIEmbeddingsConfiguration.ApiKey); + builder.Services.AddAzureOpenAITextEmbeddingGeneration( + deploymentName: azureOpenAIEmbeddingsConfiguration.DeploymentName, + endpoint: azureOpenAIEmbeddingsConfiguration.Endpoint, + apiKey: azureOpenAIEmbeddingsConfiguration.ApiKey); } - var kernel = builder.Build(); - - return kernel; + return builder.Build(); } - private readonly ILoggerFactory _loggerFactory; private readonly RedirectOutput _testOutputHelper; private readonly IConfigurationRoot _configuration; public void Dispose() { - this.Dispose(true); - GC.SuppressFinalize(this); - } - - ~StepwisePlannerTests() - { - this.Dispose(false); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - if (this._loggerFactory is IDisposable ld) - { - ld.Dispose(); - } - - this._testOutputHelper.Dispose(); - } + this._testOutputHelper.Dispose(); } } diff --git a/dotnet/src/IntegrationTests/Plugins/PluginTests.cs b/dotnet/src/IntegrationTests/Plugins/PluginTests.cs index bde065e44624..a9e7ca8363d2 100644 --- a/dotnet/src/IntegrationTests/Plugins/PluginTests.cs +++ b/dotnet/src/IntegrationTests/Plugins/PluginTests.cs @@ -4,17 +4,46 @@ using System.Net.Http; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Functions.OpenAPI.Extensions; -using Microsoft.SemanticKernel.Orchestration; +using Microsoft.SemanticKernel.Plugins.OpenApi; using Xunit; namespace SemanticKernel.IntegrationTests.Plugins; + public class PluginTests { [Theory] [InlineData("https://www.klarna.com/.well-known/ai-plugin.json", "Klarna", "productsUsingGET", "Laptop", 3, 200, "US")] + public async Task QueryKlarnaOpenAIPluginAsync( + string pluginEndpoint, + string name, + string functionName, + string query, + int size, + int budget, + string countryCode) + { + // Arrange + var kernel = new Kernel(); + using HttpClient httpClient = new(); + + var plugin = await kernel.ImportPluginFromOpenAIAsync( + name, + new Uri(pluginEndpoint), + new OpenAIFunctionExecutionParameters(httpClient)); + + var arguments = new KernelArguments(); + arguments["q"] = query; + arguments["size"] = size; + arguments["max_price"] = budget; + arguments["countryCode"] = countryCode; + + // Act + await plugin[functionName].InvokeAsync(kernel, arguments); + } + + [Theory] [InlineData("https://www.klarna.com/us/shopping/public/openai/v0/api-docs/", "Klarna", "productsUsingGET", "Laptop", 3, 200, "US")] - 
public async Task QueryKlarnaPluginAsync( + public async Task QueryKlarnaOpenApiPluginAsync( string pluginEndpoint, string name, string functionName, @@ -24,29 +53,65 @@ public async Task QueryKlarnaPluginAsync( string countryCode) { // Arrange - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); using HttpClient httpClient = new(); - var plugin = await kernel.ImportPluginFunctionsAsync( + var plugin = await kernel.ImportPluginFromOpenApiAsync( name, new Uri(pluginEndpoint), new OpenApiFunctionExecutionParameters(httpClient)); - var contextVariables = new ContextVariables(); - contextVariables["q"] = query; - contextVariables["size"] = size.ToString(System.Globalization.CultureInfo.InvariantCulture); - contextVariables["budget"] = budget.ToString(System.Globalization.CultureInfo.InvariantCulture); - contextVariables["countryCode"] = countryCode; + var arguments = new KernelArguments(); + arguments["q"] = query; + arguments["size"] = size.ToString(System.Globalization.CultureInfo.InvariantCulture); + arguments["max_price"] = budget; + arguments["countryCode"] = countryCode; // Act - await plugin[functionName].InvokeAsync(kernel.CreateNewContext(contextVariables)); + await plugin[functionName].InvokeAsync(kernel, arguments); + } + + [Theory] + [InlineData("https://www.klarna.com/us/shopping/public/openai/v0/api-docs/", "Klarna", "productsUsingGET", "Laptop", 3, 200, "US")] + public async Task QueryKlarnaOpenApiPluginRunAsync( + string pluginEndpoint, + string name, + string functionName, + string query, + int size, + int budget, + string countryCode) + { + // Arrange + var kernel = new Kernel(); + using HttpClient httpClient = new(); + + var plugin = await kernel.ImportPluginFromOpenApiAsync( + name, + new Uri(pluginEndpoint), + new OpenApiFunctionExecutionParameters(httpClient)); + + var arguments = new KernelArguments(); + arguments["q"] = query; + arguments["size"] = size; + arguments["budget"] = budget.ToString(System.Globalization.CultureInfo.InvariantCulture); + arguments["countryCode"] = countryCode; + + // Act + var result = (await kernel.InvokeAsync(plugin[functionName], arguments)).GetValue(); + + // Assert + Assert.NotNull(result); + Assert.NotNull(result.ExpectedSchema); + Assert.NotNull(result.Content); + Assert.True(result.IsValid()); } [Theory] [InlineData("https://raw.githubusercontent.com/sisbell/chatgpt-plugin-store/main/manifests/instacart.com.json", "Instacart", "create", - "{\"title\":\"Shopping List\", \"ingredients\": [\"Flour\"], \"question\": \"what ingredients do I need to make chocolate cookies?\", \"partnerName\": \"OpenAI\" }" + "{\"title\":\"Shopping List\", \"ingredients\": [\"Flour\"], \"question\": \"what ingredients do I need to make chocolate cookies?\", \"partner_name\": \"OpenAI\" }" )] public async Task QueryInstacartPluginAsync( string pluginEndpoint, @@ -55,27 +120,27 @@ public async Task QueryInstacartPluginAsync( string payload) { // Arrange - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); using HttpClient httpClient = new(); //note that this plugin is not compliant according to the underlying validator in SK - var plugin = await kernel.ImportPluginFunctionsAsync( + var plugin = await kernel.ImportPluginFromOpenAIAsync( name, new Uri(pluginEndpoint), - new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true }); + new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false }); - var contextVariables = new ContextVariables(); - 
contextVariables["payload"] = payload; + var arguments = new KernelArguments(); + arguments["payload"] = payload; // Act - await plugin[functionName].InvokeAsync(kernel.CreateNewContext(contextVariables)); + await plugin[functionName].InvokeAsync(kernel, arguments); } [Theory] [InlineData("Plugins/instacart-ai-plugin.json", "Instacart", "create", - "{\"title\":\"Shopping List\", \"ingredients\": [\"Flour\"], \"question\": \"what ingredients do I need to make chocolate cookies?\", \"partnerName\": \"OpenAI\" }" + "{\"title\":\"Shopping List\", \"ingredients\": [\"Flour\"], \"question\": \"what ingredients do I need to make chocolate cookies?\", \"partner_name\": \"OpenAI\" }" )] public async Task QueryInstacartPluginFromStreamAsync( string pluginFilePath, @@ -86,20 +151,20 @@ public async Task QueryInstacartPluginFromStreamAsync( // Arrange using (var stream = System.IO.File.OpenRead(pluginFilePath)) { - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); using HttpClient httpClient = new(); - //note that this plugin is not compliant according to the underlying validator in SK - var plugin = await kernel.ImportPluginFunctionsAsync( + // note that this plugin is not compliant according to the underlying validator in SK + var plugin = await kernel.ImportPluginFromOpenAIAsync( name, stream, - new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true }); + new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false }); - var contextVariables = new ContextVariables(); - contextVariables["payload"] = payload; + var arguments = new KernelArguments(); + arguments["payload"] = payload; // Act - await plugin[functionName].InvokeAsync(kernel.CreateNewContext(contextVariables)); + await plugin[functionName].InvokeAsync(kernel, arguments); } } @@ -107,7 +172,7 @@ public async Task QueryInstacartPluginFromStreamAsync( [InlineData("Plugins/instacart-ai-plugin.json", "Instacart", "create", - "{\"title\":\"Shopping List\", \"ingredients\": [\"Flour\"], \"question\": \"what ingredients do I need to make chocolate cookies?\", \"partnerName\": \"OpenAI\" }" + "{\"title\":\"Shopping List\", \"ingredients\": [\"Flour\"], \"question\": \"what ingredients do I need to make chocolate cookies?\", \"partner_name\": \"OpenAI\" }" )] public async Task QueryInstacartPluginUsingRelativeFilePathAsync( string pluginFilePath, @@ -116,19 +181,50 @@ public async Task QueryInstacartPluginUsingRelativeFilePathAsync( string payload) { // Arrange - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); using HttpClient httpClient = new(); - //note that this plugin is not compliant according to the underlying validator in SK - var plugin = await kernel.ImportPluginFunctionsAsync( + // note that this plugin is not compliant according to the underlying validator in SK + var plugin = await kernel.ImportPluginFromOpenAIAsync( name, pluginFilePath, - new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true }); + new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false }); - var contextVariables = new ContextVariables(); - contextVariables["payload"] = payload; + var arguments = new KernelArguments(); + arguments["payload"] = payload; // Act - await plugin[functionName].InvokeAsync(kernel.CreateNewContext(contextVariables)); + await plugin[functionName].InvokeAsync(kernel, arguments); + } + + [Theory] + 
[InlineData("Plugins/instacart-ai-plugin.json", "Instacart", "create")] + public async Task QueryInstacartPluginWithDynamicPayloadAsync( + string pluginFilePath, + string name, + string functionName) + { + // Arrange + using (var stream = System.IO.File.OpenRead(pluginFilePath)) + { + var kernel = new Kernel(); + using HttpClient httpClient = new(); + + // note that this plugin is not compliant according to the underlying validator in SK + var plugin = await kernel.ImportPluginFromOpenAIAsync( + name, + stream, + new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = true }); ; + + var arguments = new KernelArguments(); + arguments["title"] = "Shopping List"; + arguments["ingredients"] = new string[] { "Flour", "Sugar", "Eggs" }; + arguments["instructions"] = new string[] { "Cream softened butter and granulated sugar", "Add eggs one at a time, mix well, and stir in vanilla extract", "Combine dry ingredients and mix" }; + arguments["question"] = "what ingredients do I need to make chocolate cookies?"; + arguments["partner_name"] = "OpenAI"; + + // Act + await plugin[functionName].InvokeAsync(kernel, arguments); + } } } diff --git a/dotnet/src/IntegrationTests/Plugins/SamplePluginsTests.cs b/dotnet/src/IntegrationTests/Plugins/SamplePluginsTests.cs index 040290ac3e97..17cf17e4c93e 100644 --- a/dotnet/src/IntegrationTests/Plugins/SamplePluginsTests.cs +++ b/dotnet/src/IntegrationTests/Plugins/SamplePluginsTests.cs @@ -5,26 +5,26 @@ using Xunit; namespace SemanticKernel.IntegrationTests.Plugins; + public class SamplePluginsTests { [Fact] - public void CanLoadSamplePluginsRequestSettings() + public void CanLoadSamplePluginsExecutionSettings() { // Arrange - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); // Act TestHelpers.ImportAllSamplePlugins(kernel); // Assert - Assert.NotNull(kernel.Functions); - var functionViews = kernel.Functions.GetFunctionViews(); - Assert.NotNull(functionViews); - Assert.Equal(48, functionViews.Count); // currently we have 48 sample plugin functions - functionViews.ToList().ForEach(view => + Assert.NotNull(kernel.Plugins); + var metadata = kernel.Plugins.GetFunctionsMetadata(); + Assert.NotNull(metadata); + Assert.Equal(48, metadata.Count); // currently we have 48 sample plugin functions + metadata.ToList().ForEach(function => { - var function = kernel.Functions.GetFunction(view.PluginName, view.Name); - Assert.NotNull(function); + Assert.NotNull(kernel.Plugins.GetFunction(function.PluginName, function.Name)); }); } @@ -33,20 +33,19 @@ public void CanLoadSamplePluginsRequestSettings() public void CanLoadSampleSkillsCompletions() { // Arrange - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); // Act TestHelpers.ImportAllSampleSkills(kernel); // Assert - Assert.NotNull(kernel.Functions); - var functionViews = kernel.Functions.GetFunctionViews(); - Assert.NotNull(functionViews); - Assert.Equal(48, functionViews.Count); // currently we have 48 sample plugin functions - functionViews.ToList().ForEach(view => + Assert.NotNull(kernel.Plugins); + var metadata = kernel.Plugins.GetFunctionsMetadata(); + Assert.NotNull(metadata); + Assert.Single(metadata); + metadata.ToList().ForEach(function => { - var function = kernel.Functions.GetFunction(view.PluginName, view.Name); - Assert.NotNull(function); + Assert.NotNull(kernel.Plugins.GetFunction(function.PluginName, function.Name)); }); } } diff --git a/dotnet/src/IntegrationTests/PromptTests.cs 
b/dotnet/src/IntegrationTests/PromptTests.cs new file mode 100644 index 000000000000..1d6c8decdb2d --- /dev/null +++ b/dotnet/src/IntegrationTests/PromptTests.cs @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Reflection; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using SemanticKernel.IntegrationTests.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests; + +public sealed class PromptTests : IDisposable +{ + public PromptTests(ITestOutputHelper output) + { + this._logger = new XunitLogger(output); + this._testOutputHelper = new RedirectOutput(output); + Console.SetOut(this._testOutputHelper); + + // Load configuration + this._configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + this._kernelBuilder = Kernel.CreateBuilder(); + this._kernelBuilder.Services.AddSingleton(this._logger); + } + + [Theory] + [InlineData("SemanticKernel.IntegrationTests.prompts.GenerateStory.yaml", false)] + [InlineData("SemanticKernel.IntegrationTests.prompts.GenerateStoryHandlebars.yaml", true)] + public async Task GenerateStoryTestAsync(string resourceName, bool isHandlebars) + { + // Arrange + var builder = this._kernelBuilder; + this.ConfigureAzureOpenAI(builder); + var kernel = builder.Build(); + + // Load prompt from resource + var promptTemplateFactory = isHandlebars ? 
new HandlebarsPromptTemplateFactory() : null; + using StreamReader reader = new(Assembly.GetExecutingAssembly().GetManifestResourceStream(resourceName)!); + var function = kernel.CreateFunctionFromPromptYaml(await reader.ReadToEndAsync(), promptTemplateFactory); + + // Act + FunctionResult actual = await kernel.InvokeAsync(function, arguments: new() + { + { "topic", "Dog" }, + { "length", "3" }, + }); + + // Assert + Assert.Contains("Dog", actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + #region private methods + + private readonly IKernelBuilder _kernelBuilder; + private readonly IConfigurationRoot _configuration; + private readonly XunitLogger _logger; + private readonly RedirectOutput _testOutputHelper; + + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + ~PromptTests() + { + this.Dispose(false); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + this._logger.Dispose(); + this._testOutputHelper.Dispose(); + } + } + + private void ConfigureAzureOpenAI(IKernelBuilder kernelBuilder) + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.DeploymentName); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + Assert.NotNull(azureOpenAIConfiguration.ApiKey); + Assert.NotNull(azureOpenAIConfiguration.ServiceId); + + kernelBuilder.AddAzureOpenAITextGeneration( + deploymentName: azureOpenAIConfiguration.DeploymentName, + modelId: azureOpenAIConfiguration.ModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: azureOpenAIConfiguration.ApiKey, + serviceId: azureOpenAIConfiguration.ServiceId); + } + #endregion +} diff --git a/dotnet/src/IntegrationTests/README.md b/dotnet/src/IntegrationTests/README.md index 8f5d0aa2252a..b7e58637a78a 100644 --- a/dotnet/src/IntegrationTests/README.md +++ b/dotnet/src/IntegrationTests/README.md @@ -48,6 +48,14 @@ dotnet user-secrets set "AzureOpenAIEmbeddings:ApiKey" "..." dotnet user-secrets set "HuggingFace:ApiKey" "..." dotnet user-secrets set "Bing:ApiKey" "..." dotnet user-secrets set "Postgres:ConnectionString" "..." + +dotnet user-secrets set "Planners:AzureOpenAI:Endpoint" "https://contoso.openai.azure.com/" +dotnet user-secrets set "Planners:AzureOpenAI:ChatDeploymentName" "gpt-4-1106-preview" +dotnet user-secrets set "Planners:AzureOpenAI:ServiceId" "gpt-4-1106-preview" +dotnet user-secrets set "Planners:AzureOpenAI:ApiKey" "..." + +dotnet user-secrets set "Planners:OpenAI:ModelId" "gpt-3.5-turbo-1106" +dotnet user-secrets set "Planners:OpenAI:ApiKey" "..." ``` ### Option 2: Use Configuration File diff --git a/dotnet/src/IntegrationTests/RedirectOutput.cs b/dotnet/src/IntegrationTests/RedirectOutput.cs index 1905b5849ea6..34cac5ba9654 100644 --- a/dotnet/src/IntegrationTests/RedirectOutput.cs +++ b/dotnet/src/IntegrationTests/RedirectOutput.cs @@ -44,9 +44,16 @@ public string GetLogs() public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? 
exception, Func formatter) { - var message = formatter(state, exception); - this._output?.WriteLine(message); - this._logs.AppendLine(message); + try + { + var message = formatter(state, exception); + this._logs.AppendLine(message); + this._output?.WriteLine(message); + } + catch (InvalidOperationException ioe) + { + Console.WriteLine($"RedirectOutput failed, reason: {ioe}"); + } } public ILogger CreateLogger(string categoryName) => this; diff --git a/dotnet/src/IntegrationTests/TemplateLanguage/PromptTemplateEngineTests.cs b/dotnet/src/IntegrationTests/TemplateLanguage/PromptTemplateEngineTests.cs deleted file mode 100644 index 5c68ca4d345a..000000000000 --- a/dotnet/src/IntegrationTests/TemplateLanguage/PromptTemplateEngineTests.cs +++ /dev/null @@ -1,242 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.IO; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.IntegrationTests.TemplateLanguage; - -#pragma warning disable VSTHRD103 // ok to use WriteLine synchronously -#pragma warning disable CA1849 // ok to use WriteLine synchronously - -public sealed class PromptTemplateEngineTests : IDisposable -{ - public PromptTemplateEngineTests(ITestOutputHelper output) - { - this._logger = new RedirectOutput(output); - this._target = new BasicPromptTemplateEngine(); - } - - [Fact] - public async Task ItSupportsVariablesAsync() - { - // Arrange - const string Input = "template tests"; - const string Winner = "SK"; - const string Template = "And the winner\n of {{$input}} \nis: {{ $winner }}!"; - - var kernel = new KernelBuilder().Build(); - var context = kernel.CreateNewContext(); - context.Variables["input"] = Input; - context.Variables["winner"] = Winner; - - // Act - var result = await this._target.RenderAsync(Template, context); - - // Assert - var expected = Template - .Replace("{{$input}}", Input, StringComparison.OrdinalIgnoreCase) - .Replace("{{ $winner }}", Winner, StringComparison.OrdinalIgnoreCase); - Assert.Equal(expected, result); - } - - [Fact] - public async Task ItSupportsValuesAsync() - { - // Arrange - const string Template = "And the winner\n of {{'template\ntests'}} \nis: {{ \"SK\" }}!"; - const string Expected = "And the winner\n of template\ntests \nis: SK!"; - - var kernel = new KernelBuilder().Build(); - var context = kernel.CreateNewContext(); - - // Act - var result = await this._target.RenderAsync(Template, context); - - // Assert - Assert.Equal(Expected, result); - } - - [Fact] - public async Task ItAllowsToPassVariablesToFunctionsAsync() - { - // Arrange - const string Template = "== {{my.check123 $call}} =="; - var kernel = new KernelBuilder().Build(); - kernel.ImportFunctions(new MyPlugin(), "my"); - var context = kernel.CreateNewContext(); - context.Variables["call"] = "123"; - - // Act - var result = await this._target.RenderAsync(Template, context); - - // Assert - Assert.Equal("== 123 ok ==", result); - } - - [Fact] - public async Task ItAllowsToPassValuesToFunctionsAsync() - { - // Arrange - const string Template = "== {{my.check123 '234'}} =="; - var kernel = new KernelBuilder().Build(); - kernel.ImportFunctions(new MyPlugin(), "my"); - var context = kernel.CreateNewContext(); - - // Act - var result = await this._target.RenderAsync(Template, context); - - // Assert - Assert.Equal("== 
234 != 123 ==", result); - } - - [Fact] - public async Task ItAllowsToPassEscapedValues1ToFunctionsAsync() - { - // Arrange - const char Esc = '\\'; - string template = "== {{my.check123 'a" + Esc + "'b'}} =="; - var kernel = new KernelBuilder().Build(); - kernel.ImportFunctions(new MyPlugin(), "my"); - var context = kernel.CreateNewContext(); - - // Act - var result = await this._target.RenderAsync(template, context); - - // Assert - Assert.Equal("== a'b != 123 ==", result); - } - - [Fact] - public async Task ItAllowsToPassEscapedValues2ToFunctionsAsync() - { - // Arrange - const char Esc = '\\'; - string template = "== {{my.check123 \"a" + Esc + "\"b\"}} =="; - var kernel = new KernelBuilder().Build(); - kernel.ImportFunctions(new MyPlugin(), "my"); - var context = kernel.CreateNewContext(); - - // Act - var result = await this._target.RenderAsync(template, context); - - // Assert - Assert.Equal("== a\"b != 123 ==", result); - } - - [Fact] - public async Task ItHandlesNamedArgsAsync() - { - // Arrange - string template = "Output: {{my.sayAge name=\"Mario\" birthdate=$birthdate exclamation='Wow, that\\'s surprising'}}"; - var kernel = new KernelBuilder().Build(); - kernel.ImportFunctions(new MyPlugin(), "my"); - var context = kernel.CreateNewContext(); - context.Variables["birthdate"] = "1981-08-20T00:00:00"; - - // Act - var result = await this._target.RenderAsync(template, context); - - // Assert - Assert.Equal("Output: Mario is 42 today. Wow, that's surprising!", result); - } - - [Theory] - [MemberData(nameof(GetTemplateLanguageTests))] - public async Task ItHandleEdgeCasesAsync(string template, string expectedResult) - { - // Arrange - var kernel = new KernelBuilder().Build(); - kernel.ImportFunctions(new MyPlugin()); - - // Act - this._logger.WriteLine("template: " + template); - this._logger.WriteLine("expected: " + expectedResult); - if (expectedResult.StartsWith("ERROR", StringComparison.OrdinalIgnoreCase)) - { - await Assert.ThrowsAsync( - async () => await this._target.RenderAsync(template, kernel.CreateNewContext())); - } - else - { - var result = await this._target.RenderAsync(template, kernel.CreateNewContext()); - this._logger.WriteLine(" result: " + result); - - // Assert - Assert.Equal(expectedResult, result); - } - } - - public static IEnumerable GetTemplateLanguageTests() - { - return GetTestData("TemplateLanguage/tests.txt"); - } - - public class MyPlugin - { - [SKFunction, Description("This is a test"), SKName("check123")] - public string MyFunction(string input) - { - return input == "123" ? "123 ok" : input + " != 123"; - } - - [SKFunction, Description("This is a test"), SKName("asis")] - public string? MyFunction2(string? input = null) - { - return input; - } - - [SKFunction, Description("This is a test"), SKName("sayAge")] - public string MyFunction3(string name, DateTime birthdate, string exclamation) - { - var today = new DateTime(2023, 8, 25); - TimeSpan timespan = today - birthdate; - int age = (int)(timespan.TotalDays / 365.25); - return $"{name} is {age} today. 
{exclamation}!"; - } - } - - #region internals - - private readonly RedirectOutput _logger; - private readonly BasicPromptTemplateEngine _target; - - private static IEnumerable GetTestData(string file) - { - if (!File.Exists(file)) { Assert.Fail("File not found: " + file); } - - var content = File.ReadLines(file); - var key = string.Empty; - foreach (string value in content) - { - if (string.IsNullOrEmpty(value) || value.StartsWith('#')) { continue; } - - if (string.IsNullOrEmpty(key)) - { - key = value; - } - else - { - yield return new string[] { key, value }; - key = string.Empty; - } - } - } - - public void Dispose() - { - this._logger.Dispose(); - } - - #endregion -} - -#pragma warning restore VSTHRD103 -#pragma warning restore CA1849 diff --git a/dotnet/src/IntegrationTests/TemplateLanguage/tests.txt b/dotnet/src/IntegrationTests/TemplateLanguage/tests.txt deleted file mode 100644 index ec73d0be348b..000000000000 --- a/dotnet/src/IntegrationTests/TemplateLanguage/tests.txt +++ /dev/null @@ -1,73 +0,0 @@ -# About this file: -# - The file is used by PromptTemplateEngineTests.cs to test SK template language. -# - By using a TXT file there is no ambiguity caused by C#/Python escaping syntax. -# - Empty lines and lines starting with "#" are ignored. -# - Lines are NOT trimmed, there might be empty spaces at the end on purpose. -# - The file contains multiple test cases. -# - Each test case consists of two lines: -# - line 1: the template to render -# - line 2: the expected result after rendering -# - If a template is invalid, line 2 contains the value "ERROR", e.g. a TemplateException is expected. - -"" -"" - -{} -{} - -{{}} -{{}} - -.{{asis}}. -.. - -a{{asis ''}}b -ab - -{{asis 'a'}} -a - -{{ asis 'foo' }} -foo - -# The second quote means the value is never closed, hiding the closing brackets -# turning the entire string as a static text -{{ asis 'foo\' }} -{{ asis 'foo\' }} - -{{ asis 'f\'11' }} -f'11 - -{{ asis "f\\\'22" }} -f\'22 - -# The last quote hides the closing }} -{{ call 'f\\'33" }} -{{ call 'f\\'33" }} - -# \ is escaped but the second quote is not, terminating the string -# After the string is terminated the token is invalid -{{ call 'f\\'x }} -ERROR - -# \ is escaped but the second quote is not, terminating the string -# After the string is terminated the token is invalid -{{ call 'f\\'xy }} -ERROR - -{{ "{{" }} and {{ "}}" }} x -{{ and }} x - - {{ " nothing special about these sequences: \ \0 \n \t \r \foo" }} - nothing special about these sequences: \ \0 \n \t \r \foo - -1{{ '\\' }}2 -1\2 - -# Even number of escaped \ -{{ "\\\\\\\\" }} -\\\\ - -# Odd number of escaped \ -{{ "\\\\\\\\\\" }} -\\\\\ diff --git a/dotnet/src/IntegrationTests/TestHelpers.cs b/dotnet/src/IntegrationTests/TestHelpers.cs index e48a925913e0..aa2497b9d5a2 100644 --- a/dotnet/src/IntegrationTests/TestHelpers.cs +++ b/dotnet/src/IntegrationTests/TestHelpers.cs @@ -1,8 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. 
using System; -using System.Collections.Generic; using System.IO; +using System.Linq; using System.Reflection; using Microsoft.SemanticKernel; @@ -10,9 +10,9 @@ namespace SemanticKernel.IntegrationTests; internal static class TestHelpers { - internal static void ImportAllSamplePlugins(IKernel kernel) + internal static void ImportAllSamplePlugins(Kernel kernel) { - ImportSampleSemanticFunctions(kernel, "../../../../../../samples/plugins", + ImportSamplePromptFunctions(kernel, "../../../../../../samples/plugins", "ChatPlugin", "SummarizePlugin", "WriterPlugin", @@ -26,28 +26,17 @@ internal static void ImportAllSamplePlugins(IKernel kernel) "QAPlugin"); } - internal static void ImportAllSampleSkills(IKernel kernel) + internal static void ImportAllSampleSkills(Kernel kernel) { - ImportSampleSemanticFunctions(kernel, "../../../../../../samples/skills", - "ChatSkill", - "SummarizeSkill", - "WriterSkill", - "CalendarSkill", - "ChildrensBookSkill", - "ClassificationSkill", - "CodingSkill", - "FunSkill", - "IntentDetectionSkill", - "MiscSkill", - "QASkill"); + ImportSamplePromptFunctions(kernel, "./skills", "FunSkill"); } - internal static IDictionary ImportSamplePlugins(IKernel kernel, params string[] pluginNames) + internal static IReadOnlyKernelPluginCollection ImportSamplePlugins(Kernel kernel, params string[] pluginNames) { - return ImportSampleSemanticFunctions(kernel, "../../../../../../samples/plugins", pluginNames); + return ImportSamplePromptFunctions(kernel, "../../../../../../samples/plugins", pluginNames); } - internal static IDictionary ImportSampleSemanticFunctions(IKernel kernel, string path, params string[] pluginNames) + internal static IReadOnlyKernelPluginCollection ImportSamplePromptFunctions(Kernel kernel, string path, params string[] pluginNames) { string? currentAssemblyDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location); if (string.IsNullOrWhiteSpace(currentAssemblyDirectory)) @@ -57,6 +46,8 @@ internal static IDictionary ImportSampleSemanticFunctions(I string parentDirectory = Path.GetFullPath(Path.Combine(currentAssemblyDirectory, path)); - return kernel.ImportSemanticFunctionsFromDirectory(parentDirectory, pluginNames); + return new KernelPluginCollection( + from pluginName in pluginNames + select kernel.ImportPluginFromPromptDirectory(Path.Combine(parentDirectory, pluginName))); } } diff --git a/dotnet/src/IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs b/dotnet/src/IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs index ad2b292d6ddb..d8663b240f55 100644 --- a/dotnet/src/IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs +++ b/dotnet/src/IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs @@ -12,17 +12,26 @@ internal sealed class AzureOpenAIConfiguration public string DeploymentName { get; set; } + public string ModelId { get; set; } + public string? ChatDeploymentName { get; set; } + public string ChatModelId { get; set; } + + public string EmbeddingModelId { get; set; } + public string Endpoint { get; set; } public string ApiKey { get; set; } - public AzureOpenAIConfiguration(string serviceId, string deploymentName, string endpoint, string apiKey, string? chatDeploymentName = null) + public AzureOpenAIConfiguration(string serviceId, string deploymentName, string endpoint, string apiKey, string? chatDeploymentName = null, string? modelId = null, string? chatModelId = null, string? 
embeddingModelId = null) { this.ServiceId = serviceId; this.DeploymentName = deploymentName; - this.ChatDeploymentName = chatDeploymentName; + this.ModelId = modelId ?? deploymentName; + this.ChatDeploymentName = deploymentName; + this.ChatModelId = chatModelId ?? deploymentName; + this.EmbeddingModelId = embeddingModelId ?? "text-embedding-ada-002"; this.Endpoint = endpoint; this.ApiKey = apiKey; } diff --git a/dotnet/src/IntegrationTests/WebPlugin/WebPluginTests.cs b/dotnet/src/IntegrationTests/WebPlugin/WebPluginTests.cs index 571078785f2c..00a8e8360f0a 100644 --- a/dotnet/src/IntegrationTests/WebPlugin/WebPluginTests.cs +++ b/dotnet/src/IntegrationTests/WebPlugin/WebPluginTests.cs @@ -1,13 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.IO; -using System.Threading.Tasks; using Microsoft.Extensions.Configuration; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Plugins.Web; -using Microsoft.SemanticKernel.Plugins.Web.Bing; using Xunit; using Xunit.Abstractions; @@ -38,52 +33,6 @@ public WebPluginTests(ITestOutputHelper output) this._bingApiKey = bingApiKeyCandidate; } - [Theory(Skip = "Bing search results not consistent enough for testing.")] - [InlineData("What is generally recognized as the tallest building in Seattle, Washington, USA?", "Columbia Center")] - public async Task BingPluginTestAsync(string prompt, string expectedAnswerContains) - { - // Arrange - IKernel kernel = new KernelBuilder().WithLoggerFactory(this._logger).Build(); - - using XunitLogger connectorLogger = new(this._output); - BingConnector connector = new(this._bingApiKey, connectorLogger); - Assert.NotEmpty(this._bingApiKey); - - WebSearchEnginePlugin plugin = new(connector); - var searchFunctions = kernel.ImportFunctions(plugin, "WebSearchEngine"); - - // Act - KernelResult result = await kernel.RunAsync( - prompt, - searchFunctions["Search"] - ); - - // Assert - Assert.Contains(expectedAnswerContains, result.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task WebFileDownloadPluginFileTestAsync() - { - // Arrange - IKernel kernel = new KernelBuilder().WithLoggerFactory(this._logger).Build(); - using XunitLogger pluginLogger = new(this._output); - var plugin = new WebFileDownloadPlugin(pluginLogger); - var downloadFunctions = kernel.ImportFunctions(plugin, "WebFileDownload"); - string fileWhereToSaveWebPage = Path.GetTempFileName(); - var contextVariables = new ContextVariables("https://www.microsoft.com"); - contextVariables.Set(WebFileDownloadPlugin.FilePathParamName, fileWhereToSaveWebPage); - - // Act - await kernel.RunAsync(contextVariables, downloadFunctions["DownloadToFile"]); - - // Assert - var fileInfo = new FileInfo(fileWhereToSaveWebPage); - Assert.True(fileInfo.Length > 0); - - File.Delete(fileWhereToSaveWebPage); - } - #region internals private readonly ITestOutputHelper _output; diff --git a/dotnet/src/IntegrationTests/prompts/GenerateStory.yaml b/dotnet/src/IntegrationTests/prompts/GenerateStory.yaml new file mode 100644 index 000000000000..fc5ecd88f34e --- /dev/null +++ b/dotnet/src/IntegrationTests/prompts/GenerateStory.yaml @@ -0,0 +1,17 @@ +name: GenerateStory +template: | + Tell a story about {{$topic}} that is {{$length}} sentences long. +template_format: semantic-kernel +description: A function that generates a story about a topic. +input_variables: + - name: topic + description: The topic of the story. 
+ is_required: true + - name: length + description: The number of sentences in the story. + is_required: true +output_variable: + description: The generated story. +execution_settings: + default: + temperature: 0.6 diff --git a/dotnet/src/IntegrationTests/prompts/GenerateStoryHandlebars.yaml b/dotnet/src/IntegrationTests/prompts/GenerateStoryHandlebars.yaml new file mode 100644 index 000000000000..b1cb891fb706 --- /dev/null +++ b/dotnet/src/IntegrationTests/prompts/GenerateStoryHandlebars.yaml @@ -0,0 +1,23 @@ +name: GenerateStory +template: | + Tell a story about {{topic}} that is {{length}} sentences long. +template_format: handlebars +description: A function that generates a story about a topic. +input_variables: + - name: topic + description: The topic of the story. + is_required: true + - name: length + description: The number of sentences in the story. + is_required: true +output_variable: + description: The generated story. +execution_settings: + service1: + model_id: gpt-4 + temperature: 0.6 + service2: + model_id: gpt-3 + temperature: 0.4 + default: + temperature: 0.5 diff --git a/samples/skills/FunSkill/Joke/config.json b/dotnet/src/IntegrationTests/skills/FunSkill/Joke/config.json similarity index 100% rename from samples/skills/FunSkill/Joke/config.json rename to dotnet/src/IntegrationTests/skills/FunSkill/Joke/config.json diff --git a/samples/skills/FunSkill/Joke/skprompt.txt b/dotnet/src/IntegrationTests/skills/FunSkill/Joke/skprompt.txt similarity index 100% rename from samples/skills/FunSkill/Joke/skprompt.txt rename to dotnet/src/IntegrationTests/skills/FunSkill/Joke/skprompt.txt diff --git a/dotnet/src/IntegrationTests/testsettings.json b/dotnet/src/IntegrationTests/testsettings.json index 2b5e41c5cbd7..0b96ea45992e 100644 --- a/dotnet/src/IntegrationTests/testsettings.json +++ b/dotnet/src/IntegrationTests/testsettings.json @@ -18,7 +18,7 @@ }, "AzureOpenAIEmbeddings": { "ServiceId": "azure-text-embedding-ada-002", - "DeploymentName": "text-embedding-ada-002", + "DeploymentName": "ada-002", "Endpoint": "", "ApiKey": "" }, @@ -30,5 +30,22 @@ }, "Postgres": { "ConnectionString": "" + }, + "MongoDB": { + "ConnectionString": "", + "VectorSearchCollection": "dotnetMSKNearestTest.nearestSearch" + }, + "Planners": { + "AzureOpenAI": { + "ServiceId": "azure-gpt-35-turbo", + "DeploymentName": "gpt-35-turbo", + "Endpoint": "", + "ApiKey": "" + }, + "OpenAI": { + "ServiceId": "openai-gpt-4", + "ModelId": "gpt-4", + "ApiKey": "" + } } } \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/planning/Extensions/ChatHistoryExtensions.cs b/dotnet/src/InternalUtilities/planning/Extensions/ChatHistoryExtensions.cs new file mode 100644 index 000000000000..979b16807a9c --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/Extensions/ChatHistoryExtensions.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Extension methods for class. +/// +internal static class ChatHistoryExtensions +{ + /// + /// Returns the number of tokens in the chat history. + /// + /// The chat history. + /// An additional message to include in the token count. + /// The index to start skipping messages. + /// The number of messages to skip. + /// The token counter to use. + internal static int GetTokenCount(this ChatHistory chatHistory, string? 
additionalMessage = null, int skipStart = 0, int skipCount = 0, TextChunker.TokenCounter? tokenCounter = null) + { + return tokenCounter is null ? + Default(chatHistory, additionalMessage, skipStart, skipCount) : + Custom(chatHistory, additionalMessage, skipStart, skipCount, tokenCounter); + + static int Default(ChatHistory chatHistory, string? additionalMessage, int skipStart, int skipCount) + { + int chars = 0; + bool prevMsg = false; + for (int i = 0; i < chatHistory.Count; i++) + { + if (i >= skipStart && i < skipStart + skipCount) + { + continue; + } + + chars += chatHistory[i].Content?.Length ?? 0; + + // +1 for "\n" if there was a previous message + if (prevMsg) + { + chars++; + } + prevMsg = true; + } + + if (additionalMessage is not null) + { + chars += 1 + additionalMessage.Length; // +1 for "\n" + } + + return chars / 4; // same as TextChunker's default token counter + } + + static int Custom(ChatHistory chatHistory, string? additionalMessage, int skipStart, int skipCount, TextChunker.TokenCounter tokenCounter) + { + var messages = string.Join("\n", chatHistory.Where((m, i) => i < skipStart || i >= skipStart + skipCount).Select(m => m.Content)); + + if (!string.IsNullOrEmpty(additionalMessage)) + { + messages = $"{messages}\n{additionalMessage}"; + } + + var tokenCount = tokenCounter(messages); + return tokenCount; + } + } +} diff --git a/dotnet/src/InternalUtilities/planning/Extensions/KernelFunctionMetadataExtensions.cs b/dotnet/src/InternalUtilities/planning/Extensions/KernelFunctionMetadataExtensions.cs new file mode 100644 index 000000000000..76d6e676e9a7 --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/Extensions/KernelFunctionMetadataExtensions.cs @@ -0,0 +1,100 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Provides extension methods for the class. +/// +internal static class KernelFunctionMetadataExtensions +{ + private const string SuccessfulResponseCode = "200"; + private const string SuccessfulResponseDescription = "Success"; + + /// + /// Creates a for a function. + /// + /// The function. + /// Indicates if the schema should include information about the output or return type of the function. + /// The delimiter to use between the plugin name and the function name. + /// An instance of + public static JsonSchemaFunctionView ToJsonSchemaFunctionView(this KernelFunctionMetadata function, bool includeOutputSchema = true, string nameDelimiter = "-") + { + var functionView = new JsonSchemaFunctionView + { + Name = $"{function.PluginName}{nameDelimiter}{function.Name}", + Description = function.Description, + }; + + var requiredProperties = new List(); + foreach (var parameter in function.Parameters) + { + var schema = parameter.Schema; + if (schema is not null) + { + functionView.Parameters.Properties.Add(parameter.Name, schema); + } + if (parameter.IsRequired) + { + requiredProperties.Add(parameter.Name); + } + } + + if (includeOutputSchema) + { + var functionResponse = new JsonSchemaFunctionResponse + { + Description = SuccessfulResponseDescription + }; + + functionResponse.Content.JsonResponse.Schema = function.ReturnParameter.Schema; + + functionView.FunctionResponses.Add(SuccessfulResponseCode, functionResponse); + } + + functionView.Parameters.Required = requiredProperties; + return functionView; + } + + /// + /// Create a manual-friendly string for a function. + /// + /// The function to create a manual-friendly string for. 
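+ /// Illustrative only (the plugin, function, and parameter names are hypothetical, not taken from this change):
+ /// for a function "WebSearch.Search" with a single parameter "query" described as "The text to search for",
+ /// the manual string built by this method would look roughly like:
+ /// WebSearch.Search: description: Searches the web. inputs:
+ ///  - query: The text to search for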
+ /// A manual-friendly string for a function. + internal static string ToManualString(this KernelFunctionMetadata function) + { + var inputs = string.Join("\n", function.Parameters.Select(parameter => + { + var defaultValueString = InternalTypeConverter.ConvertToString(parameter.DefaultValue); + defaultValueString = string.IsNullOrEmpty(defaultValueString) ? string.Empty : $" (default value: {defaultValueString})"; + return $" - {parameter.Name}: {parameter.Description}{defaultValueString}"; + })); + + // description and inputs are indented by 2 spaces + // While each parameter in inputs is indented by 4 spaces + return $"{function.ToFullyQualifiedName()}: description: {function.Description} inputs:{inputs}"; + } + + /// + /// Create a fully qualified name for a function. + /// + /// The function to create a fully qualified name for. + /// A fully qualified name for a function. + internal static string ToFullyQualifiedName(this KernelFunctionMetadata function) + { + return $"{function.PluginName}.{function.Name}"; + } + + /// + /// Create a string for generating an embedding for a function. + /// + /// The function to create a string for generating an embedding for. + /// A string for generating an embedding for a function. + internal static string ToEmbeddingString(this KernelFunctionMetadata function) + { + var inputs = string.Join("\n", function.Parameters.Select(p => $" - {p.Name}: {p.Description}")); + return $"{function.Name}:\n description: {function.Description}\n inputs:\n{inputs}"; + } +} diff --git a/dotnet/src/InternalUtilities/planning/Extensions/ReadOnlyFunctionCollectionPlannerExtensions.cs b/dotnet/src/InternalUtilities/planning/Extensions/ReadOnlyFunctionCollectionPlannerExtensions.cs new file mode 100644 index 000000000000..d053a70cd817 --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/Extensions/ReadOnlyFunctionCollectionPlannerExtensions.cs @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Json.Schema; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Memory; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Provides extension methods for the implementations for planners. +/// +internal static class ReadOnlyPluginCollectionPlannerExtensions +{ + internal const string PlannerMemoryCollectionName = "Planning.KernelFunctionsManual"; + + /// + /// Returns a function callback that can be used to retrieve a function from the function provider. + /// + /// The plugins. + /// A function callback that can be used to retrieve a function from the function provider. + internal static Func GetFunctionCallback(this IReadOnlyKernelPluginCollection plugins) + { + return (pluginName, functionName) => + { + plugins.TryGetFunction(pluginName, functionName, out var pluginFunction); + return pluginFunction; + }; + } + + /// + /// Returns a string containing the manual for all available functions. + /// + /// The plugins. + /// The planner options. + /// The semantic query for finding relevant registered functions + /// The logger to use for logging. + /// The to monitor for cancellation requests. The default is . + /// A string containing the manual for all available functions. + internal static async Task GetFunctionsManualAsync( + this IReadOnlyKernelPluginCollection plugins, + PlannerOptions plannerOptions, + string? 
semanticQuery = null, + ILogger? logger = null, + CancellationToken cancellationToken = default) + { + IEnumerable availableFunctions = await plugins.GetFunctionsAsync(plannerOptions, semanticQuery, logger, cancellationToken).ConfigureAwait(false); + + return string.Join("\n\n", availableFunctions.Select(x => x.ToManualString())); + } + + /// + /// Returns a string containing the manual for all available functions in a JSON Schema format. + /// + /// The plugins. + /// The planner options. + /// The semantic query for finding relevant registered functions + /// The logger to use for logging. + /// Indicates if the output or return type of the function should be included in the schema. + /// The delimiter to use between the plugin name and the function name. + /// The to monitor for cancellation requests. The default is . + /// A string containing the manual for all available functions. + internal static async Task GetJsonSchemaFunctionsManualAsync( + this IReadOnlyKernelPluginCollection plugins, + PlannerOptions plannerOptions, + string? semanticQuery = null, + ILogger? logger = null, + bool includeOutputSchema = true, + string nameDelimiter = "-", + CancellationToken cancellationToken = default) + { + IEnumerable availableFunctions = await plugins.GetFunctionsAsync(plannerOptions, semanticQuery, logger, cancellationToken).ConfigureAwait(false); + var manuals = availableFunctions.Select(x => x.ToJsonSchemaFunctionView(includeOutputSchema)); + return JsonSerializer.Serialize(manuals); + } + + /// + /// Returns a list of functions that are available to the user based on the semantic query and the excluded plugins and functions. + /// + /// The function provider. + /// The planner options. + /// The semantic query for finding relevant registered functions + /// The logger to use for logging. + /// The to monitor for cancellation requests. The default is . + /// A list of functions that are available to the user based on the semantic query and the excluded plugins and functions. + internal static async Task> GetFunctionsAsync( + this IReadOnlyKernelPluginCollection plugins, + PlannerOptions plannerOptions, + string? semanticQuery, + ILogger? logger, + CancellationToken cancellationToken) + { + return plannerOptions.GetAvailableFunctionsAsync is null ? + await plugins.GetAvailableFunctionsAsync(plannerOptions, semanticQuery, logger, cancellationToken).ConfigureAwait(false) : + await plannerOptions.GetAvailableFunctionsAsync(plannerOptions, semanticQuery, cancellationToken).ConfigureAwait(false); + } + + /// + /// Returns a list of functions that are available to the user based on the semantic query and the excluded plugins and functions. + /// + /// The function provider. + /// The planner options. + /// The semantic query for finding relevant registered functions + /// The logger to use for logging. + /// The to monitor for cancellation requests. The default is . + /// A list of functions that are available to the user based on the semantic query and the excluded plugins and functions. + internal static async Task> GetAvailableFunctionsAsync( + this IReadOnlyKernelPluginCollection plugins, + PlannerOptions plannerOptions, + string? semanticQuery = null, + ILogger? 
logger = null, + CancellationToken cancellationToken = default) + { + var functionsView = plugins.GetFunctionsMetadata(); + + var availableFunctions = functionsView + .Where(s => !plannerOptions.ExcludedPlugins.Contains(s.PluginName, StringComparer.OrdinalIgnoreCase) + && !plannerOptions.ExcludedFunctions.Contains(s.Name, StringComparer.OrdinalIgnoreCase)) + .ToList(); + + List? result = null; + var semanticMemoryConfig = plannerOptions.SemanticMemoryConfig; + if (string.IsNullOrEmpty(semanticQuery) || semanticMemoryConfig is null || semanticMemoryConfig.Memory is NullMemory) + { + // If no semantic query is provided, return all available functions. + // If a Memory provider has not been registered, return all available functions. + result = availableFunctions; + } + else + { + result = new List(); + + // Remember functions in memory so that they can be searched. + await RememberFunctionsAsync(semanticMemoryConfig.Memory, availableFunctions, cancellationToken).ConfigureAwait(false); + + // Search for functions that match the semantic query. + var memories = semanticMemoryConfig.Memory.SearchAsync( + PlannerMemoryCollectionName, + semanticQuery!, + semanticMemoryConfig.MaxRelevantFunctions, + semanticMemoryConfig.RelevancyThreshold ?? 0.0, + cancellationToken: cancellationToken); + + // Add functions that were found in the search results. + result.AddRange(await GetRelevantFunctionsAsync(availableFunctions, memories, logger ?? NullLogger.Instance, cancellationToken).ConfigureAwait(false)); + + // Add any missing functions that were included but not found in the search results. + var missingFunctions = semanticMemoryConfig.IncludedFunctions + .Except(result.Select(x => (x.PluginName, x.Name))!) + .Join(availableFunctions, f => f, af => (af.PluginName, af.Name), (_, af) => af); + + result.AddRange(missingFunctions); + } + + return result + .OrderBy(x => x.PluginName) + .ThenBy(x => x.Name); + } + + private static async Task> GetRelevantFunctionsAsync( + IEnumerable availableFunctions, + IAsyncEnumerable memories, + ILogger logger, + CancellationToken cancellationToken = default) + { + var relevantFunctions = new List(); + await foreach (var memoryEntry in memories.WithCancellation(cancellationToken)) + { + var function = availableFunctions.FirstOrDefault(x => x.ToFullyQualifiedName() == memoryEntry.Metadata.Id); + if (function != null) + { + if (logger.IsEnabled(LogLevel.Debug)) + { + logger.LogDebug("Found relevant function. Relevance Score: {0}, Function: {1}", memoryEntry.Relevance, function.ToFullyQualifiedName()); + } + + relevantFunctions.Add(function); + } + } + + return relevantFunctions; + } + + /// + /// Saves all available functions to memory. + /// + /// The memory provided to store the functions to. + /// The available functions to save. + /// The to monitor for cancellation requests. The default is . + private static async Task RememberFunctionsAsync( + ISemanticTextMemory memory, + List availableFunctions, + CancellationToken cancellationToken = default) + { + foreach (var function in availableFunctions) + { + var functionName = function.ToFullyQualifiedName(); + var key = functionName; + var description = string.IsNullOrEmpty(function.Description) ? 
functionName : function.Description; + var textToEmbed = function.ToEmbeddingString(); + + // It'd be nice if there were a saveIfNotExists method on the memory interface + var memoryEntry = await memory.GetAsync(collection: PlannerMemoryCollectionName, key: key, withEmbedding: false, + cancellationToken: cancellationToken).ConfigureAwait(false); + if (memoryEntry == null) + { + // TODO It'd be nice if the minRelevanceScore could be a parameter for each item that was saved to memory + // As folks may want to tune their functions to be more or less relevant. + // Memory now supports these such strategies. + await memory.SaveInformationAsync(collection: PlannerMemoryCollectionName, text: textToEmbed, id: key, description: description, + additionalMetadata: string.Empty, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + } +} diff --git a/dotnet/src/InternalUtilities/planning/PlannerInstrumentation.cs b/dotnet/src/InternalUtilities/planning/PlannerInstrumentation.cs new file mode 100644 index 000000000000..3d933aa242a2 --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/PlannerInstrumentation.cs @@ -0,0 +1,218 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel.Planning; + +/// Surrounds the invocation of a planner with logging and metrics. +internal static partial class PlannerInstrumentation +{ + /// for planning-related activities. + private static readonly ActivitySource s_activitySource = new("Microsoft.SemanticKernel.Planning"); + + /// for planner-related metrics. + private static readonly Meter s_meter = new("Microsoft.SemanticKernel.Planning"); + + /// to record plan creation duration. + private static readonly Histogram s_createPlanDuration = s_meter.CreateHistogram( + name: "semantic_kernel.planning.create_plan.duration", + unit: "s", + description: "Duration time of plan creation."); + + /// to record plan execution duration. + private static readonly Histogram s_planExecutionDuration = s_meter.CreateHistogram( + name: "semantic_kernel.planning.invoke_plan.duration", + unit: "s", + description: "Duration time of plan execution."); + + /// Invokes the supplied delegate, surrounded by logging and metrics. 
+ public static async Task CreatePlanAsync( + Func> createPlanAsync, + TPlanner planner, Kernel kernel, string goal, ILogger logger, CancellationToken cancellationToken) + where TPlanner : class + where TPlan : class + { + string plannerName = planner.GetType().FullName; + + using var activity = s_activitySource.StartActivity(plannerName); + + logger.LogCreatePlanStarted(); + logger.LogGoal(goal); + + TagList tags = new() { { "semantic_kernel.planner.name", plannerName } }; + long startingTimestamp = Stopwatch.GetTimestamp(); + try + { + var plan = await createPlanAsync(planner, kernel, goal, cancellationToken).ConfigureAwait(false); + logger.LogPlanCreated(); + logger.LogPlan(plan); + + return plan; + } + catch (Exception ex) + { + tags.Add("error.type", ex.GetType().FullName); + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + logger.LogCreatePlanError(ex, ex.Message); + throw; + } + finally + { + TimeSpan duration = new((long)((Stopwatch.GetTimestamp() - startingTimestamp) * (10_000_000.0 / Stopwatch.Frequency))); + logger.LogCreatePlanDuration(duration.TotalSeconds); + s_createPlanDuration.Record(duration.TotalSeconds, in tags); + } + } + + // Invokes the supplied delegate, surrounded by logging and metrics. + public static async Task InvokePlanAsync( + Func> InvokePlanAsync, + TPlan plan, Kernel kernel, TPlanInput? input, ILogger logger, CancellationToken cancellationToken) + where TPlan : class + where TPlanInput : class + where TPlanResult : class + { + string planName = plan.GetType().FullName; + using var activity = s_activitySource.StartActivity(planName); + + logger.LogInvokePlanStarted(); + + TagList tags = new() { { "semantic_kernel.plan.name", planName } }; + long startingTimestamp = Stopwatch.GetTimestamp(); + try + { + TPlanResult planResult = await InvokePlanAsync(plan, kernel, input, cancellationToken).ConfigureAwait(false); + + logger.LogInvokePlanSuccess(); + logger.LogPlanResult(planResult); + + return planResult; + } + catch (Exception ex) + { + tags.Add("error.type", ex.GetType().FullName); + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + logger.LogInvokePlanError(ex, ex.Message); + throw; + } + finally + { + TimeSpan duration = new((long)((Stopwatch.GetTimestamp() - startingTimestamp) * (10_000_000.0 / Stopwatch.Frequency))); + logger.LogInvokePlanDuration(duration.TotalSeconds); + s_planExecutionDuration.Record(duration.TotalSeconds, in tags); + } + } + + #region CreatePlan Logging helpers +#pragma warning disable SYSLIB1006 // Multiple logging methods cannot use the same event id within a class + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Plan creation started.")] + static partial void LogCreatePlanStarted(this ILogger logger); + + [LoggerMessage( + EventId = 0, + Level = LogLevel.Trace, // Sensitive data, logging as trace, disabled by default + Message = "Goal: {Goal}")] + static partial void LogGoal(this ILogger logger, string goal); + + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Plan created.")] + static partial void LogPlanCreated(this ILogger logger); + + private static readonly Action s_logPlan = + LoggerMessage.Define( + logLevel: LogLevel.Trace, // Sensitive data, logging as trace, disabled by default + eventId: 0, + "Plan:\n{Plan}"); + private static void LogPlan(this ILogger logger, object plan) + { + if (logger.IsEnabled(LogLevel.Trace)) + { + try + { + var jsonString = JsonSerializer.Serialize(plan); + s_logPlan(logger, jsonString, null); + } + catch 
(NotSupportedException ex) + { + s_logPlan(logger, "Failed to serialize plan to Json", ex); + } + } + } + + [LoggerMessage( + EventId = 0, + Level = LogLevel.Error, + Message = "Plan creation failed. Error: {Message}")] + static partial void LogCreatePlanError(this ILogger logger, Exception exception, string message); + + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Plan creation duration: {Duration}s.")] + static partial void LogCreatePlanDuration(this ILogger logger, double duration); + + #endregion + + #region InvokePlan Logging helpers + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Plan execution started.")] + static partial void LogInvokePlanStarted(this ILogger logger); + + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Plan executed successfully.")] + static partial void LogInvokePlanSuccess(this ILogger logger); + + private static readonly Action s_logPlanResult = + LoggerMessage.Define( + logLevel: LogLevel.Trace, // Sensitive data, logging as trace, disabled by default + eventId: 0, + "Plan result: {Result}"); + + private static void LogPlanResult(this ILogger logger, object planResult) + { + if (logger.IsEnabled(LogLevel.Trace)) + { + try + { + var jsonString = planResult.GetType() == typeof(string) + ? planResult.ToString() + : JsonSerializer.Serialize(planResult); + s_logPlanResult(logger, jsonString, null); + } + catch (NotSupportedException ex) + { + s_logPlanResult(logger, "Failed to serialize plan result to Json", ex); + } + } + } + + [LoggerMessage( + EventId = 0, + Level = LogLevel.Error, + Message = "Plan execution failed. Error: {Message}")] + static partial void LogInvokePlanError(this ILogger logger, Exception exception, string message); + + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Plan execution duration: {Duration}s.")] + static partial void LogInvokePlanDuration(this ILogger logger, double duration); + +#pragma warning restore SYSLIB1006 // Multiple logging methods cannot use the same event id within a class + #endregion +} diff --git a/dotnet/src/InternalUtilities/planning/PlannerOptions.cs b/dotnet/src/InternalUtilities/planning/PlannerOptions.cs new file mode 100644 index 000000000000..463b9b5d032c --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/PlannerOptions.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Planner config with semantic memory +/// +public abstract class PlannerOptions +{ + /// + /// A list of plugins to exclude from the plan creation request. + /// + public HashSet ExcludedPlugins { get; } = new(); + + /// + /// A list of functions to exclude from the plan creation request. + /// + public HashSet ExcludedFunctions { get; } = new(); + + /// + /// Callback to get the available functions for planning (optional). + /// Use if you want to override the default function lookup behavior. + /// If set, this function takes precedence over . + /// Setting , will be used to filter the results. + /// + public Func>>? GetAvailableFunctionsAsync { get; set; } + + /// + /// Semantic Memory configuration, used to enable function filtering during plan creation. + /// + /// + /// This configuration will be ignored if GetAvailableFunctionsAsync is set. 
+ /// + public SemanticMemoryConfig SemanticMemoryConfig { get; set; } = new(); +} diff --git a/dotnet/src/InternalUtilities/planning/PlanningUtilities.props b/dotnet/src/InternalUtilities/planning/PlanningUtilities.props new file mode 100644 index 000000000000..c773f759be88 --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/PlanningUtilities.props @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionContent.cs b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionContent.cs new file mode 100644 index 000000000000..2a0f6682130c --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionContent.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// A class to describe the content of a response/return type from a KernelFunctionFactory, in a JSON Schema friendly way. +/// +internal sealed class JsonSchemaFunctionContent +{ + /// + /// The JSON Schema for application/json responses. + /// + [JsonPropertyName("application/json")] + public JsonSchemaResponse JsonResponse { get; } = new JsonSchemaResponse(); +} diff --git a/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionParameters.cs b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionParameters.cs new file mode 100644 index 000000000000..6bd4438b28c1 --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionParameters.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// A class to describe the parameters of a KernelFunctionFactory in a JSON Schema friendly way. +/// +internal sealed class JsonSchemaFunctionParameters +{ + /// + /// The type of schema, which is always "object" when describing function parameters. + /// + [JsonPropertyName("type")] + public string Type => "object"; + + /// + /// The list of required properties. + /// + [JsonPropertyName("required")] + public List Required { get; set; } = new List(); + + /// + /// A dictionary of property names => JSON Schema. + /// + [JsonPropertyName("properties")] + public Dictionary Properties { get; set; } = new Dictionary(); +} diff --git a/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionResponse.cs b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionResponse.cs new file mode 100644 index 000000000000..7ae7e057a160 --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionResponse.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// A class for describing the response/return type of a KernelFunctionFactory in a JSON Schema friendly way. +/// +internal sealed class JsonSchemaFunctionResponse +{ + /// + /// The response description. + /// + [JsonPropertyName("description")] + public string Description { get; set; } = string.Empty; + + /// + /// The response content.
+ /// + [JsonPropertyName("content")] + public JsonSchemaFunctionContent Content { get; set; } = new JsonSchemaFunctionContent(); +} diff --git a/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionView.cs b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionView.cs new file mode 100644 index 000000000000..41f0d5ec8e7f --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaFunctionView.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// A class to describe a KernelFunctionFactory in a JSON Schema friendly way. +/// +internal sealed class JsonSchemaFunctionView +{ + /// + /// The function name. + /// + [JsonPropertyName("name")] + public string Name { get; set; } = string.Empty; + + /// + /// The function description. + /// + [JsonPropertyName("description")] + public string Description { get; set; } = string.Empty; + + /// + /// The function parameters. + /// + [JsonPropertyName("parameters")] + public JsonSchemaFunctionParameters Parameters { get; set; } = new JsonSchemaFunctionParameters(); + + /// + /// The function response. + /// + [JsonPropertyName("responses")] + public Dictionary FunctionResponses { get; set; } = new Dictionary(); +} diff --git a/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaResponse.cs b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaResponse.cs new file mode 100644 index 000000000000..0a9549dbb49b --- /dev/null +++ b/dotnet/src/InternalUtilities/planning/Schema/JsonSchemaResponse.cs @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// A class to describe the content schema of a response/return type from a KernelFunctionFactory, in a JSON Schema friendly way. +/// +internal sealed class JsonSchemaResponse +{ + /// + /// The JSON Schema. + /// + [JsonPropertyName("schema")] + public KernelJsonSchema? Schema { get; set; } +} diff --git a/dotnet/src/Planners/Planners.Core/SemanticMemoryConfig.cs b/dotnet/src/InternalUtilities/planning/SemanticMemoryConfig.cs similarity index 90% rename from dotnet/src/Planners/Planners.Core/SemanticMemoryConfig.cs rename to dotnet/src/InternalUtilities/planning/SemanticMemoryConfig.cs index 2a6f8e5ff8fb..f7dfa8eab1d2 100644 --- a/dotnet/src/Planners/Planners.Core/SemanticMemoryConfig.cs +++ b/dotnet/src/InternalUtilities/planning/SemanticMemoryConfig.cs @@ -3,10 +3,7 @@ using System.Collections.Generic; using Microsoft.SemanticKernel.Memory; -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; /// /// Semantic memory configuration. diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/CompilerServicesAttributes.cs b/dotnet/src/InternalUtilities/src/Diagnostics/CompilerServicesAttributes.cs index 0f57cdfdb282..9ce82b36c05b 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/CompilerServicesAttributes.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/CompilerServicesAttributes.cs @@ -4,14 +4,14 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information.
-#pragma warning disable IDE0130 // Namespace does not match folder structure -// ReSharper disable once CheckNamespace +using System.Diagnostics.CodeAnalysis; + namespace System.Runtime.CompilerServices; -#pragma warning restore IDE0130 #if !NETCOREAPP [AttributeUsage(AttributeTargets.Parameter, AllowMultiple = false, Inherited = false)] +[ExcludeFromCodeCoverage] internal sealed class CallerArgumentExpressionAttribute : Attribute { public CallerArgumentExpressionAttribute(string parameterName) diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ExceptionExtensions.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ExceptionExtensions.cs index dca7ebadaacc..d23173bdec09 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/ExceptionExtensions.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/ExceptionExtensions.cs @@ -1,15 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Diagnostics.CodeAnalysis; using System.Threading; -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Exception namespace System; -#pragma warning restore IDE0130 /// /// Exception extension methods. /// +[ExcludeFromCodeCoverage] internal static class ExceptionExtensions { /// diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ExperimentalAttribute.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ExperimentalAttribute.cs new file mode 100644 index 000000000000..1332155b0d37 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Diagnostics/ExperimentalAttribute.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft. All rights reserved. + +// This is a copy of: +// https://github.com/dotnet/runtime/blob/main/src/libraries/System.Private.CoreLib/src/System/Diagnostics/CodeAnalysis/ExperimentalAttribute.cs +// made internal rather than public. + +namespace System.Diagnostics.CodeAnalysis; + +#if !NET8_0_OR_GREATER +/// +/// Indicates that an API is experimental and it may change in the future. +/// +/// +/// This attribute allows call sites to be flagged with a diagnostic that indicates that an experimental +/// feature is used. Authors can use this attribute to ship preview features in their assemblies. +/// +[ExcludeFromCodeCoverage] +[AttributeUsage(AttributeTargets.Assembly | + AttributeTargets.Module | + AttributeTargets.Class | + AttributeTargets.Struct | + AttributeTargets.Enum | + AttributeTargets.Constructor | + AttributeTargets.Method | + AttributeTargets.Property | + AttributeTargets.Field | + AttributeTargets.Event | + AttributeTargets.Interface | + AttributeTargets.Delegate, Inherited = false)] +internal sealed class ExperimentalAttribute : Attribute +{ + /// + /// Initializes a new instance of the class, specifying the ID that the compiler will use + /// when reporting a use of the API the attribute applies to. + /// + /// The ID that the compiler will use when reporting a use of the API the attribute applies to. + public ExperimentalAttribute(string diagnosticId) + { + this.DiagnosticId = diagnosticId; + } + + /// + /// Gets the ID that the compiler will use when reporting a use of the API the attribute applies to. + /// + /// The unique diagnostic ID. + /// + /// The diagnostic ID is shown in build output for warnings and errors. + /// This property represents the unique ID that can be used to suppress the warnings or errors, if needed. + /// + public string DiagnosticId { get; } + + /// + /// Gets or sets the URL for corresponding documentation. 
+ /// The API accepts a format string instead of an actual URL, creating a generic URL that includes the diagnostic ID. + /// + /// The format string that represents a URL to corresponding documentation. + /// An example format string is https://contoso.com/obsoletion-warnings/{0}. + public string? UrlFormat { get; set; } +} +#endif diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/HttpStatusCodeType.cs b/dotnet/src/InternalUtilities/src/Diagnostics/HttpStatusCodeType.cs deleted file mode 100644 index 01371aa6dd9a..000000000000 --- a/dotnet/src/InternalUtilities/src/Diagnostics/HttpStatusCodeType.cs +++ /dev/null @@ -1,342 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; - -namespace Microsoft.SemanticKernel.Diagnostics; - -/// -/// Contains the values of status codes defined for HTTP in the response to an HTTP request. -/// -[SuppressMessage("Design", "CA1069:Enums values should not be duplicated", Justification = "")] -internal enum HttpStatusCodeType -{ - /// - /// The server has received the request headers and the client should proceed to send the request body. - /// - Continue = 100, - - /// - /// The server is switching protocols according to the Upgrade header sent by the client. - /// - SwitchingProtocols = 101, - - /// - /// The server is processing the request, but has not completed it yet. - /// - Processing = 102, - - /// - /// The server is sending some hints about the response before the final status code. - /// - EarlyHints = 103, - - /// - /// The request has succeeded and the response contains the requested resource. - /// - OK = 200, - - /// - /// The request has been fulfilled and a new resource has been created. - /// - Created = 201, - - /// - /// The request has been accepted for further processing, but the processing has not been completed. - /// - Accepted = 202, - - /// - /// The server is returning a response from a different source than the requested one, but the response is still valid. - /// - NonAuthoritativeInformation = 203, - - /// - /// The request has been successfully processed, but the server does not need to return any content. - /// - NoContent = 204, - - /// - /// The server has fulfilled the request and the client should reset the document view. - /// - ResetContent = 205, - - /// - /// The server is returning a partial response to a range request. - /// - PartialContent = 206, - - /// - /// The server is returning a response that contains multiple status codes for different parts of the request. - /// - MultiStatus = 207, - - /// - /// The server has already reported the status of the request and does not need to repeat it. - /// - AlreadyReported = 208, - - /// - /// The server is returning a response that is the result of applying a delta encoding to the requested resource. - /// - IMUsed = 226, - - /// - /// The requested resource has multiple representations and the client should choose one of them. - /// - Ambiguous = 300, - - /// - /// The requested resource has multiple representations and the client should choose one of them. - /// - MultipleChoices = 300, - - /// - /// The requested resource has been permanently moved to a new location and the client should use the new URI. - /// - Moved = 301, - - /// - /// The requested resource has been permanently moved to a new location and the client should use the new URI. - /// - MovedPermanently = 301, - - /// - /// The requested resource has been temporarily moved to a new location and the client should use the new URI. 
- /// - Found = 302, - - /// - /// The requested resource has been temporarily moved to a new location and the client should use the new URI. - /// - Redirect = 302, - - /// - /// The requested resource can be found at a different URI and the client should use a GET method to retrieve it. - /// - RedirectMethod = 303, - - /// - /// The requested resource can be found at a different URI and the client should use a GET method to retrieve it. - /// - SeeOther = 303, - - /// - /// The requested resource has not been modified since the last request and the client can use the cached version. - /// - NotModified = 304, - - /// - /// The requested resource is only available through a proxy and the client should use the proxy URI. - /// - UseProxy = 305, - - /// - /// This status code is no longer used and is reserved for future use. - /// - Unused = 306, - - /// - /// The requested resource has been temporarily moved to a new location and the client should use the same method to access it. - /// - RedirectKeepVerb = 307, - - /// - /// The requested resource has been temporarily moved to a new location and the client should use the same method to access it. - /// - TemporaryRedirect = 307, - - /// - /// The requested resource has been permanently moved to a new location and the client should use the same method to access it. - /// - PermanentRedirect = 308, - - /// - /// The server cannot process the request due to a malformed syntax or an invalid parameter. - /// - BadRequest = 400, - - /// - /// The request requires authentication and the client should provide valid credentials. - /// - Unauthorized = 401, - - /// - /// The request requires payment and the client should provide valid payment information. - /// - PaymentRequired = 402, - - /// - /// The server has understood the request, but refuses to authorize it due to insufficient permissions or other reasons. - /// - Forbidden = 403, - - /// - /// The server cannot find the requested resource and the client should not repeat the request. - /// - NotFound = 404, - - /// - /// The server does not support the method used by the request and the client should use a different method. - /// - MethodNotAllowed = 405, - - /// - /// The server cannot produce a response that matches the preferences specified by the request headers. - /// - NotAcceptable = 406, - - /// - /// The request requires authentication through a proxy and the client should provide valid proxy credentials. - /// - ProxyAuthenticationRequired = 407, - - /// - /// The server did not receive the complete request within the time limit and the client should try again later. - /// - RequestTimeout = 408, - - /// - /// The request could not be completed due to a conflict with the current state of the resource. - /// - Conflict = 409, - - /// - /// The requested resource is no longer available and the server does not know the new location. - /// - Gone = 410, - - /// - /// The request requires a Content-Length header and the client should provide it. - /// - LengthRequired = 411, - - /// - /// The request does not meet the preconditions specified by the request headers and the server cannot process it. - /// - PreconditionFailed = 412, - - /// - /// The request entity is too large and the server cannot process it. - /// - RequestEntityTooLarge = 413, - - /// - /// The request URI is too long and the server cannot process it. - /// - RequestUriTooLong = 414, - - /// - /// The request entity has a media type that the server does not support or cannot handle. 
- /// - UnsupportedMediaType = 415, - - /// - /// The request specifies a range that the server cannot satisfy or is invalid. - /// - RequestedRangeNotSatisfiable = 416, - - /// - /// The request contains an Expect header that the server cannot meet or is invalid. - /// - ExpectationFailed = 417, - - /// - /// The request was directed to a server that is not able to produce a response. - /// - MisdirectedRequest = 421, - - /// - /// The request entity is well-formed, but cannot be processed by the server due to semantic errors. - /// - UnprocessableEntity = 422, - - /// - /// The requested resource is locked and the client should release it before modifying it. - /// - Locked = 423, - - /// - /// The request failed due to a dependency on another request that failed. - /// - FailedDependency = 424, - - /// - /// The request requires the server to upgrade to a different protocol and the client should use the Upgrade header to specify it. - /// - UpgradeRequired = 426, - - /// - /// The request requires the server to apply preconditions and the client should use the If-Match or If-Unmodified-Since headers to specify them. - /// - PreconditionRequired = 428, - - /// - /// The client has sent too many requests in a given time and the server rejects them to prevent overload. - /// - TooManyRequests = 429, - - /// - /// The request contains headers that are too large and the server cannot process them. - /// - RequestHeaderFieldsTooLarge = 431, - - /// - /// The server is denying access to the requested resource for legal reasons and the client should not repeat the request. - /// - UnavailableForLegalReasons = 451, - - /// - /// The server encountered an unexpected error and cannot fulfill the request. - /// - InternalServerError = 500, - - /// - /// The server does not support the functionality required by the request and the client should not repeat the request. - /// - NotImplemented = 501, - - /// - /// The server received an invalid response from an upstream server and cannot fulfill the request. - /// - BadGateway = 502, - - /// - /// The server is temporarily unavailable due to maintenance or overload and the client should try again later. - /// - ServiceUnavailable = 503, - - /// - /// The server did not receive a timely response from an upstream server and cannot fulfill the request. - /// - GatewayTimeout = 504, - - /// - /// The server does not support the HTTP version used by the request and the client should use a different version. - /// - HttpVersionNotSupported = 505, - - /// - /// The server has a configuration error and cannot negotiate a suitable representation for the requested resource. - /// - VariantAlsoNegotiates = 506, - - /// - /// The server has insufficient storage space to complete the request. - /// - InsufficientStorage = 507, - - /// - /// The server detected an infinite loop while processing the request. - /// - LoopDetected = 508, - - /// - /// The request requires additional extensions that the server does not support or cannot handle. - /// - NotExtended = 510, - - /// - /// The request requires authentication at the network level and the client should provide valid network credentials. 
- /// - NetworkAuthenticationRequired = 511, -} diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/IsExternalInit.cs b/dotnet/src/InternalUtilities/src/Diagnostics/IsExternalInit.cs index 43f0c25312d8..5b34b2d75c1a 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/IsExternalInit.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/IsExternalInit.cs @@ -1,14 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. -using System.ComponentModel; - namespace System.Runtime.CompilerServices; /// /// Reserved to be used by the compiler for tracking metadata. /// This class should not be used by developers in source code. /// -[EditorBrowsable(EditorBrowsableState.Never)] internal static class IsExternalInit { } diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/NullableAttributes.cs b/dotnet/src/InternalUtilities/src/Diagnostics/NullableAttributes.cs index c63b1cb2979e..34f0de31ec3c 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/NullableAttributes.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/NullableAttributes.cs @@ -7,10 +7,7 @@ // This was copied from https://github.com/dotnet/runtime/blob/39b9607807f29e48cae4652cd74735182b31182e/src/libraries/System.Private.CoreLib/src/System/Diagnostics/CodeAnalysis/NullableAttributes.cs // and updated to have the scope of the attributes be internal. -#pragma warning disable IDE0130 // Namespace does not match folder structure -// ReSharper disable once CheckNamespace namespace System.Diagnostics.CodeAnalysis; -#pragma warning restore IDE0130 #if !NETCOREAPP @@ -39,6 +36,7 @@ internal sealed class NotNullAttribute : Attribute } /// Specifies that when a method returns , the parameter may be null even if the corresponding type disallows it. +[ExcludeFromCodeCoverage] [AttributeUsage(AttributeTargets.Parameter, Inherited = false)] internal sealed class MaybeNullWhenAttribute : Attribute { @@ -53,6 +51,7 @@ internal sealed class MaybeNullWhenAttribute : Attribute } /// Specifies that when a method returns , the parameter will not be null even if the corresponding type allows it. +[ExcludeFromCodeCoverage] [AttributeUsage(AttributeTargets.Parameter, Inherited = false)] internal sealed class NotNullWhenAttribute : Attribute { @@ -67,6 +66,7 @@ internal sealed class NotNullWhenAttribute : Attribute } /// Specifies that the output will be non-null if the named parameter is non-null. +[ExcludeFromCodeCoverage] [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.ReturnValue, AllowMultiple = true, Inherited = false)] internal sealed class NotNullIfNotNullAttribute : Attribute { @@ -87,6 +87,7 @@ internal sealed class DoesNotReturnAttribute : Attribute } /// Specifies that the method will not return if the associated Boolean parameter is passed the specified value. +[ExcludeFromCodeCoverage] [AttributeUsage(AttributeTargets.Parameter, Inherited = false)] internal sealed class DoesNotReturnIfAttribute : Attribute { @@ -106,6 +107,7 @@ internal sealed class DoesNotReturnIfAttribute : Attribute #if !NETCOREAPP || NETCOREAPP3_1 /// Specifies that the method or property will ensure that the listed field and property members have not-null values. 
+[ExcludeFromCodeCoverage] [AttributeUsage(AttributeTargets.Method | AttributeTargets.Property, Inherited = false, AllowMultiple = true)] internal sealed class MemberNotNullAttribute : Attribute { @@ -127,6 +129,7 @@ internal sealed class MemberNotNullAttribute : Attribute } /// Specifies that the method or property will ensure that the listed field and property members have not-null values when returning with the specified return value condition. +[ExcludeFromCodeCoverage] [AttributeUsage(AttributeTargets.Method | AttributeTargets.Property, Inherited = false, AllowMultiple = true)] internal sealed class MemberNotNullWhenAttribute : Attribute { diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs index c19bfa19c7a0..c8e754c40c08 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs @@ -8,8 +8,9 @@ using System.Runtime.CompilerServices; using System.Text.RegularExpressions; -namespace Microsoft.SemanticKernel.Diagnostics; +namespace Microsoft.SemanticKernel; +[ExcludeFromCodeCoverage] internal static class Verify { private static readonly Regex s_asciiLettersDigitsUnderscoresRegex = new("^[0-9A-Za-z_]*$"); @@ -36,27 +37,63 @@ internal static void NotNullOrWhiteSpace([NotNull] string? str, [CallerArgumentE } } - internal static void ValidPluginName([NotNull] string? pluginName) + internal static void NotNullOrEmpty(IList list, [CallerArgumentExpression("list")] string? paramName = null) + { + NotNull(list, paramName); + if (list.Count == 0) + { + throw new ArgumentException("The value cannot be empty.", paramName); + } + } + + public static void True(bool condition, string message, [CallerArgumentExpression("condition")] string? paramName = null) + { + if (!condition) + { + throw new ArgumentException(message, paramName); + } + } + + internal static void ValidPluginName([NotNull] string? pluginName, IReadOnlyKernelPluginCollection? plugins = null, [CallerArgumentExpression("pluginName")] string? paramName = null) { NotNullOrWhiteSpace(pluginName); if (!s_asciiLettersDigitsUnderscoresRegex.IsMatch(pluginName)) { - ThrowInvalidName("plugin name", pluginName); + ThrowArgumentInvalidName("plugin name", pluginName, paramName); } - } - internal static void ValidFunctionName([NotNull] string? functionName) => - ValidName(functionName, "function name"); + if (plugins is not null && plugins.Contains(pluginName)) + { + throw new ArgumentException($"A plugin with the name '{pluginName}' already exists."); + } + } - internal static void ValidFunctionParamName([NotNull] string? functionParamName) => - ValidName(functionParamName, "function parameter name"); + internal static void ValidFunctionName([NotNull] string? functionName, [CallerArgumentExpression("functionName")] string? paramName = null) + { + NotNullOrWhiteSpace(functionName); + if (!s_asciiLettersDigitsUnderscoresRegex.IsMatch(functionName)) + { + ThrowArgumentInvalidName("function name", functionName, paramName); + } + } - private static void ValidName([NotNull] string? name, string kind) + public static void ValidateUrl(string url, bool allowQuery = false, [CallerArgumentExpression("url")] string? 
paramName = null) { - NotNullOrWhiteSpace(name); - if (!s_asciiLettersDigitsUnderscoresRegex.IsMatch(name)) + NotNullOrWhiteSpace(url, paramName); + + if (!Uri.TryCreate(url, UriKind.Absolute, out var uri) || string.IsNullOrEmpty(uri.Host)) + { + throw new ArgumentException($"The `{url}` is not valid.", paramName); + } + + if (!allowQuery && !string.IsNullOrEmpty(uri.Query)) + { + throw new ArgumentException($"The `{url}` is not valid: it cannot contain query parameters.", paramName); + } + + if (!string.IsNullOrEmpty(uri.Fragment)) { - ThrowInvalidName(kind, name); + throw new ArgumentException($"The `{url}` is not valid: it cannot contain URL fragments.", paramName); } } @@ -83,7 +120,7 @@ internal static void DirectoryExists(string path) /// Make sure every function parameter name is unique /// /// List of parameters - internal static void ParametersUniqueness(IReadOnlyList parameters) + internal static void ParametersUniqueness(IReadOnlyList parameters) { int count = parameters.Count; if (count > 0) @@ -91,7 +128,7 @@ internal static void ParametersUniqueness(IReadOnlyList parameter var seen = new HashSet(StringComparer.OrdinalIgnoreCase); for (int i = 0; i < count; i++) { - ParameterView p = parameters[i]; + KernelParameterMetadata p = parameters[i]; if (string.IsNullOrWhiteSpace(p.Name)) { string paramName = $"{nameof(parameters)}[{i}].{p.Name}"; @@ -107,15 +144,15 @@ internal static void ParametersUniqueness(IReadOnlyList parameter if (!seen.Add(p.Name)) { - throw new SKException($"The function has two or more parameters with the same name '{p.Name}'"); + throw new ArgumentException($"The function has two or more parameters with the same name '{p.Name}'"); } } } } [DoesNotReturn] - private static void ThrowInvalidName(string kind, string name) => - throw new SKException($"A {kind} can contain only ASCII letters, digits, and underscores: '{name}' is not a valid name."); + private static void ThrowArgumentInvalidName(string kind, string name, string? paramName) => + throw new ArgumentException($"A {kind} can contain only ASCII letters, digits, and underscores: '{name}' is not a valid name.", paramName); [DoesNotReturn] internal static void ThrowArgumentNullException(string? paramName) => diff --git a/dotnet/src/InternalUtilities/src/Http/HttpClientExtensions.cs b/dotnet/src/InternalUtilities/src/Http/HttpClientExtensions.cs index 964bef838399..638a0026bb96 100644 --- a/dotnet/src/InternalUtilities/src/Http/HttpClientExtensions.cs +++ b/dotnet/src/InternalUtilities/src/Http/HttpClientExtensions.cs @@ -1,11 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. +using System; +using System.Diagnostics.CodeAnalysis; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; +namespace Microsoft.SemanticKernel.Http; + +[ExcludeFromCodeCoverage] internal static class HttpClientExtensions { /// @@ -23,27 +27,34 @@ internal static class HttpClientExtensions internal static async Task SendWithSuccessCheckAsync(this HttpClient client, HttpRequestMessage request, HttpCompletionOption completionOption, CancellationToken cancellationToken) { HttpResponseMessage? response = null; - try { response = await client.SendAsync(request, completionOption, cancellationToken).ConfigureAwait(false); - - response.EnsureSuccessStatusCode(); - - return response; } catch (HttpRequestException e) { - string? 
responseContent = null; + throw new HttpOperationException(HttpStatusCode.BadRequest, null, e.Message, e); + } + if (!response.IsSuccessStatusCode) + { + string? responseContent = null; try { + // On .NET Framework, EnsureSuccessStatusCode disposes of the response content; + // that was changed years ago in .NET Core, but for .NET Framework it means in order + // to read the response content in the case of failure, that has to be + // done before calling EnsureSuccessStatusCode. responseContent = await response!.Content.ReadAsStringAsync().ConfigureAwait(false); + response.EnsureSuccessStatusCode(); // will always throw + } + catch (Exception e) + { + throw new HttpOperationException(response.StatusCode, responseContent, e.Message, e); } - catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead. - - throw new HttpOperationException(response?.StatusCode ?? HttpStatusCode.BadRequest, responseContent, e.Message, e); } + + return response; } /// diff --git a/dotnet/src/InternalUtilities/src/Http/HttpClientProvider.cs b/dotnet/src/InternalUtilities/src/Http/HttpClientProvider.cs index 8acff7db5e04..d11b6dfa8641 100644 --- a/dotnet/src/InternalUtilities/src/Http/HttpClientProvider.cs +++ b/dotnet/src/InternalUtilities/src/Http/HttpClientProvider.cs @@ -1,30 +1,72 @@ // Copyright (c) Microsoft. All rights reserved. +using System; +using System.Diagnostics.CodeAnalysis; using System.Net.Http; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Http; +using Microsoft.Extensions.DependencyInjection; + +#pragma warning disable CA2215 // Dispose methods should call base class dispose + +namespace Microsoft.SemanticKernel.Http; /// /// Provides functionality for retrieving instances of HttpClient. /// +[ExcludeFromCodeCoverage] internal static class HttpClientProvider { /// /// Retrieves an instance of HttpClient. /// - /// The to be used when the HttpClient is not provided already - /// An optional pre-existing instance of HttpClient. - /// The to use for logging. If null, no logging will be performed. /// An instance of HttpClient. - public static HttpClient GetHttpClient(IDelegatingHandlerFactory httpHandlerFactory, HttpClient? httpClient, ILoggerFactory? loggerFactory) + public static HttpClient GetHttpClient() => new(NonDisposableHttpClientHandler.Instance, disposeHandler: false); + + /// + /// Retrieves an instance of HttpClient. + /// + /// An instance of HttpClient. + public static HttpClient GetHttpClient(HttpClient? httpClient = null) => httpClient ?? GetHttpClient(); + + /// + /// Retrieves an instance of HttpClient. + /// + /// An instance of HttpClient. + public static HttpClient GetHttpClient(IServiceProvider? serviceProvider = null) => GetHttpClient(serviceProvider?.GetService()); + + /// + /// Retrieves an instance of HttpClient. + /// + /// An instance of HttpClient. + public static HttpClient GetHttpClient(HttpClient? httpClient, IServiceProvider serviceProvider) => httpClient ?? GetHttpClient(serviceProvider?.GetService()); + + /// + /// Represents a singleton implementation of that is not disposable. + /// + private sealed class NonDisposableHttpClientHandler : HttpClientHandler { - if (httpClient is null) + /// + /// Private constructor to prevent direct instantiation of the class. 
+ /// + private NonDisposableHttpClientHandler() { - var providedHttpHandler = httpHandlerFactory.Create(loggerFactory); - providedHttpHandler.InnerHandler = NonDisposableHttpClientHandler.Instance; - return new HttpClient(providedHttpHandler, false); // We should refrain from disposing the underlying SK default HttpClient handler as it would impact other HTTP clients that utilize the same handler. + this.CheckCertificateRevocationList = true; } - return httpClient; + /// + /// Gets the singleton instance of . + /// + public static NonDisposableHttpClientHandler Instance { get; } = new(); + + /// + /// Disposes the underlying resources held by the . + /// This implementation does nothing to prevent unintended disposal, as it may affect all references. + /// + /// True if called from , false if called from a finalizer. + protected override void Dispose(bool disposing) + { + // Do nothing if called explicitly from Dispose, as it may unintentionally affect all references. + // The base.Dispose(disposing) is not called to avoid invoking the disposal of HttpClientHandler resources. + // This implementation assumes that the HttpClientHandler is being used as a singleton and should not be disposed directly. + } } } diff --git a/dotnet/src/InternalUtilities/src/Http/HttpContentExtensions.cs b/dotnet/src/InternalUtilities/src/Http/HttpContentExtensions.cs index c6f8ea5aa39f..917e8d3af6aa 100644 --- a/dotnet/src/InternalUtilities/src/Http/HttpContentExtensions.cs +++ b/dotnet/src/InternalUtilities/src/Http/HttpContentExtensions.cs @@ -1,13 +1,16 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Diagnostics.CodeAnalysis; using System.IO; using System.Net.Http; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; + +namespace Microsoft.SemanticKernel.Http; /// /// Provides extension methods for working with HTTP content in a way that translates HttpRequestExceptions into HttpOperationExceptions. /// +[ExcludeFromCodeCoverage] internal static class HttpContentExtensions { /// diff --git a/dotnet/src/InternalUtilities/src/Http/HttpHeaderValues.cs b/dotnet/src/InternalUtilities/src/Http/HttpHeaderValues.cs new file mode 100644 index 000000000000..9f918c6c0dc3 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Http/HttpHeaderValues.cs @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel.Http; + +/// Provides HTTP header values for common purposes. +[ExcludeFromCodeCoverage] +internal static class HttpHeaderValues +{ + /// User agent string to use for all HTTP requests issued by Semantic Kernel. + public static string UserAgent => "Semantic-Kernel"; +} diff --git a/dotnet/src/InternalUtilities/src/Http/HttpRequest.cs b/dotnet/src/InternalUtilities/src/Http/HttpRequest.cs index 206c6e18ae77..d04afb58d594 100644 --- a/dotnet/src/InternalUtilities/src/Http/HttpRequest.cs +++ b/dotnet/src/InternalUtilities/src/Http/HttpRequest.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; using System.Net.Http; using System.Net.Http.Headers; using System.Text; @@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel; +[ExcludeFromCodeCoverage] internal static class HttpRequest { private static readonly HttpMethod s_patchMethod = new("PATCH"); @@ -44,7 +46,7 @@ private static HttpRequestMessage CreateRequest(HttpMethod method, Uri url, obje { byte[] utf8Bytes = payload is string s ? 
Encoding.UTF8.GetBytes(s) : - JsonSerializer.SerializeToUtf8Bytes(payload, s_jsonSerializerOptions); + JsonSerializer.SerializeToUtf8Bytes(payload, JsonOptionsCache.Default); content = new ByteArrayContent(utf8Bytes); content.Headers.ContentType = new MediaTypeHeaderValue("application/json") { CharSet = "utf-8" }; @@ -52,13 +54,4 @@ private static HttpRequestMessage CreateRequest(HttpMethod method, Uri url, obje return content; } - - private static readonly JsonSerializerOptions s_jsonSerializerOptions = CreateSerializerOptions(); - - private static JsonSerializerOptions CreateSerializerOptions() - { - var jso = new JsonSerializerOptions(); - jso.Converters.Add(new ReadOnlyMemoryConverter()); - return jso; - } } diff --git a/dotnet/src/InternalUtilities/src/Http/NonDisposableHttpClientHandler.cs b/dotnet/src/InternalUtilities/src/Http/NonDisposableHttpClientHandler.cs deleted file mode 100644 index c2167f286937..000000000000 --- a/dotnet/src/InternalUtilities/src/Http/NonDisposableHttpClientHandler.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. -using System.Net.Http; - -/// -/// Represents a singleton implementation of that is not disposable. -/// -internal sealed class NonDisposableHttpClientHandler : HttpClientHandler -{ - /// - /// Private constructor to prevent direct instantiation of the class. - /// - private NonDisposableHttpClientHandler() - { - this.CheckCertificateRevocationList = true; - } - - /// - /// Gets the singleton instance of . - /// - public static NonDisposableHttpClientHandler Instance { get; } = new(); - - /// - /// Disposes the underlying resources held by the . - /// This implementation does nothing to prevent unintended disposal, as it may affect all references. - /// - /// True if called from , false if called from a finalizer. -#pragma warning disable CA2215 // Dispose methods should call base class dispose - protected override void Dispose(bool disposing) -#pragma warning restore CA2215 // Dispose methods should call base class dispose - { - // Do nothing if called explicitly from Dispose, as it may unintentionally affect all references. - // The base.Dispose(disposing) is not called to avoid invoking the disposal of HttpClientHandler resources. - // This implementation assumes that the HttpClientHandler is being used as a singleton and should not be disposed directly. - } -} diff --git a/dotnet/src/InternalUtilities/src/Linq/AsyncEnumerable.cs b/dotnet/src/InternalUtilities/src/Linq/AsyncEnumerable.cs index 8c6b081f7d03..844ae7e2f573 100644 --- a/dotnet/src/InternalUtilities/src/Linq/AsyncEnumerable.cs +++ b/dotnet/src/InternalUtilities/src/Linq/AsyncEnumerable.cs @@ -1,12 +1,15 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; +using Microsoft.SemanticKernel; // Used for compatibility with System.Linq.Async Nuget pkg namespace System.Linq; +[ExcludeFromCodeCoverage] internal static class AsyncEnumerable { public static IAsyncEnumerable Empty() => EmptyAsyncEnumerable.Instance; @@ -113,15 +116,8 @@ public static async ValueTask CountAsync(this IAsyncEnumerable source /// The return type of this operator differs from the corresponding operator on IEnumerable in order to retain asynchronous behavior. 
public static ValueTask AnyAsync(this IAsyncEnumerable source, Func predicate, CancellationToken cancellationToken = default) { - if (source == null) - { - throw new ArgumentNullException(nameof(source)); - } - - if (predicate == null) - { - throw new ArgumentNullException(nameof(predicate)); - } + Verify.NotNull(source); + Verify.NotNull(predicate); return Core(source, predicate, cancellationToken); diff --git a/dotnet/src/InternalUtilities/src/System/EnvExtensions.cs b/dotnet/src/InternalUtilities/src/System/EnvExtensions.cs index 7edc707bdd22..05cb235d7140 100644 --- a/dotnet/src/InternalUtilities/src/System/EnvExtensions.cs +++ b/dotnet/src/InternalUtilities/src/System/EnvExtensions.cs @@ -1,10 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. -#pragma warning disable IDE0130 // Namespace does not match folder structure -// ReSharper disable once CheckNamespace +using System.Diagnostics.CodeAnalysis; + namespace System; -#pragma warning restore IDE0130 +[ExcludeFromCodeCoverage] internal static class EnvExtensions { /// diff --git a/dotnet/src/InternalUtilities/src/System/InternalTypeConverter.cs b/dotnet/src/InternalUtilities/src/System/InternalTypeConverter.cs new file mode 100644 index 000000000000..bd92f686ab61 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/System/InternalTypeConverter.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Globalization; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides internal utility methods for converting types to strings with consideration for CultureInfo. +/// +[ExcludeFromCodeCoverage] +internal static class InternalTypeConverter +{ + /// + /// Converts the given object value to a string representation using the appropriate CultureInfo. + /// + /// The object to convert. + /// The CultureInfo to consider during conversion. + /// A string representation of the object value, considering the specified CultureInfo. + public static string? ConvertToString(object? value, CultureInfo? culture = null) + { + if (value == null) { return null; } + + var sourceType = value.GetType(); + + var converterDelegate = GetTypeToStringConverterDelegate(sourceType); + + return converterDelegate == null + ? value.ToString() + : converterDelegate(value, culture ?? CultureInfo.InvariantCulture); + } + + /// + /// Retrieves a type-to-string converter delegate for the specified source type. + /// + /// The source Type for which to retrieve the type-to-string converter delegate. + /// A Func delegate for converting the source type to a string, considering CultureInfo, or null if no suitable converter is found. + private static Func? GetTypeToStringConverterDelegate(Type sourceType) => + s_converters.GetOrAdd(sourceType, static sourceType => + { + // Strings just render as themselves. + if (sourceType == typeof(string)) + { + return (input, cultureInfo) => (string)input!; + } + + // Look up and use a type converter. + if (TypeConverterFactory.GetTypeConverter(sourceType) is TypeConverter converter && converter.CanConvertTo(typeof(string))) + { + return (input, cultureInfo) => + { + return converter.ConvertToString(context: null, cultureInfo, input); + }; + } + + return null; + }); + + /// Converter functions for converting types to strings. 
+ private static readonly ConcurrentDictionary?> s_converters = new(); +} diff --git a/dotnet/src/InternalUtilities/src/System/NonNullCollection.cs b/dotnet/src/InternalUtilities/src/System/NonNullCollection.cs new file mode 100644 index 000000000000..ae9efbe969b9 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/System/NonNullCollection.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides a collection of non-null items. +/// +[ExcludeFromCodeCoverage] +[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "This class is an internal utility.")] +internal sealed class NonNullCollection : IList, IReadOnlyList +{ + /// + /// The underlying list of items. + /// + private readonly List _items; + + /// + /// Initializes a new instance of the class. + /// + public NonNullCollection() => this._items = new(); + + /// + /// Initializes a new instance of the class. + /// + /// The initial collection of items to populate this collection. + public NonNullCollection(IEnumerable items) + { + Verify.NotNull(items); + this._items = new(items); + } + + /// + /// Gets or sets the item at the specified index in the collection. + /// + /// The index of the item to get or set. + /// The item at the specified index. + /// is null. + /// The was not valid for this collection. + public T this[int index] + { + get => this._items[index]; + set + { + Verify.NotNull(value); + this._items[index] = value; + } + } + + /// + /// Gets the number of items in the collection. + /// + public int Count => this._items.Count; + + /// + /// Adds an item to the collection. + /// + /// The item to add. + /// is null. + public void Add(T item) + { + Verify.NotNull(item); + this._items.Add(item); + } + + /// + /// Removes all items from the collection. + /// + public void Clear() => this._items.Clear(); + + /// + /// Determines whether an item is in the collection. + /// + /// The item to locate. + /// True if the item is found in the collection; otherwise, false. + /// is null. + public bool Contains(T item) + { + Verify.NotNull(item); + return this._items.Contains(item); + } + + /// + /// Copies all of the items in the collection to an array, starting at the specified destination array index. + /// + /// The destination array into which the items should be copied. + /// The zero-based index into at which copying should begin. + /// is null. + /// The number of items in the collection is greater than the available space from to the end of . + /// is less than 0. + public void CopyTo(T[] array, int arrayIndex) => this._items.CopyTo(array, arrayIndex); + + /// + /// Searches for the specified item and returns the index of the first occurrence. + /// + /// The item to locate. + /// The index of the first found occurrence of the specified item; -1 if the item could not be found. + public int IndexOf(T item) + { + Verify.NotNull(item); + return this._items.IndexOf(item); + } + + /// + /// Inserts an item into the collection at the specified index. + /// + /// The index at which the item should be inserted. + /// The item to insert. + /// is null. + public void Insert(int index, T item) + { + Verify.NotNull(item); + this._items.Insert(index, item); + } + + /// + /// Removes the first occurrence of the specified item from the collection. + /// + /// The item to remove from the collection. 
+ /// True if the item was successfully removed; false if it wasn't located in the collection. + /// is null. + public bool Remove(T item) + { + Verify.NotNull(item); + return this._items.Remove(item); + } + + /// + /// Removes the item at the specified index from the collection. + /// + /// The index of the item to remove. + public void RemoveAt(int index) => this._items.RemoveAt(index); + + bool ICollection.IsReadOnly => false; + + IEnumerator IEnumerable.GetEnumerator() => this._items.GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => this._items.GetEnumerator(); +} diff --git a/dotnet/src/InternalUtilities/src/System/TypeConverterFactory.cs b/dotnet/src/InternalUtilities/src/System/TypeConverterFactory.cs new file mode 100644 index 000000000000..2049dda894af --- /dev/null +++ b/dotnet/src/InternalUtilities/src/System/TypeConverterFactory.cs @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; + +namespace Microsoft.SemanticKernel; + +/// +/// Factory for creating TypeConverter instances based on a provided type. +/// +[ExcludeFromCodeCoverage] +internal static class TypeConverterFactory +{ + /// + /// Returns a TypeConverter instance for the specified type. + /// + /// The Type of the object to convert. + /// A TypeConverter instance if a suitable converter is found, otherwise null. + internal static TypeConverter? GetTypeConverter(Type type) + { + // In an ideal world, this would use TypeDescriptor.GetConverter. However, that is not friendly to + // any form of ahead-of-time compilation, as it could end up requiring functionality that was trimmed. + // Instead, we just use a hard-coded set of converters for the types we know about and then also support + // types that are explicitly attributed with TypeConverterAttribute. 
+
+        if (type == typeof(string)) { return new StringConverter(); }
+        if (type == typeof(byte)) { return new ByteConverter(); }
+        if (type == typeof(sbyte)) { return new SByteConverter(); }
+        if (type == typeof(bool)) { return new BooleanConverter(); }
+        if (type == typeof(ushort)) { return new UInt16Converter(); }
+        if (type == typeof(short)) { return new Int16Converter(); }
+        if (type == typeof(char)) { return new CharConverter(); }
+        if (type == typeof(uint)) { return new UInt32Converter(); }
+        if (type == typeof(int)) { return new Int32Converter(); }
+        if (type == typeof(ulong)) { return new UInt64Converter(); }
+        if (type == typeof(long)) { return new Int64Converter(); }
+        if (type == typeof(float)) { return new SingleConverter(); }
+        if (type == typeof(double)) { return new DoubleConverter(); }
+        if (type == typeof(decimal)) { return new DecimalConverter(); }
+        if (type == typeof(TimeSpan)) { return new TimeSpanConverter(); }
+        if (type == typeof(DateTime)) { return new DateTimeConverter(); }
+        if (type == typeof(DateTimeOffset)) { return new DateTimeOffsetConverter(); }
+        if (type == typeof(Uri)) { return new UriTypeConverter(); }
+        if (type == typeof(Guid)) { return new GuidConverter(); }
+        if (type.IsEnum) { return new EnumConverter(type); }
+
+        if (type.GetCustomAttribute<TypeConverterAttribute>() is TypeConverterAttribute tca &&
+            Type.GetType(tca.ConverterTypeName, throwOnError: false) is Type converterType &&
+            Activator.CreateInstance(converterType) is TypeConverter converter)
+        {
+            return converter;
+        }
+
+        return null;
+    }
+}
diff --git a/dotnet/src/InternalUtilities/src/Text/Json.cs b/dotnet/src/InternalUtilities/src/Text/Json.cs
deleted file mode 100644
index d85597ba3e1b..000000000000
--- a/dotnet/src/InternalUtilities/src/Text/Json.cs
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Text.Json;
-
-namespace Microsoft.SemanticKernel.Text;
-
-internal static class Json
-{
-    internal static string Serialize(object? o) => JsonSerializer.Serialize(o, s_options);
-
-    internal static byte[] SerializeToUtf8Bytes(object? o) => JsonSerializer.SerializeToUtf8Bytes(o, s_options);
-
-    internal static T? Deserialize<T>(string json) => JsonSerializer.Deserialize<T>(json, s_options);
-
-    #region private ================================================================================
-
-    private static readonly JsonSerializerOptions s_options = CreateOptions();
-
-    private static JsonSerializerOptions CreateOptions()
-    {
-        JsonSerializerOptions options = new()
-        {
-            WriteIndented = true,
-            MaxDepth = 20,
-            AllowTrailingCommas = true,
-            PropertyNameCaseInsensitive = true,
-            ReadCommentHandling = JsonCommentHandling.Skip,
-        };
-
-        options.Converters.Add(new ReadOnlyMemoryConverter());
-
-        return options;
-    }
-
-    #endregion
-}
diff --git a/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs b/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs
new file mode 100644
index 000000000000..c8278b4b06e5
--- /dev/null
+++ b/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs
@@ -0,0 +1,49 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Diagnostics.CodeAnalysis;
+using System.Text.Json;
+
+namespace Microsoft.SemanticKernel.Text;
+
+/// <summary>Caches common configurations of <see cref="JsonSerializerOptions"/>.</summary>
+/// <remarks>
+/// All of the <see cref="JsonSerializerOptions"/> instances include a converter for <see cref="ReadOnlyMemory{T}"/>.
+/// Once the System.Text.Json package is upgraded to 8.0+, this will no longer be
+/// necessary and the actual default can be used.
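A rough sketch of the usage pattern these cached options enable, under the assumption that callers of the removed Json helper switch to JsonSerializer with one of the instances defined just below (illustrative only; `value` and `MyType` are placeholders):

    // Write indented JSON with the shared options; read it back leniently
    // (trailing commas, comments, case-insensitive property names).
    string json = JsonSerializer.Serialize(value, JsonOptionsCache.WriteIndented);
    MyType? restored = JsonSerializer.Deserialize<MyType>(json, JsonOptionsCache.ReadPermissive);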
+/// +[ExcludeFromCodeCoverage] +internal static class JsonOptionsCache +{ + /// Singleton for . + public static ReadOnlyMemoryConverter ReadOnlyMemoryConverter { get; } = new(); + + /// + /// Cached instance for reading and writing JSON using the default settings. + /// + public static JsonSerializerOptions Default { get; } = new() + { + Converters = { ReadOnlyMemoryConverter }, + }; + + /// + /// Cached instance for writing JSON with indentation. + /// + public static JsonSerializerOptions WriteIndented { get; } = new() + { + WriteIndented = true, + Converters = { ReadOnlyMemoryConverter }, + }; + + /// + /// Cached instance for reading JSON in a permissive way, + /// including support for trailing commas, case-insensitive property names, and comments. + /// + public static JsonSerializerOptions ReadPermissive { get; } = new() + { + AllowTrailingCommas = true, + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + Converters = { ReadOnlyMemoryConverter }, + }; +} diff --git a/dotnet/src/InternalUtilities/src/Text/ReadOnlyMemoryConverter.cs b/dotnet/src/InternalUtilities/src/Text/ReadOnlyMemoryConverter.cs index ecce02508e23..1a754aa5524e 100644 --- a/dotnet/src/InternalUtilities/src/Text/ReadOnlyMemoryConverter.cs +++ b/dotnet/src/InternalUtilities/src/Text/ReadOnlyMemoryConverter.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; using System.Runtime.InteropServices; using System.Text.Json; using System.Text.Json.Serialization; @@ -12,6 +13,7 @@ namespace Microsoft.SemanticKernel.Text; // It should be removed once SK projects upgrade to System.Text.Json v8.0.0. /// Provides a converter for . +[ExcludeFromCodeCoverage] internal sealed class ReadOnlyMemoryConverter : JsonConverter> { /// An instance of a converter for float[] that all operations delegate to. diff --git a/dotnet/src/InternalUtilities/src/Text/StringExtensions.cs b/dotnet/src/InternalUtilities/src/Text/StringExtensions.cs deleted file mode 100644 index 8a6eb993bc5b..000000000000 --- a/dotnet/src/InternalUtilities/src/Text/StringExtensions.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; - -namespace Microsoft.SemanticKernel.Text; - -internal static class StringExtensions -{ - internal static string NormalizeLineEndings(this string src) - { -#if NET6_0_OR_GREATER - return src.ReplaceLineEndings("\n"); -#else - return src.Replace("\r\n", "\n"); -#endif - } - - public static bool IsNullOrEmpty([NotNullWhen(false)] this string? data) - { - return string.IsNullOrEmpty(data); - } - - public static bool IsNullOrWhitespace([NotNullWhen(false)] this string? data) - { - return string.IsNullOrWhiteSpace(data); - } -} diff --git a/dotnet/src/InternalUtilities/src/Type/TypeExtensions.cs b/dotnet/src/InternalUtilities/src/Type/TypeExtensions.cs new file mode 100644 index 000000000000..266ac9ac0b9a --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Type/TypeExtensions.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Tasks; + +namespace System; + +/// +/// Extensions methods for . +/// +[ExcludeFromCodeCoverage] +internal static class TypeExtensions +{ + /// + /// Tries to get the result type from a generic parameter. + /// + /// Return type. + /// The result type of the Nullable generic parameter. 
+ /// true if the result type was successfully retrieved; otherwise, false. + /// TODO [@teresaqhoang]: Issue #4202 Cache Generic Types Extraction - Handlebars + public static bool TryGetGenericResultType(this Type? returnType, out Type resultType) + { + resultType = typeof(object); + if (returnType is null) + { + return false; + } + + if (returnType.IsGenericType) + { + Type genericTypeDef = returnType.GetGenericTypeDefinition(); + + if (genericTypeDef == typeof(Task<>) + || genericTypeDef == typeof(Nullable<>) + || genericTypeDef == typeof(ValueTask<>)) + { + resultType = returnType.GetGenericArguments()[0]; + } + else if (genericTypeDef == typeof(IEnumerable<>) + || genericTypeDef == typeof(IList<>) + || genericTypeDef == typeof(ICollection<>)) + { + resultType = typeof(List<>).MakeGenericType(returnType.GetGenericArguments()[0]); + } + else if (genericTypeDef == typeof(IDictionary<,>)) + { + Type[] genericArgs = returnType.GetGenericArguments(); + resultType = typeof(Dictionary<,>).MakeGenericType(genericArgs[0], genericArgs[1]); + } + + return true; + } + + return false; + } +} diff --git a/dotnet/src/InternalUtilities/test/FunctionHelpers.cs b/dotnet/src/InternalUtilities/test/FunctionHelpers.cs deleted file mode 100644 index 8a1c6638a357..000000000000 --- a/dotnet/src/InternalUtilities/test/FunctionHelpers.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; - -namespace SemanticKernel.UnitTests; - -/// Test helpers for working with native functions. -internal static class FunctionHelpers -{ - /// - /// Invokes a function on a plugin instance via the kernel. - /// - public static Task CallViaKernelAsync( - object pluginInstance, - string methodName, - params (string Name, object Value)[] variables) - { - var kernel = new KernelBuilder().Build(); - - IDictionary functions = kernel.ImportFunctions(pluginInstance); - - SKContext context = kernel.CreateNewContext(); - foreach ((string Name, object Value) pair in variables) - { - context.Variables.Set(pair.Name, pair.Value.ToString()); - } - - return kernel.RunAsync(context.Variables, functions[methodName]); - } -} diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Action/ActionPlannerTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Action/ActionPlannerTests.cs index d57414742986..328827d2c0ea 100644 --- a/dotnet/src/Planners/Planners.Core.UnitTests/Action/ActionPlannerTests.cs +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Action/ActionPlannerTests.cs @@ -1,18 +1,11 @@ // Copyright (c) Microsoft. All rights reserved. 
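A quick illustration of the extension above, based on the branches in its implementation (hypothetical values, not taken from this change):

    // Task<string> unwraps to string; IEnumerable<int> maps to List<int>; non-generic types return false.
    typeof(Task<string>).TryGetGenericResultType(out Type taskResult);      // true, taskResult == typeof(string)
    typeof(IEnumerable<int>).TryGetGenericResultType(out Type enumResult);  // true, enumResult == typeof(List<int>)
    typeof(string).TryGetGenericResultType(out Type fallback);              // false, fallback == typeof(object)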
-using System.Globalization; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; using Moq; using Xunit; -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel.Planners.Action.UnitTests; -#pragma warning restore IDE0130 // Namespace does not match folder structure +namespace Microsoft.SemanticKernel.Planning.Action.UnitTests; public sealed class ActionPlannerTests { @@ -20,11 +13,11 @@ public sealed class ActionPlannerTests public async Task ExtractsAndDeserializesWellFormedJsonFromPlannerResultAsync() { // Arrange - var plugins = this.CreateMockFunctionCollection(); + var plugins = this.CreatePluginCollection(); - var kernel = this.CreateMockKernelAndFunctionFlowWithTestString(ValidPlanString, plugins); + var kernel = this.CreateKernel(ValidPlanString, plugins); - var planner = new ActionPlanner(kernel.Object); + var planner = new ActionPlanner(kernel); // Act var plan = await planner.CreatePlanAsync("goal"); @@ -43,28 +36,29 @@ public async Task InvalidJsonThrowsAsync() // Arrange string invalidJsonString = "<>"; - var kernel = this.CreateMockKernelAndFunctionFlowWithTestString(invalidJsonString); + var kernel = this.CreateKernel(invalidJsonString); - var planner = new ActionPlanner(kernel.Object); + var planner = new ActionPlanner(kernel); // Act & Assert - await Assert.ThrowsAsync(() => planner.CreatePlanAsync("goal")); + await Assert.ThrowsAsync(() => planner.CreatePlanAsync("goal")); } [Fact] public void UsesPromptDelegateWhenProvided() { // Arrange - var kernel = new Mock(); - kernel.Setup(x => x.LoggerFactory).Returns(NullLoggerFactory.Instance); + var kernel = this.CreateKernel(string.Empty); + var getPromptTemplateMock = new Mock>(); + var config = new ActionPlannerConfig() { GetPromptTemplate = getPromptTemplateMock.Object }; // Act - var planner = new ActionPlanner(kernel.Object, config); + var planner = new ActionPlanner(kernel, config); // Assert getPromptTemplateMock.Verify(x => x(), Times.Once()); @@ -76,42 +70,42 @@ public async Task MalformedJsonThrowsAsync() // Arrange // Extra opening brace before rationale - string invalidJsonString = @"Here is a possible plan to accomplish the user intent: - -{ - ""plan"": { { - ""rationale"": ""the list contains a function that allows to list pull requests"", - ""function"": ""GitHubPlugin.PullsList"", - ""parameters"": { - ""owner"": ""microsoft"", - ""repo"": ""semantic-kernel"", - ""state"": ""open"" - } - } -} - -This plan uses the `GitHubPlugin.PullsList` function to list the open pull requests for the `semantic-kernel` repository owned by `microsoft`. The `state` parameter is set to `""open""` to filter the results to only show open pull requests. 
-"; - - var kernel = this.CreateMockKernelAndFunctionFlowWithTestString(invalidJsonString); - - var planner = new ActionPlanner(kernel.Object); + string invalidJsonString = + @"Here is a possible plan to accomplish the user intent: + { + ""plan"": { { + ""rationale"": ""the list contains a function that allows to list pull requests"", + ""function"": ""GitHubPlugin.PullsList"", + ""parameters"": { + ""owner"": ""microsoft"", + ""repo"": ""semantic-kernel"", + ""state"": ""open"" + } + } + } + + This plan uses the `GitHubPlugin.PullsList` function to list the open pull requests for the `semantic-kernel` repository owned by `microsoft`. The `state` parameter is set to `""open""` to filter the results to only show open pull requests."; + + var kernel = this.CreateKernel(invalidJsonString); + + var planner = new ActionPlanner(kernel); // Act & Assert - await Assert.ThrowsAsync(async () => await planner.CreatePlanAsync("goal")); + await Assert.ThrowsAsync(async () => await planner.CreatePlanAsync("goal")); } [Fact] - public async Task ListOfFunctionsIncludesNativeAndSemanticFunctionsAsync() + public async Task ListOfFunctionsIncludesNativeAndPromptFunctionsAsync() { // Arrange - var plugins = this.CreateMockFunctionCollection(); - var kernel = this.CreateMockKernelAndFunctionFlowWithTestString(ValidPlanString, plugins); - var planner = new ActionPlanner(kernel.Object); - var context = kernel.Object.CreateNewContext(); + var plugins = this.CreatePluginCollection(); + + var kernel = this.CreateKernel(ValidPlanString, plugins); + + var planner = new ActionPlanner(kernel); // Act - var result = await planner.ListOfFunctionsAsync("goal", context); + var result = await planner.ListOfFunctionsAsync("goal"); // Assert var expected = $"// Send an e-mail.{Environment.NewLine}email.SendEmail{Environment.NewLine}// List pull requests.{Environment.NewLine}GitHubPlugin.PullsList{Environment.NewLine}// List repositories.{Environment.NewLine}GitHubPlugin.RepoList{Environment.NewLine}"; @@ -122,15 +116,17 @@ public async Task ListOfFunctionsIncludesNativeAndSemanticFunctionsAsync() public async Task ListOfFunctionsExcludesExcludedPluginsAsync() { // Arrange - var plugins = this.CreateMockFunctionCollection(); - var kernel = this.CreateMockKernelAndFunctionFlowWithTestString(ValidPlanString, plugins); + var plugins = this.CreatePluginCollection(); + + var kernel = this.CreateKernel(ValidPlanString, plugins); + var config = new ActionPlannerConfig(); config.ExcludedPlugins.Add("GitHubPlugin"); - var planner = new ActionPlanner(kernel.Object, config: config); - var context = kernel.Object.CreateNewContext(); + + var planner = new ActionPlanner(kernel, config: config); // Act - var result = await planner.ListOfFunctionsAsync("goal", context); + var result = await planner.ListOfFunctionsAsync("goal"); // Assert var expected = $"// Send an e-mail.{Environment.NewLine}email.SendEmail{Environment.NewLine}"; @@ -141,113 +137,79 @@ public async Task ListOfFunctionsExcludesExcludedPluginsAsync() public async Task ListOfFunctionsExcludesExcludedFunctionsAsync() { // Arrange - var plugins = this.CreateMockFunctionCollection(); - var kernel = this.CreateMockKernelAndFunctionFlowWithTestString(ValidPlanString, plugins); + var plugins = this.CreatePluginCollection(); + + var kernel = this.CreateKernel(ValidPlanString, plugins); + var config = new ActionPlannerConfig(); config.ExcludedFunctions.Add("PullsList"); - var planner = new ActionPlanner(kernel.Object, config: config); - var context = kernel.Object.CreateNewContext(); + 
+ var planner = new ActionPlanner(kernel, config: config); // Act - var result = await planner.ListOfFunctionsAsync("goal", context); + var result = await planner.ListOfFunctionsAsync("goal"); // Assert var expected = $"// Send an e-mail.{Environment.NewLine}email.SendEmail{Environment.NewLine}// List repositories.{Environment.NewLine}GitHubPlugin.RepoList{Environment.NewLine}"; Assert.Equal(expected, result); } - private Mock CreateMockKernelAndFunctionFlowWithTestString(string testPlanString, Mock? functions = null) + private Kernel CreateKernel(string testPlanString, KernelPluginCollection? plugins = null) { - if (functions is null) - { - functions = new Mock(); - functions.Setup(x => x.GetFunctionViews()).Returns(new List()); - } - var functionRunner = new Mock(); - var serviceProvider = new Mock(); - var serviceSelector = new Mock(); - var kernel = new Mock(); + plugins ??= new KernelPluginCollection(); - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(testPlanString), functions.Object); + var textResult = new Mock(); + textResult + .Setup(tr => tr.GetCompletionAsync(It.IsAny())) + .ReturnsAsync(testPlanString); - var context = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, functions: functions.Object); + var textGenerationResult = new List { textResult.Object }; - var mockFunctionFlowFunction = new Mock(); - mockFunctionFlowFunction.Setup(x => x.InvokeAsync( - It.IsAny(), - null, - default - )).Callback( - (c, s, ct) => c.Variables.Update("Hello world!") - ).Returns(() => Task.FromResult(new FunctionResult("FunctionName", "PluginName", returnContext, testPlanString))); + var textGeneration = new Mock(); + textGeneration + .Setup(tc => tc.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(textGenerationResult); - kernel.Setup(x => x.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(context); - kernel.Setup(x => x.Functions).Returns(functions.Object); - kernel.Setup(x => x.LoggerFactory).Returns(NullLoggerFactory.Instance); - - kernel.Setup(x => x.RegisterCustomFunction(It.IsAny())) - .Returns(mockFunctionFlowFunction.Object); + var serviceSelector = new Mock(); + serviceSelector + .Setup(ss => ss.SelectAIService(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns((textGeneration.Object, new PromptExecutionSettings())); - return kernel; - } + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(serviceSelector.Object); - // Method to create Mock objects - private static Mock CreateMockFunction(FunctionView functionView) - { - var mockFunction = new Mock(); - mockFunction.Setup(x => x.Describe()).Returns(functionView); - mockFunction.Setup(x => x.Name).Returns(functionView.Name); - mockFunction.Setup(x => x.PluginName).Returns(functionView.PluginName); - return mockFunction; + return new Kernel(serviceCollection.BuildServiceProvider(), plugins); } - private Mock CreateMockFunctionCollection() + private KernelPluginCollection CreatePluginCollection() { - var functions = new List<(string name, string pluginName, string description, bool isSemantic)>() + return new() { - ("SendEmail", "email", "Send an e-mail", false), - ("PullsList", "GitHubPlugin", "List pull requests", true), - ("RepoList", "GitHubPlugin", "List repositories", true), + new KernelPlugin("email", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "SendEmail", "Send an e-mail") + }), + new 
KernelPlugin("GitHubPlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "PullsList", "List pull requests"), + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "RepoList", "List repositories") + }) }; - - var functionsView = new List(); - var plugins = new Mock(); - foreach (var (name, pluginName, description, isSemantic) in functions) - { - var functionView = new FunctionView(name, pluginName, description); - var mockFunction = CreateMockFunction(functionView); - functionsView.Add(functionView); - - mockFunction.Setup(x => - x.InvokeAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((context, settings, CancellationToken) => - { - context.Variables.Update("MOCK FUNCTION CALLED"); - return Task.FromResult(new FunctionResult(name, pluginName, context)); - }); - plugins.Setup(x => x.GetFunction(pluginName, name)) - .Returns(mockFunction.Object); - ISKFunction? outFunc = mockFunction.Object; - plugins.Setup(x => x.TryGetFunction(pluginName, name, out outFunc)).Returns(true); - } - - plugins.Setup(x => x.GetFunctionViews()).Returns(functionsView); - return plugins; } - private const string ValidPlanString = @"Here is a possible plan to accomplish the user intent: -{ - ""plan"":{ - ""rationale"": ""the list contains a function that allows to list pull requests"", - ""function"": ""GitHubPlugin.PullsList"", - ""parameters"": { - ""owner"": ""microsoft"", - ""repo"": ""semantic-kernel"", - ""state"": ""open"" + private const string ValidPlanString = + @"Here is a possible plan to accomplish the user intent: + { + ""plan"":{ + ""rationale"": ""the list contains a function that allows to list pull requests"", + ""function"": ""GitHubPlugin.PullsList"", + ""parameters"": { + ""owner"": ""microsoft"", + ""repo"": ""semantic-kernel"", + ""state"": ""open"" + } + } } - } -} -This plan uses the `GitHubPlugin.PullsList` function to list the open pull requests for the `semantic-kernel` repository owned by `microsoft`. The `state` parameter is set to `""open""` to filter the results to only show open pull requests."; + This plan uses the `GitHubPlugin.PullsList` function to list the open pull requests for the `semantic-kernel` repository owned by `microsoft`. The `state` parameter is set to `""open""` to filter the results to only show open pull requests."; } diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Extensions/ReadOnlyFunctionCollectionExtensionsTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Extensions/ReadOnlyFunctionCollectionExtensionsTests.cs index e755b93005b4..15b9a49cd050 100644 --- a/dotnet/src/Planners/Planners.Core.UnitTests/Extensions/ReadOnlyFunctionCollectionExtensionsTests.cs +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Extensions/ReadOnlyFunctionCollectionExtensionsTests.cs @@ -1,14 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. 
using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; using Moq; using Xunit; -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel.Planners.UnitTests; -#pragma warning restore IDE0130 // Namespace does not match folder structure +namespace Microsoft.SemanticKernel.Planning.UnitTests; public class ReadOnlyFunctionCollectionExtensionsTests { @@ -34,11 +30,9 @@ private async IAsyncEnumerable GetAsyncEnumerableAsync(IEnumerable resu public async Task CanCallGetAvailableFunctionsWithNoFunctionsAsync(Type t) { // Arrange - var kernel = new Mock(); - - var variables = new ContextVariables(); - var functions = new FunctionCollection(); + var plugins = new KernelPluginCollection(); var cancellationToken = default(CancellationToken); + var kernel = new Kernel(new Mock().Object, plugins); // Arrange Mock Memory and Result var memory = new Mock(); @@ -57,17 +51,15 @@ public async Task CanCallGetAvailableFunctionsWithNoFunctionsAsync(Type t) x.SearchAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) .Returns(asyncEnumerable); - var functionRunner = new Mock(); - var serviceProvider = new Mock(); + var serviceProvider = new Mock(); var serviceSelector = new Mock(); // Arrange GetAvailableFunctionsAsync parameters - var context = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, variables); var config = InitializeConfig(t); var semanticQuery = "test"; // Act - var result = await context.Functions.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken); + var result = await kernel.Plugins.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken); // Assert Assert.NotNull(result); @@ -78,7 +70,7 @@ public async Task CanCallGetAvailableFunctionsWithNoFunctionsAsync(Type t) config.SemanticMemoryConfig = new(); // Act - result = await context.Functions.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken); + result = await kernel.Plugins.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken); // Assert Assert.NotNull(result); @@ -89,7 +81,7 @@ public async Task CanCallGetAvailableFunctionsWithNoFunctionsAsync(Type t) config.SemanticMemoryConfig = new() { Memory = memory.Object }; // Act - result = await context.Functions.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken); + result = await kernel.Plugins.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken); // Assert Assert.NotNull(result); @@ -105,18 +97,22 @@ public async Task CanCallGetAvailableFunctionsWithNoFunctionsAsync(Type t) public async Task CanCallGetAvailableFunctionsWithFunctionsAsync(Type t) { // Arrange - var kernel = new Mock(); - var variables = new ContextVariables(); var cancellationToken = default(CancellationToken); - // Arrange FunctionView - var functionMock = new Mock(); - var functionView = new FunctionView("functionName", "pluginName", "description"); - var nativeFunctionView = new FunctionView("nativeFunctionName", "pluginName", "description"); - var functionsView = new List() { functionView, nativeFunctionView }; - // Arrange Mock Memory and Result - var functions = new Mock(); + var plugins = new KernelPluginCollection() + { + new KernelPlugin("pluginName", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "functionName", "description"), + KernelFunctionFactory.CreateFromMethod(() => { }, 
"nativeFunctionName", "description"), + }), + }; + var functionView = new KernelFunctionMetadata(plugins["pluginName"]["functionName"].Metadata) { PluginName = "pluginName" }; + var nativeFunctionView = new KernelFunctionMetadata(plugins["pluginName"]["nativeFunctionName"].Metadata) { PluginName = "pluginName" }; + + var kernel = new Kernel(new Mock().Object, plugins); + var memoryQueryResult = new MemoryQueryResult( new MemoryRecordMetadata( @@ -134,39 +130,33 @@ public async Task CanCallGetAvailableFunctionsWithFunctionsAsync(Type t) x.SearchAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) .Returns(asyncEnumerable); - functions.Setup(x => x.TryGetFunction(It.IsAny(), It.IsAny(), out It.Ref.IsAny)).Returns(true); - functions.Setup(x => x.GetFunction(It.IsAny(), It.IsAny())).Returns(functionMock.Object); - functions.Setup(x => x.GetFunctionViews()).Returns(functionsView); - - var functionRunner = new Mock(); - var serviceProvider = new Mock(); + var serviceProvider = new Mock(); var serviceSelector = new Mock(); // Arrange GetAvailableFunctionsAsync parameters - var context = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, variables, functions.Object); var config = InitializeConfig(t); var semanticQuery = "test"; // Act - var result = (await context.Functions.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken)).ToList(); + var result = (await kernel.Plugins.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken)).ToList(); // Assert Assert.NotNull(result); Assert.Equal(2, result.Count); - Assert.Equal(functionView, result[0]); + Assert.Equivalent(functionView, result[0]); // Arrange update IncludedFunctions config.SemanticMemoryConfig = new() { Memory = memory.Object }; config.SemanticMemoryConfig.IncludedFunctions.UnionWith(new List<(string, string)> { ("pluginName", "nativeFunctionName") }); // Act - result = (await context.Functions.GetAvailableFunctionsAsync(config, semanticQuery)).ToList(); + result = (await kernel.Plugins.GetAvailableFunctionsAsync(config, semanticQuery)).ToList(); // Assert Assert.NotNull(result); Assert.Equal(2, result.Count); // IncludedFunctions should be added to the result - Assert.Equal(functionView, result[0]); - Assert.Equal(nativeFunctionView, result[1]); + Assert.Equivalent(functionView, result[0]); + Assert.Equivalent(nativeFunctionView, result[1]); } [Theory] @@ -176,19 +166,23 @@ public async Task CanCallGetAvailableFunctionsWithFunctionsAsync(Type t) public async Task CanCallGetAvailableFunctionsWithFunctionsWithRelevancyAsync(Type t) { // Arrange - var kernel = new Mock(); - - var variables = new ContextVariables(); var cancellationToken = default(CancellationToken); - // Arrange FunctionView - var functionMock = new Mock(); - var functionView = new FunctionView("functionName", "pluginName", "description"); - var nativeFunctionView = new FunctionView("nativeFunctionName", "pluginName", "description"); - var functionsView = new List() { functionView, nativeFunctionView }; - // Arrange Mock Memory and Result - var functions = new Mock(); + var plugins = new KernelPluginCollection() + { + new KernelPlugin("pluginName", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "functionName", "description"), + KernelFunctionFactory.CreateFromMethod(() => { }, "nativeFunctionName", "description"), + }), + }; + + var kernel = new Kernel(new Mock().Object, plugins); + + var functionView = new 
KernelFunctionMetadata(plugins["pluginName"]["functionName"].Metadata) { PluginName = "pluginName" }; + var nativeFunctionView = new KernelFunctionMetadata(plugins["pluginName"]["nativeFunctionName"].Metadata) { PluginName = "pluginName" }; + var memoryQueryResult = new MemoryQueryResult( new MemoryRecordMetadata( @@ -206,39 +200,33 @@ public async Task CanCallGetAvailableFunctionsWithFunctionsWithRelevancyAsync(Ty x.SearchAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) .Returns(asyncEnumerable); - functions.Setup(x => x.TryGetFunction(It.IsAny(), It.IsAny(), out It.Ref.IsAny)).Returns(true); - functions.Setup(x => x.GetFunction(It.IsAny(), It.IsAny())).Returns(functionMock.Object); - functions.Setup(x => x.GetFunctionViews()).Returns(functionsView); - - var functionRunner = new Mock(); - var serviceProvider = new Mock(); + var serviceProvider = new Mock(); var serviceSelector = new Mock(); // Arrange GetAvailableFunctionsAsync parameters - var context = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, variables, functions.Object); var config = InitializeConfig(t); config.SemanticMemoryConfig = new() { RelevancyThreshold = 0.78, Memory = memory.Object }; var semanticQuery = "test"; // Act - var result = (await context.Functions.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken)).ToList(); + var result = (await kernel.Plugins.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken)).ToList(); // Assert Assert.NotNull(result); Assert.Single(result); - Assert.Equal(functionView, result[0]); + Assert.Equivalent(functionView, result[0]); // Arrange update IncludedFunctions config.SemanticMemoryConfig.IncludedFunctions.UnionWith(new List<(string, string)> { ("pluginName", "nativeFunctionName") }); // Act - result = (await context.Functions.GetAvailableFunctionsAsync(config, semanticQuery)).ToList(); + result = (await kernel.Plugins.GetAvailableFunctionsAsync(config, semanticQuery)).ToList(); // Assert Assert.NotNull(result); Assert.Equal(2, result.Count); // IncludedFunctions should be added to the result - Assert.Equal(functionView, result[0]); - Assert.Equal(nativeFunctionView, result[1]); + Assert.Equivalent(functionView, result[0]); + Assert.Equivalent(nativeFunctionView, result[1]); } [Theory] @@ -248,15 +236,14 @@ public async Task CanCallGetAvailableFunctionsWithFunctionsWithRelevancyAsync(Ty public async Task CanCallGetAvailableFunctionsAsyncWithDefaultRelevancyAsync(Type t) { // Arrange - var kernel = new Mock(); - var functionRunner = new Mock(); - var serviceProvider = new Mock(); + var serviceProvider = new Mock(); var serviceSelector = new Mock(); - var variables = new ContextVariables(); - var functions = new FunctionCollection(); + var plugins = new KernelPluginCollection(); var cancellationToken = default(CancellationToken); + var kernel = new Kernel(new Mock().Object, plugins); + // Arrange Mock Memory and Result var memory = new Mock(); var memoryQueryResult = @@ -276,13 +263,12 @@ public async Task CanCallGetAvailableFunctionsAsyncWithDefaultRelevancyAsync(Typ .Returns(asyncEnumerable); // Arrange GetAvailableFunctionsAsync parameters - var context = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, variables); var config = InitializeConfig(t); config.SemanticMemoryConfig = new() { RelevancyThreshold = 0.78, Memory = memory.Object }; var semanticQuery = "test"; // Act - var result = await 
context.Functions.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken); + var result = await kernel.Plugins.GetAvailableFunctionsAsync(config, semanticQuery, null, cancellationToken); // Assert Assert.NotNull(result); diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Planners.Core.UnitTests.csproj b/dotnet/src/Planners/Planners.Core.UnitTests/Planners.Core.UnitTests.csproj index 0b772375dd48..8c75fc595bf6 100644 --- a/dotnet/src/Planners/Planners.Core.UnitTests/Planners.Core.UnitTests.csproj +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Planners.Core.UnitTests.csproj @@ -13,6 +13,7 @@ + diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanSerializationTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanSerializationTests.cs new file mode 100644 index 000000000000..2e0ec9372a91 --- /dev/null +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanSerializationTests.cs @@ -0,0 +1,405 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.Planning; +using Moq; +using Xunit; + +namespace Microsoft.SemanticKernel.Planners.UnitTests.Planning; + +public sealed class PlanSerializationTests +{ + private readonly Kernel _kernel = new(new Mock().Object); + + [Fact] + public void CanSerializePlan() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var expectedSteps = "\"steps\":[]"; + var plan = new Plan(goal); + + // Act + var serializedPlan = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan); + Assert.NotEmpty(serializedPlan); + Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); + Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void CanSerializePlanWithGoalAndSteps() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var expectedSteps = "\"steps\":[{"; + var function1 = KernelFunctionFactory.CreateFromMethod(() => true); + var function2 = KernelFunctionFactory.CreateFromMethod(() => true); + var plan = new Plan(goal, function1, function2); + + // Act + var serializedPlan = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan); + Assert.NotEmpty(serializedPlan); + Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); + Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void CanSerializePlanWithGoalAndSubPlans() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var expectedSteps = "\"steps\":[{"; + var plan = new Plan(goal, new Plan("Write a poem or joke"), new Plan("Send it in an e-mail to Kai")); + + // Act + var serializedPlan = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan); + Assert.NotEmpty(serializedPlan); + Assert.Contains($"\"description\":\"{goal}\"", serializedPlan, StringComparison.OrdinalIgnoreCase); + Assert.Contains("\"description\":\"Write a poem or joke\"", serializedPlan, StringComparison.OrdinalIgnoreCase); + Assert.Contains("\"description\":\"Send it in an e-mail to Kai\"", serializedPlan, StringComparison.OrdinalIgnoreCase); + Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void CanSerializePlanWithPlanStep() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + + // Arrange Mocks + var function = KernelFunctionFactory.CreateFromMethod(() => { }, 
"function"); + + plan.AddSteps(new Plan(function)); + + // Act + var serializedPlan = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan); + Assert.NotEmpty(serializedPlan); + Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); + + var deserializedPlan = Plan.FromJson(serializedPlan); + + Assert.NotNull(deserializedPlan); + Assert.Single(deserializedPlan.Steps); + Assert.Equal("function", deserializedPlan.Steps[0].Name); + } + + [Fact] + public void CanSerializePlanWithFunctionStep() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => { }, "function"); + + plan.AddSteps(function); + + // Act + var serializedPlan = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan); + Assert.NotEmpty(serializedPlan); + Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); + + var deserializedPlan = Plan.FromJson(serializedPlan); + + Assert.NotNull(deserializedPlan); + Assert.Single(deserializedPlan.Steps); + Assert.Equal("function", deserializedPlan.Steps[0].Name); + } + + [Fact] + public void CanSerializePlanWithFunctionSteps() + { + // Arrange// Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + + // Arrange + var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "function1"); + + var function2 = KernelFunctionFactory.CreateFromMethod(() => { }, "function2"); + + plan.AddSteps(function1, function2); + + // Act + var serializedPlan = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan); + Assert.NotEmpty(serializedPlan); + Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); + + var deserializedPlan = Plan.FromJson(serializedPlan); + + Assert.NotNull(deserializedPlan); + Assert.Equal(2, deserializedPlan.Steps.Count); + Assert.Equal("function1", deserializedPlan.Steps[0].Name); + Assert.Equal("function2", deserializedPlan.Steps[1].Name); + } + + [Fact] + public void CanSerializePlanWithSteps() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + + // Arrange + var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "function1"); + + var function2 = KernelFunctionFactory.CreateFromMethod(() => { }, "function2"); + + plan.AddSteps(new Plan(function1), new Plan(function2)); + + // Act + var serializedPlan = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan); + Assert.NotEmpty(serializedPlan); + } + + [Fact] + public async Task CanStepAndSerializePlanWithStepsAsync() + { + // Arrange + var plan = new Plan("Write a poem or joke and send it in an e-mail to Kai."); + + var function = KernelFunctionFactory.CreateFromMethod(() => { }, "function"); + + plan.AddSteps(function, function); + + var serializedPlan1 = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan1); + Assert.NotEmpty(serializedPlan1); + Assert.Contains("\"next_step_index\":0", serializedPlan1, StringComparison.OrdinalIgnoreCase); + + var result = await this._kernel.StepAsync("Some input", plan); + + // Act + var serializedPlan2 = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan2); + Assert.NotEmpty(serializedPlan2); + Assert.NotEqual(serializedPlan1, serializedPlan2); + Assert.Contains("\"next_step_index\":1", serializedPlan2, StringComparison.OrdinalIgnoreCase); + + result = await this._kernel.StepAsync(result); + var serializedPlan3 = 
plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan3); + Assert.NotEmpty(serializedPlan3); + Assert.NotEqual(serializedPlan1, serializedPlan3); + Assert.NotEqual(serializedPlan2, serializedPlan3); + Assert.Contains("\"next_step_index\":2", serializedPlan3, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task CanStepAndSerializePlanWithStepsAndContextAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + var contextVariables = new ContextVariables(planInput); + contextVariables.Set("variables", "foo"); + + static string method(ContextVariables localVariables) + { + localVariables.TryGetValue("variables", out string? v); + return localVariables.Input + v; + }; + var function = KernelFunctionFactory.CreateFromMethod(method, "function", "description"); + + plan.AddSteps(function, function); + + plan = await this._kernel.StepAsync(contextVariables, plan); + + // Act + var serializedPlan1 = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan1); + Assert.NotEmpty(serializedPlan1); + Assert.Contains("\"next_step_index\":1", serializedPlan1, StringComparison.OrdinalIgnoreCase); + + // Act + contextVariables.Set("variables", "bar"); + contextVariables.Update(string.Empty); + plan = await this._kernel.StepAsync(contextVariables, plan); + + // Assert + Assert.NotNull(plan); + Assert.Equal($"{planInput}foobar", plan.State.ToString()); + + // Act + var serializedPlan2 = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan2); + Assert.NotEmpty(serializedPlan2); + Assert.NotEqual(serializedPlan1, serializedPlan2); + Assert.Contains("\"next_step_index\":2", serializedPlan2, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task CanStepAndSerializeAndDeserializePlanWithStepsAndContextAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + var plugins = new KernelPluginCollection(); + + static string method(ContextVariables localVariables) + { + localVariables.TryGetValue("variables", out string? 
v); + return localVariables.Input + v; + }; + var function = KernelFunctionFactory.CreateFromMethod(method, "function", "description"); + + plugins.Add(new KernelPlugin("pluginName", new[] { function })); + + plan.AddSteps(function, function); + + var serializedPlan = plan.ToJson(); + + var cv = new ContextVariables(planInput); + cv.Set("variables", "foo"); + plan = await this._kernel.StepAsync(cv, plan); + + // Act + var serializedPlan1 = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan1); + Assert.NotEmpty(serializedPlan1); + Assert.NotEqual(serializedPlan, serializedPlan1); + Assert.Contains("\"next_step_index\":1", serializedPlan1, StringComparison.OrdinalIgnoreCase); + + // Act + cv.Set("variables", "bar"); + cv.Update(string.Empty); + + plan = Plan.FromJson(serializedPlan1, plugins); + plan = await this._kernel.StepAsync(cv, plan); + + // Assert + Assert.NotNull(plan); + Assert.Equal($"{planInput}foobar", plan.State.ToString()); + + // Act + var serializedPlan2 = plan.ToJson(); + + // Assert + Assert.NotNull(serializedPlan2); + Assert.NotEmpty(serializedPlan2); + Assert.NotEqual(serializedPlan1, serializedPlan2); + Assert.Contains("\"next_step_index\":2", serializedPlan2, StringComparison.OrdinalIgnoreCase); + } + + [Theory] + [InlineData(false)] + [InlineData(true)] + public void CanDeserializePlan(bool requireFunctions) + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + + // Arrange + var plugins = new KernelPluginCollection(); + + var mockFunction = KernelFunctionFactory.CreateFromMethod((string input) => input + input, "functionName"); + plugins.Add(new KernelPlugin("test", new[] { mockFunction })); + + plan.AddSteps(new Plan("Step1", mockFunction), new Plan(mockFunction)); + + // Act + var serializedPlan = plan.ToJson(); + var deserializedPlan = Plan.FromJson(serializedPlan, plugins, requireFunctions); + + // Assert + Assert.NotNull(deserializedPlan); + Assert.Equal(goal, deserializedPlan.Description); + + Assert.Equal(string.Join(",", plan.Outputs), + string.Join(",", deserializedPlan.Outputs)); + Assert.Equal(string.Join(",", plan.Parameters.Select(kv => $"{kv.Key}:{kv.Value}")), + string.Join(",", deserializedPlan.Parameters.Select(kv => $"{kv.Key}:{kv.Value}"))); + Assert.Equal(string.Join(",", plan.State.Select(kv => $"{kv.Key}:{kv.Value}")), + string.Join(",", deserializedPlan.State.Select(kv => $"{kv.Key}:{kv.Value}"))); + + Assert.Equal(plan.Steps[0].Name, deserializedPlan.Steps[0].Name); + Assert.Equal(plan.Steps[1].Name, deserializedPlan.Steps[1].Name); + } + + [Theory] + [InlineData(false)] + [InlineData(true)] + public void DeserializeWithMissingFunctions(bool requireFunctions) + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var stepOutput = "Output: The input was: "; + var plan = new Plan(goal); + + // Arrange + var plugins = new KernelPluginCollection(); + + var variables = new ContextVariables(stepOutput); + + var function = KernelFunctionFactory.CreateFromMethod((ContextVariables localVariables) => + { + variables.Update(variables.Input + localVariables.Input); + }, "function"); + + plan.AddSteps(new Plan("Step1", function), new Plan(function)); + + var serializedPlan = plan.ToJson(); + + if (requireFunctions) + { + // Act + Assert + Assert.Throws(() => Plan.FromJson(serializedPlan, plugins)); + } + else + { + // Act + var deserializedPlan = Plan.FromJson(serializedPlan, plugins, requireFunctions); + + // Assert + 
Assert.NotNull(deserializedPlan); + Assert.Equal(goal, deserializedPlan.Description); + + Assert.Equal(string.Join(",", plan.Outputs), + string.Join(",", deserializedPlan.Outputs)); + Assert.Equal(string.Join(",", plan.Parameters.Select(kv => $"{kv.Key}:{kv.Value}")), + string.Join(",", deserializedPlan.Parameters.Select(kv => $"{kv.Key}:{kv.Value}"))); + Assert.Equal(string.Join(",", plan.State.Select(kv => $"{kv.Key}:{kv.Value}")), + string.Join(",", deserializedPlan.State.Select(kv => $"{kv.Key}:{kv.Value}"))); + + Assert.Equal(plan.Steps[0].Name, deserializedPlan.Steps[0].Name); + Assert.Equal(plan.Steps[1].Name, deserializedPlan.Steps[1].Name); + } + } +} diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanTests.cs new file mode 100644 index 000000000000..bbbb264263fc --- /dev/null +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanTests.cs @@ -0,0 +1,1123 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Reflection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Events; +using Microsoft.SemanticKernel.Planning; +using Moq; +using Xunit; + +namespace Microsoft.SemanticKernel.Planners.UnitTests.Planning; + +public sealed class PlanTests +{ + [Fact] + public Task CanCreatePlanAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + // Act + var plan = new Plan(goal); + + // Assert + Assert.Equal(goal, plan.Description); + Assert.Empty(plan.Steps); + return Task.CompletedTask; + } + + [Fact] + public async Task CanExecutePlanWithContextAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var variables = new ContextVariables("Some input"); + + // Act + var result = await plan.InvokeAsync(kernel, variables); + + // Assert + Assert.NotNull(result); + Assert.Equal("Some input", variables.Input); + Assert.Null(result.GetValue()); + + plan = new Plan(goal); + // Act + variables.Update("other input"); + result = await plan.InvokeAsync(kernel, variables); + // Assert + Assert.NotNull(result); + Assert.Equal("other input", variables.Input); + Assert.Null(result.GetValue()); + } + + [Fact] + public async Task CanExecutePlanWithPlanStepAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var actualInput = string.Empty; + + var function = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + actualInput = variables.Input; + return "fake result"; + }, "function"); + + plan.AddSteps(new Plan(function)); + + // Act + var result = await plan.InvokeAsync(kernel, planInput); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake result", result.GetValue()); + Assert.Equal(planInput, actualInput); + } + + [Fact] + public async Task CanExecutePlanWithFunctionStepAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal(planInput, variables.Input); + return "fake result"; + }, 
"function"); + + plan.AddSteps(function); + + // Act + var result = await plan.InvokeAsync(kernel, planInput); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake result", result.GetValue()); + } + + [Fact] + public async Task CanExecutePlanWithFunctionStepsAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function1 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal(planInput, variables.Input); + return "fake result of function 1"; + }, "function1"); + + var function2 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal("fake result of function 1", variables.Input); + return "fake result of function2"; + }, "function2"); + + plan.AddSteps(function1, function2); + + // Act + var result = await plan.InvokeAsync(kernel, planInput); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake result of function2", result.GetValue()); + } + + [Fact] + public async Task CanExecutePlanWithStepsAndFunctionAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function1 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal(planInput, variables.Input); + return "fake result of function 1"; + }, "function1"); + + var function2 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal("fake result of function 1", variables.Input); + return "fake result of function2"; + }, "function2"); + + plan.AddSteps(new Plan(function1), new Plan(function2)); + + // Act + var result = await plan.InvokeAsync(kernel, planInput); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake result of function2", result.GetValue()); + } + + [Fact] + public async Task CanExecutePlanWithStepsAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function1 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal(planInput, variables.Input); + return "fake result of function 1"; + }, "function1"); + + var function2 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal("fake result of function 1", variables.Input); + return "fake result of function2"; + }, "function2"); + + plan.AddSteps(new Plan(function1), new Plan(function2)); + + // Act + var result = await plan.InvokeAsync(kernel, planInput); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake result of function2", result.GetValue()); + } + + [Fact] + public async Task CanStepPlanWithStepsAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function1 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal(planInput, variables.Input); + return "fake result of function 1"; + }, "function1"); + + var function2 = 
KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal("fake result of function 1", variables.Input); + return "fake result of function2"; + }, "function2"); + + plan.AddSteps(function1, function2); + + // Act + var result = await kernel.StepAsync(planInput, plan); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake result of function 1", result.State.ToString()); + + // Act + result = await kernel.StepAsync(result); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake result of function2", result.State.ToString()); + } + + [Fact] + public async Task CanStepPlanWithStepsAndContextAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function1 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal(planInput, variables.Input); + Assert.Equal("foo", variables["variables"]); + + return "fake result of function 1"; + }, "function1"); + + var function2 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + Assert.Equal("fake result of function 1", variables.Input); + Assert.Equal("bar", variables["variables"]); + + return "fake result of function2"; + }, "function2"); + + plan.AddSteps(function1, function2); + + // Act + var cv = new ContextVariables(planInput); + cv.Set("variables", "foo"); + plan = await kernel.StepAsync(cv, plan); + + // Assert + Assert.NotNull(plan); + Assert.Equal("fake result of function 1", plan.State.ToString()); + + // Act + cv.Set("variables", "bar"); + cv.Update(string.Empty); + plan = await kernel.StepAsync(cv, plan); + + // Assert + Assert.NotNull(plan); + Assert.Equal("fake result of function2", plan.State.ToString()); + } + + [Fact] + public async Task StepExceptionIsThrownAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + static void method() => throw new ArgumentException("Error message"); + var function = KernelFunctionFactory.CreateFromMethod(method, "function", "description"); + + plan.AddSteps(function, function); + + // Act + var cv = new ContextVariables(planInput); + await Assert.ThrowsAsync(async () => await kernel.StepAsync(cv, plan)); + } + + [Fact] + public async Task PlanStepExceptionIsThrownAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var planInput = "Some input"; + var plan = new Plan(goal); + + // Arrange + var logger = new Mock(); + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + static void method() => throw new ArgumentException("Error message"); + var function = KernelFunctionFactory.CreateFromMethod(method, "function", "description"); + + plan.AddSteps(new Plan(function), new Plan(function)); + + // Act + var cv = new ContextVariables(planInput); + await Assert.ThrowsAsync(async () => await kernel.StepAsync(cv, plan)); + } + + [Fact] + public async Task CanExecutePlanWithTreeStepsAsync() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + var subPlan = new Plan("Write a poem or joke"); + + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var childFunction1 = 
KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return "Child 1 output!" + variables.Input; + }, + "childFunction1"); + + var childFunction2 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return "Child 2 is happy about " + variables.Input; + }, + "childFunction2"); + + var childFunction3 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return "Child 3 heard " + variables.Input; + }, + "childFunction3"); + + var nodeFunction1 = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return variables.Input + " - this just happened."; + }, + "nodeFunction1"); + + subPlan.AddSteps(childFunction1, childFunction2, childFunction3); + plan.AddSteps(subPlan); + plan.AddSteps(nodeFunction1); + + // Act + while (plan.HasNextStep) + { + plan = await kernel.StepAsync(plan); + } + + // Assert + Assert.NotNull(plan); + Assert.Equal("Child 3 heard Child 2 is happy about Child 1 output!Write a poem or joke - this just happened.", plan.State.ToString()); + } + + [Fact] + public void CanCreatePlanWithGoalAndSteps() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var function1 = KernelFunctionFactory.CreateFromMethod(() => true); + var function2 = KernelFunctionFactory.CreateFromMethod(() => true); + var plan = new Plan(goal, function1, function2); + + // Assert + Assert.NotNull(plan); + Assert.Equal(goal, plan.Description); + Assert.Equal(2, plan.Steps.Count); + } + + [Fact] + public void CanCreatePlanWithGoalAndSubPlans() + { + // Arrange + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal, new Plan("Write a poem or joke"), new Plan("Send it in an e-mail to Kai")); + + // Assert + Assert.NotNull(plan); + Assert.Equal(goal, plan.Description); + Assert.Equal(2, plan.Steps.Count); + } + + [Fact] + public async Task CanExecutePlanWithOneStepAndStateAsync() + { + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return "Here is a poem about " + variables.Input; + }, + "function"); + + var plan = new Plan(function); + plan.State.Set("input", "Cleopatra"); + + // Act + var result = await plan.InvokeAsync(kernel); + + // Assert + Assert.NotNull(result); + Assert.Equal("Here is a poem about Cleopatra", result.GetValue()); + } + + [Fact] + public async Task CanExecutePlanWithStateAsync() + { + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + variables.TryGetValue("type", out string? t); + return $"Here is a {t} about " + variables.Input; + }, + "function"); + + var planStep = new Plan(function); + planStep.Parameters.Set("type", string.Empty); + + var plan = new Plan(string.Empty); + plan.AddSteps(planStep); + plan.State.Set("input", "Cleopatra"); + plan.State.Set("type", "poem"); + + // Act + var result = await plan.InvokeAsync(kernel); + + // Assert + Assert.NotNull(result); + Assert.Equal("Here is a poem about Cleopatra", result.GetValue()); + } + + [Fact] + public async Task CanExecutePlanWithCustomContextAsync() + { + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + variables.TryGetValue("type", out string? 
t); + return $"Here is a {t} about " + variables.Input; + }, + "function"); + + var plan = new Plan(function); + plan.State.Set("input", "Cleopatra"); + plan.State.Set("type", "poem"); + + // Act + var result = await plan.InvokeAsync(kernel); + + // Assert + Assert.NotNull(result); + Assert.Equal("Here is a poem about Cleopatra", result.GetValue()); + + plan = new Plan(function); + plan.State.Set("input", "Cleopatra"); + plan.State.Set("type", "poem"); + + var variablesOverride = new ContextVariables(); + variablesOverride.Set("type", "joke"); + variablesOverride.Update("Medusa"); + + // Act + result = await plan.InvokeAsync(kernel, variablesOverride); + + // Assert + Assert.NotNull(result); + Assert.Equal("Here is a joke about Medusa", result.GetValue()); + } + + [Fact] + public async Task CanExecutePlanWithCustomStateAsync() + { + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + variables.TryGetValue("type", out string? t); + return $"Here is a {t} about " + variables.Input; + }, + "function"); + + var planStep = new Plan(function); + planStep.Parameters.Set("type", string.Empty); + var plan = new Plan("A plan"); + plan.State.Set("input", "Medusa"); + plan.State.Set("type", "joke"); + plan.AddSteps(planStep); + + // Act + var result = await plan.InvokeAsync(kernel); + + // Assert + Assert.NotNull(result); + Assert.Equal("Here is a joke about Medusa", result.GetValue()); + + planStep = new Plan(function); + plan = new Plan("A plan"); + planStep.Parameters.Set("input", "Medusa"); + planStep.Parameters.Set("type", "joke"); + plan.State.Set("input", "Cleopatra"); // state input will not override parameter + plan.State.Set("type", "poem"); + plan.AddSteps(planStep); + + // Act + result = await plan.InvokeAsync(kernel); + + // Assert + Assert.NotNull(result); + Assert.Equal("Here is a poem about Medusa", result.GetValue()); + + planStep = new Plan(function); + plan = new Plan("A plan"); + planStep.Parameters.Set("input", "Cleopatra"); + planStep.Parameters.Set("type", "poem"); + plan.AddSteps(planStep); + + var variablesOverride = new ContextVariables(); + variablesOverride.Set("type", "joke"); + variablesOverride.Update("Medusa"); // context input will not override parameters + + // Act + result = await plan.InvokeAsync(kernel, variablesOverride); + + // Assert + Assert.NotNull(result); + Assert.Equal("Here is a joke about Cleopatra", result.GetValue()); + } + + [Fact] + public async Task CanExecutePlanWithJoinedResultAsync() + { + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var outlineFunction = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return $"Here is a {variables["chapterCount"]} chapter outline about " + variables.Input; + }, + "outlineFunction"); + + var elementAtIndexFunction = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return $"Outline section #{variables["index"]} of {variables["count"]}: " + variables.Input; + }, + "elementAtIndexFunction"); + + var novelChapterFunction = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return $"Chapter #{variables["chapterIndex"]}: {variables.Input}\nTheme:{variables["theme"]}\nPreviously:{variables["previousChapter"]}"; + }, + "novelChapterFunction"); + + var plan = new Plan("A plan with steps that alternate appending to the plan result."); + + // Steps: + // - 
WriterPlugin.NovelOutline chapterCount='3' INPUT='A group of kids in a club called 'The Thinking Caps' that solve mysteries and puzzles using their creativity and logic.' endMarker='' => OUTLINE + // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='0' => CHAPTER_1_SYNOPSIS + // - WriterPlugin.NovelChapter chapterIndex='1' previousChapter='' INPUT='$CHAPTER_1_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_1 + // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='1' => CHAPTER_2_SYNOPSIS + // - WriterPlugin.NovelChapter chapterIndex='2' previousChapter='$CHAPTER_1_SYNOPSIS' INPUT='$CHAPTER_2_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_2 + // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='2' => CHAPTER_3_SYNOPSIS + // - WriterPlugin.NovelChapter chapterIndex='3' previousChapter='$CHAPTER_2_SYNOPSIS' INPUT='$CHAPTER_3_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_3 + var planStep = new Plan(outlineFunction); + planStep.Parameters.Set("input", + "NovelOutline function input."); + planStep.Parameters.Set("chapterCount", "3"); + planStep.Outputs.Add("OUTLINE"); + plan.AddSteps(planStep); + + planStep = new Plan(elementAtIndexFunction); + planStep.Parameters.Set("count", "3"); + planStep.Parameters.Set("INPUT", "$OUTLINE"); + planStep.Parameters.Set("index", "0"); + planStep.Outputs.Add("CHAPTER_1_SYNOPSIS"); + plan.AddSteps(planStep); + + planStep = new Plan(novelChapterFunction); + planStep.Parameters.Set("chapterIndex", "1"); + planStep.Parameters.Set("previousChapter", " "); + planStep.Parameters.Set("INPUT", "$CHAPTER_1_SYNOPSIS"); + planStep.Parameters.Set("theme", "Children's mystery"); + planStep.Outputs.Add("RESULT__CHAPTER_1"); + plan.Outputs.Add("RESULT__CHAPTER_1"); + plan.AddSteps(planStep); + + planStep = new Plan(elementAtIndexFunction); + planStep.Parameters.Set("count", "3"); + planStep.Parameters.Set("INPUT", "$OUTLINE"); + planStep.Parameters.Set("index", "1"); + planStep.Outputs.Add("CHAPTER_2_SYNOPSIS"); + plan.AddSteps(planStep); + + planStep = new Plan(novelChapterFunction); + planStep.Parameters.Set("chapterIndex", "2"); + planStep.Parameters.Set("previousChapter", "$CHAPTER_1_SYNOPSIS"); + planStep.Parameters.Set("INPUT", "$CHAPTER_2_SYNOPSIS"); + planStep.Parameters.Set("theme", "Children's mystery"); + planStep.Outputs.Add("RESULT__CHAPTER_2"); + plan.Outputs.Add("RESULT__CHAPTER_2"); + plan.AddSteps(planStep); + + planStep = new Plan(elementAtIndexFunction); + planStep.Parameters.Set("count", "3"); + planStep.Parameters.Set("INPUT", "$OUTLINE"); + planStep.Parameters.Set("index", "2"); + planStep.Outputs.Add("CHAPTER_3_SYNOPSIS"); + plan.AddSteps(planStep); + + planStep = new Plan(novelChapterFunction); + planStep.Parameters.Set("chapterIndex", "3"); + planStep.Parameters.Set("previousChapter", "$CHAPTER_2_SYNOPSIS"); + planStep.Parameters.Set("INPUT", "$CHAPTER_3_SYNOPSIS"); + planStep.Parameters.Set("theme", "Children's mystery"); + planStep.Outputs.Add("CHAPTER_3"); + plan.Outputs.Add("CHAPTER_3"); + plan.AddSteps(planStep); + + // Act + var result = await plan.InvokeAsync(kernel); + + var expected = + @"Chapter #1: Outline section #0 of 3: Here is a 3 chapter outline about NovelOutline function input. +Theme:Children's mystery +Previously: +Chapter #2: Outline section #1 of 3: Here is a 3 chapter outline about NovelOutline function input. +Theme:Children's mystery +Previously:Outline section #0 of 3: Here is a 3 chapter outline about NovelOutline function input. 
+Chapter #3: Outline section #2 of 3: Here is a 3 chapter outline about NovelOutline function input. +Theme:Children's mystery +Previously:Outline section #1 of 3: Here is a 3 chapter outline about NovelOutline function input."; + + // Assert + var res = result.GetValue(); + Assert.Equal(expected, result.GetValue()); + Assert.True(result.TryGetMetadataValue("RESULT__CHAPTER_1", out var chapter1)); + Assert.True(result.TryGetMetadataValue("RESULT__CHAPTER_2", out var chapter2)); + Assert.True(result.TryGetMetadataValue("CHAPTER_3", out var chapter3)); + Assert.False(result.TryGetMetadataValue("CHAPTER_3_SYNOPSIS", out var chapter3Synopsis)); + } + + [Fact] + public async Task CanExecutePlanWithExpandedAsync() + { + // Arrange + var (kernel, serviceProvider, serviceSelector) = this.SetupKernel(); + + var function = KernelFunctionFactory.CreateFromMethod((ContextVariables variables) => + { + return $"Here is a payload '{variables["payload"]}' for " + variables.Input; + }, + "function"); + + var plan = new Plan("A plan with steps that have variables with a $ in them but not associated with an output"); + + var planStep = new Plan(function); + planStep.Parameters.Set("input", "Function input."); + planStep.Parameters.Set("payload", @"{""prop"":""value"", ""$prop"": 3, ""prop2"": ""my name is $pop and $var""}"); + plan.AddSteps(planStep); + plan.State.Set("var", "foobar"); + + // Act + var result = await plan.InvokeAsync(kernel); + + var expected = @"Here is a payload '{""prop"":""value"", ""$prop"": 3, ""prop2"": ""my name is $pop and foobar""}' for Function input."; + + // Assert + Assert.Equal(expected, result.GetValue()); + } + + [Fact] + public async Task CanPlanStepsTriggerKernelEventsAsync() + { + List functions = new(); + + // Arrange + static string Function2() => "Poem"; + functions.Add(KernelFunctionFactory.CreateFromMethod(Method(Function2), functionName: "WritePoem")); + + static string Function3() => "Sent Email"; + functions.Add(KernelFunctionFactory.CreateFromMethod(Method(Function3), functionName: "SendEmail")); + + var goal = "Write a poem or joke and send it in an e-mail to Kai."; + var plan = new Plan(goal); + plan.AddSteps(functions.ToArray()); + + var expectedInvocations = 2; + var sut = new Kernel(); + + // 1 - Plan - Write poem and send email goal + // 2 - Plan - Step 1 - WritePoem + // 3 - Plan - Step 2 - SendEmail + + var invokingCalls = 0; + var invokedCalls = 0; + var invokingListFunctions = new List(); + var invokedListFunctions = new List(); + void FunctionInvoking(object? sender, FunctionInvokingEventArgs e) + { + invokingListFunctions.Add(e.Function.Metadata); + invokingCalls++; + } + + void FunctionInvoked(object?
sender, FunctionInvokedEventArgs e) + { + invokedListFunctions.Add(e.Function.Metadata); + invokedCalls++; + } + + sut.FunctionInvoking += FunctionInvoking; + sut.FunctionInvoked += FunctionInvoked; + + // Act + var result = await plan.InvokeAsync(sut, "PlanInput"); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedInvocations, invokingCalls); + Assert.Equal(expectedInvocations, invokedCalls); + + // Expected invoking sequence + Assert.Equal(invokingListFunctions[0].Name, functions[0].Name); + Assert.Equal(invokingListFunctions[1].Name, functions[1].Name); + + // Expected invoked sequence + Assert.Equal(invokedListFunctions[0].Name, functions[0].Name); + Assert.Equal(invokedListFunctions[1].Name, functions[1].Name); + } + + [Fact] + public async Task PlanIsCancelledWhenInvokingHandlerTriggersCancelAsync() + { + // Arrange + this.PrepareKernelAndPlan(out var sut, out var plan); + + var expectedInvokingHandlerInvocations = 1; + var expectedInvokedHandlerInvocations = 0; + var invokingCalls = 0; + var invokedCalls = 0; + var invokingListFunctions = new List(); + var invokedListFunctions = new List(); + + void FunctionInvoking(object? sender, FunctionInvokingEventArgs e) + { + invokingListFunctions.Add(e.Function.Metadata); + invokingCalls++; + + e.Cancel(); + } + + void FunctionInvoked(object? sender, FunctionInvokedEventArgs e) + { + invokedListFunctions.Add(e.Function.Metadata); + invokedCalls++; + } + + sut.FunctionInvoking += FunctionInvoking; + sut.FunctionInvoked += FunctionInvoked; + + // Act + var result = await plan.InvokeAsync(sut, "PlanInput"); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedInvokingHandlerInvocations, invokingCalls); + Assert.Equal(expectedInvokedHandlerInvocations, invokedCalls); + + // Expected invoking sequence + Assert.Equal(invokingListFunctions[0].Name, plan.Steps[0].Name); + Assert.Equal(expectedInvokingHandlerInvocations, invokingListFunctions.Count); + + // Expected invoked sequence + Assert.Equal(expectedInvokedHandlerInvocations, invokedListFunctions.Count); + } + + [Fact] + public async Task PlanStopsAtTheStepWhenInvokingHandlerTriggersCancelAsync() + { + // Arrange + this.PrepareKernelAndPlan(out var sut, out var plan); + + var expectedInvokingHandlerInvocations = 1; + var expectedInvokedHandlerInvocations = 0; + var invokingCalls = 0; + var invokedCalls = 0; + var invokingListFunctions = new List(); + var invokedListFunctions = new List(); + + void FunctionInvoking(object? sender, FunctionInvokingEventArgs e) + { + invokingListFunctions.Add(e.Function.Metadata); + invokingCalls++; + + if (e.Function.Name == "WritePoem") + { + e.Cancel(); + } + } + + void FunctionInvoked(object? 
sender, FunctionInvokedEventArgs e) + { + invokedListFunctions.Add(e.Function.Metadata); + invokedCalls++; + } + + sut.FunctionInvoking += FunctionInvoking; + sut.FunctionInvoked += FunctionInvoked; + + // Act + var result = await plan.InvokeAsync(sut, "PlanInput"); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedInvokingHandlerInvocations, invokingCalls); + Assert.Equal(expectedInvokedHandlerInvocations, invokedCalls); + + // Expected invoking sequence + Assert.Equal(invokingListFunctions[0].Name, plan.Steps[0].Name); + Assert.Equal(expectedInvokingHandlerInvocations, invokingListFunctions.Count); + + // Expected invoked sequence + Assert.Equal(expectedInvokedHandlerInvocations, invokedListFunctions.Count); + + // Aborting at any step of a plan, will invalidate the full plan result + Assert.Null(result.GetValue()); + } + + [Fact] + public async Task PlanStopsAtTheStepWhenInvokedHandlerTriggersCancelAsync() + { + // Arrange + this.PrepareKernelAndPlan(out var sut, out var plan); + + var expectedInvokingHandlerInvocations = 1; + var expectedInvokedHandlerInvocations = 1; + var invokingCalls = 0; + var invokedCalls = 0; + var invokingListFunctions = new List(); + var invokedListFunctions = new List(); + + void FunctionInvoking(object? sender, FunctionInvokingEventArgs e) + { + invokingListFunctions.Add(e.Function.Metadata); + invokingCalls++; + } + + void FunctionInvoked(object? sender, FunctionInvokedEventArgs e) + { + invokedListFunctions.Add(e.Function.Metadata); + invokedCalls++; + + if (e.Function.Name == "WritePoem") + { + e.Cancel(); + } + } + + sut.FunctionInvoking += FunctionInvoking; + sut.FunctionInvoked += FunctionInvoked; + + // Act + var result = await plan.InvokeAsync(sut, "PlanInput"); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedInvokingHandlerInvocations, invokingCalls); + Assert.Equal(expectedInvokedHandlerInvocations, invokedCalls); + + // Expected invoking sequence + Assert.Equal(invokingListFunctions[0].Name, plan.Steps[0].Name); + Assert.Equal(expectedInvokingHandlerInvocations, invokingListFunctions.Count); + + // Expected invoked sequence + Assert.Equal(expectedInvokedHandlerInvocations, invokedListFunctions.Count); + Assert.Equal(invokedListFunctions[0].Name, plan.Steps[0].Name); + + // Aborting in invoked of the first step will abort the result and + // the plan will render no result as no step succeeded previously. + Assert.Null(result.GetValue()); + } + + [Fact] + public async Task PlanStopsAtFinalStepWhenInvokedHandlerTriggersCancelAsync() + { + // Arrange + this.PrepareKernelAndPlan(out var sut, out var plan); + + var expectedInvokingHandlerInvocations = 2; + var expectedInvokedHandlerInvocations = 2; + var invokingCalls = 0; + var invokedCalls = 0; + var invokingListFunctions = new List(); + var invokedListFunctions = new List(); + + void FunctionInvoking(object? sender, FunctionInvokingEventArgs e) + { + invokingListFunctions.Add(e.Function.Metadata); + invokingCalls++; + } + + void FunctionInvoked(object? 
sender, FunctionInvokedEventArgs e) + { + invokedListFunctions.Add(e.Function.Metadata); + invokedCalls++; + + if (e.Function.Name == "SendEmail") + { + e.Cancel(); + } + } + + sut.FunctionInvoking += FunctionInvoking; + sut.FunctionInvoked += FunctionInvoked; + + // Act + var result = await plan.InvokeAsync(sut, "PlanInput"); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedInvokingHandlerInvocations, invokingCalls); + Assert.Equal(expectedInvokedHandlerInvocations, invokedCalls); + + // Expected invoking sequence + Assert.Equal(invokingListFunctions[0].Name, plan.Steps[0].Name); + Assert.Equal(invokingListFunctions[1].Name, plan.Steps[1].Name); + Assert.Equal(expectedInvokingHandlerInvocations, invokingListFunctions.Count); + + // Expected invoked sequence + Assert.Equal(expectedInvokedHandlerInvocations, invokedListFunctions.Count); + Assert.Equal(invokedListFunctions[0].Name, plan.Steps[0].Name); + Assert.Equal(invokedListFunctions[1].Name, plan.Steps[1].Name); + + // Aborting the last step in invoked will stop the plan result + // and return the previously succeeded step result value. + Assert.Equal("WritePoem", result.GetValue()); + } + + [Fact(Skip = "Skipping is currently not supported for plans")] + public async Task PlanSkippingFirstStepShouldGiveSendStepResultAsync() + { + // Arrange + this.PrepareKernelAndPlan(out var sut, out var plan); + + var expectedInvokingHandlerInvocations = 3; + var expectedInvokedHandlerInvocations = 2; + var invokingCalls = 0; + var invokedCalls = 0; + var invokingListFunctions = new List(); + var invokedListFunctions = new List(); + + void FunctionInvoking(object? sender, FunctionInvokingEventArgs e) + { + invokingListFunctions.Add(e.Function.Metadata); + invokingCalls++; + + if (e.Function.Name == "WritePoem") + { + e.Skip(); + } + } + + void FunctionInvoked(object? sender, FunctionInvokedEventArgs e) + { + invokedListFunctions.Add(e.Function.Metadata); + invokedCalls++; + } + + sut.FunctionInvoking += FunctionInvoking; + sut.FunctionInvoked += FunctionInvoked; + + // Act + var result = await plan.InvokeAsync(sut, "PlanInput"); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedInvokingHandlerInvocations, invokingCalls); + Assert.Equal(expectedInvokedHandlerInvocations, invokedCalls); + + // Expected invoking sequence + Assert.Equal(invokingListFunctions[0].Name, plan.Name); + Assert.Equal(invokingListFunctions[1].Name, plan.Steps[0].Name); + Assert.Equal(invokingListFunctions[2].Name, plan.Steps[1].Name); + Assert.Equal(expectedInvokingHandlerInvocations, invokingListFunctions.Count); + + // Expected invoked sequence + Assert.Equal(expectedInvokedHandlerInvocations, invokedListFunctions.Count); + + // Skipped the first step (will not trigger invoked for it) + Assert.Equal(invokedListFunctions[0].Name, plan.Steps[1].Name); + Assert.Equal("SendEmail", result.GetValue()); + } + + [Fact] + public async Task PlanStopsAtTheMiddleStepWhenHandlerTriggersInvokingCancelAsync() + { + // Arrange + this.PrepareKernelAndPlan(out var sut, out var plan); + + var expectedInvokingHandlerInvocations = 2; + var expectedInvokedHandlerInvocations = 1; + var invokingCalls = 0; + var invokedCalls = 0; + var invokingListFunctions = new List(); + var invokedListFunctions = new List(); + + void FunctionInvoking(object? sender, FunctionInvokingEventArgs e) + { + invokingListFunctions.Add(e.Function.Metadata); + invokingCalls++; + + if (e.Function.Name == "SendEmail") + { + e.Cancel(); + } + } + + void FunctionInvoked(object?
sender, FunctionInvokedEventArgs e) + { + invokedListFunctions.Add(e.Function.Metadata); + invokedCalls++; + } + + sut.FunctionInvoking += FunctionInvoking; + sut.FunctionInvoked += FunctionInvoked; + + // Act + var result = await plan.InvokeAsync(sut, "PlanInput"); + + // Assert + Assert.NotNull(result); + Assert.Equal(expectedInvokingHandlerInvocations, invokingCalls); + Assert.Equal(expectedInvokedHandlerInvocations, invokedCalls); + + // Expected invoking sequence + Assert.Equal(invokingListFunctions[0].Name, plan.Steps[0].Name); + Assert.Equal(invokingListFunctions[1].Name, plan.Steps[1].Name); + Assert.Equal(expectedInvokingHandlerInvocations, invokingListFunctions.Count); + + // Expected invoked sequence + Assert.Equal(expectedInvokedHandlerInvocations, invokedListFunctions.Count); + + // Cancelling the second step does not block triggering "invoked" for the first step. + Assert.Equal(invokedListFunctions[0].Name, plan.Steps[0].Name); + + // Aborting any step of a plan will render the value of the last executed step + Assert.Equal("WritePoem", result.GetValue()); + } + + private void PrepareKernelAndPlan(out Kernel kernel, out Plan plan) + { + kernel = new Kernel(); + + plan = new Plan("Write a poem or joke and send it in an e-mail to Kai."); + plan.AddSteps(new[] + { + kernel.CreateFunctionFromMethod(() => "WritePoem", "WritePoem"), + kernel.CreateFunctionFromMethod(() => "SendEmail", "SendEmail"), + }); + + // 1 - Plan - Write poem and send email goal + // 2 - Plan - Step 1 - WritePoem + // 3 - Plan - Step 2 - SendEmail + } + + private static MethodInfo Method(Delegate method) + { + return method.Method; + } + + private (Kernel kernel, Mock serviceProviderMock, Mock serviceSelectorMock) SetupKernel(IEnumerable? plugins = null) + { + var serviceProvider = new Mock(); + var serviceSelector = new Mock(); + + var kernel = new Kernel(serviceProvider.Object, plugins is not null ? new KernelPluginCollection(plugins) : null); + + return (kernel, serviceProvider, serviceSelector); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Planning/PlanVariableExpansionTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanVariableExpansionTests.cs similarity index 96% rename from dotnet/src/SemanticKernel.UnitTests/Planning/PlanVariableExpansionTests.cs rename to dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanVariableExpansionTests.cs index 56e218b1db22..e0ca84335358 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Planning/PlanVariableExpansionTests.cs +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Planning/PlanVariableExpansionTests.cs @@ -1,10 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Planning; using Xunit; -namespace SemanticKernel.UnitTests.Planning; +namespace Microsoft.SemanticKernel.Planners.UnitTests.Planning; public sealed class PlanVariableExpansionTests { diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Sequential/SequentialPlanParserTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Sequential/SequentialPlanParserTests.cs index 232e0b649886..c1208eac4051 100644 --- a/dotnet/src/Planners/Planners.Core.UnitTests/Sequential/SequentialPlanParserTests.cs +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Sequential/SequentialPlanParserTests.cs @@ -1,18 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Globalization; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TemplateEngine; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel.AI; using Moq; using Xunit; using Xunit.Abstractions; -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel.Planners.Sequential.UnitTests; -#pragma warning restore IDE0130 // Namespace does not match folder structure +namespace Microsoft.SemanticKernel.Planning.Sequential.UnitTests; public class SequentialPlanParserTests { @@ -23,117 +17,41 @@ public SequentialPlanParserTests(ITestOutputHelper testOutputHelper) this._testOutputHelper = testOutputHelper; } - private Mock CreateKernelMock( - out Mock mockFunctionCollection, - out Mock mockLogger) - { - mockFunctionCollection = new Mock(); - mockLogger = new Mock(); - - var kernelMock = new Mock(); - kernelMock.SetupGet(k => k.Functions).Returns(mockFunctionCollection.Object); - kernelMock.SetupGet(k => k.LoggerFactory).Returns(new Mock().Object); - - return kernelMock; - } - - private SKContext CreateSKContext( - IFunctionRunner functionRunner, - IAIServiceProvider serviceProvider, - IAIServiceSelector serviceSelector, - ContextVariables? variables = null) - { - return new SKContext(functionRunner, serviceProvider, serviceSelector, variables); - } - - private static Mock CreateMockFunction(FunctionView functionView, string result = "") - { - var mockFunction = new Mock(); - mockFunction.Setup(x => x.Describe()).Returns(functionView); - mockFunction.Setup(x => x.Name).Returns(functionView.Name); - mockFunction.Setup(x => x.PluginName).Returns(functionView.PluginName); - return mockFunction; - } - - private void CreateKernelAndFunctionCreateMocks(List<(string name, string pluginName, string description, bool isSemantic, string result)> functions, - out IKernel kernel) - { - var kernelMock = this.CreateKernelMock(out var functionCollection, out _); - kernel = kernelMock.Object; - - var functionRunnerMock = new Mock(); - var serviceProviderMock = new Mock(); - var serviceSelector = new Mock(); - - // For Create - kernelMock.Setup(k => k.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((contextVariables, skills, loggerFactory, culture) => - { - return this.CreateSKContext(functionRunnerMock.Object, serviceProviderMock.Object, serviceSelector.Object, contextVariables); - }); - - var functionsView = new List(); - foreach (var (name, pluginName, description, isSemantic, resultString) in functions) - { - var functionView = new FunctionView(name, pluginName, description) - { - Parameters = new ParameterView[] { new("param", "description") } - }; - var mockFunction = CreateMockFunction(functionView); - functionsView.Add(functionView); - - var result = this.CreateSKContext(functionRunnerMock.Object, serviceProviderMock.Object, serviceSelector.Object); - result.Variables.Update(resultString); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .ReturnsAsync(new FunctionResult(name, pluginName, result)); - - if (string.IsNullOrEmpty(name)) - { - kernelMock.Setup(x => x.RegisterSemanticFunction( - It.IsAny(), - It.IsAny(), - It.IsAny(), - It.IsAny() - )).Returns(mockFunction.Object); - } - else - { - functionCollection.Setup(x => x.GetFunction(It.Is(s => s == pluginName), It.Is(s => s == name))) - 
.Returns(mockFunction.Object); - ISKFunction? outFunc = mockFunction.Object; - functionCollection.Setup(x => x.TryGetFunction(It.Is(s => s == name), out outFunc)).Returns(true); - functionCollection.Setup(x => x.TryGetFunction(It.Is(s => s == pluginName), It.Is(s => s == name), out outFunc)).Returns(true); - } - } - - functionCollection.Setup(x => x.GetFunctionViews()).Returns(functionsView); - } - [Fact] public void CanCallToPlanFromXml() { // Arrange - var functions = new List<(string name, string pluginName, string description, bool isSemantic, string result)>() + var plugins = new KernelPluginCollection() { - ("Summarize", "SummarizePlugin", "Summarize an input", true, "This is the summary."), - ("Translate", "WriterPlugin", "Translate to french", true, "Bonjour!"), - ("GetEmailAddressAsync", "email", "Get email address", false, "johndoe@email.com"), - ("SendEmailAsync", "email", "Send email", false, "Email sent."), + new KernelPlugin("email", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "SendEmailAsync", "Send an e-mail"), + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "GetEmailAddressAsync", "Get email address") + }), + new KernelPlugin("SummarizePlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Summarize", "Summarize an input") + }), + new KernelPlugin("WriterPlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Translate", "Translate to french") + }) }; - this.CreateKernelAndFunctionCreateMocks(functions, out var kernel); var planString = - @" - - - - - -"; + @" + + + + + "; + + var kernel = this.CreateKernel(planString, plugins); + var goal = "Summarize an input, translate to french, and e-mail to John Doe"; // Act - var plan = planString.ToPlanFromXml(goal, kernel.Functions.GetFunctionCallback()); + var plan = planString.ToPlanFromXml(goal, kernel.Plugins.GetFunctionCallback()); // Assert Assert.NotNull(plan); @@ -170,37 +88,40 @@ public void CanCallToPlanFromXml() ); } - private const string GoalText = "Solve the equation x^2 = 2."; - [Fact] public void InvalidPlanExecutePlanReturnsInvalidResult() { // Arrange - this.CreateKernelAndFunctionCreateMocks(new(), out var kernel); var planString = ""; + var kernel = this.CreateKernel(planString); + // Act - Assert.Throws(() => planString.ToPlanFromXml(GoalText, kernel.Functions.GetFunctionCallback())); + Assert.Throws(() => planString.ToPlanFromXml("Solve the equation x^2 = 2.", kernel.Plugins.GetFunctionCallback())); } // Test that contains a #text node in the plan [Theory] [InlineData("Test the functionFlowRunner", @"Test the functionFlowRunner - - - This is some text - ")] + + + This is some text + ")] public void CanCreatePlanWithTextNodes(string goalText, string planText) { // Arrange - var functions = new List<(string name, string pluginName, string description, bool isSemantic, string result)>() + var plugins = new KernelPluginCollection() { - ("Echo", "MockPlugin", "Echo an input", true, "Mock Echo Result"), + new KernelPlugin("MockPlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Echo", "Echo an input"), + }), }; - this.CreateKernelAndFunctionCreateMocks(functions, out var kernel); + + var kernel = this.CreateKernel(planText, plugins); // Act - var plan = planText.ToPlanFromXml(goalText, kernel.Functions.GetFunctionCallback()); + var plan = planText.ToPlanFromXml(goalText, kernel.Plugins.GetFunctionCallback()); // Assert Assert.NotNull(plan); @@ 
-212,19 +133,23 @@ public void CanCreatePlanWithTextNodes(string goalText, string planText) [Theory] [InlineData("Test the functionFlowRunner", @"Test the functionFlowRunner - - ")] + + ")] public void CanCreatePlanWithPartialXml(string goalText, string planText) { // Arrange - var functions = new List<(string name, string pluginName, string description, bool isSemantic, string result)>() + var plugins = new KernelPluginCollection() { - ("Echo", "MockPlugin", "Echo an input", true, "Mock Echo Result"), + new KernelPlugin("MockPlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Echo", "Echo an input"), + }), }; - this.CreateKernelAndFunctionCreateMocks(functions, out var kernel); + + var kernel = this.CreateKernel(planText, plugins); // Act - var plan = planText.ToPlanFromXml(goalText, kernel.Functions.GetFunctionCallback()); + var plan = planText.ToPlanFromXml(goalText, kernel.Plugins.GetFunctionCallback()); // Assert Assert.NotNull(plan); @@ -236,26 +161,30 @@ public void CanCreatePlanWithPartialXml(string goalText, string planText) [Theory] [InlineData("Test the functionFlowRunner", @"Test the functionFlowRunner - - - ")] + + + ")] public void CanCreatePlanWithFunctionName(string goalText, string planText) { // Arrange - var functions = new List<(string name, string pluginName, string description, bool isSemantic, string result)>() + var plugins = new KernelPluginCollection() { - ("Echo", FunctionCollection.GlobalFunctionsPluginName, "Echo an input", true, "Mock Echo Result"), + new KernelPlugin("Global", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Echo", "Echo an input"), + }), }; - this.CreateKernelAndFunctionCreateMocks(functions, out var kernel); + + var kernel = this.CreateKernel(planText, plugins); // Act - var plan = planText.ToPlanFromXml(goalText, kernel.Functions.GetFunctionCallback()); + var plan = planText.ToPlanFromXml(goalText, kernel.Plugins.GetFunctionCallback()); // Assert Assert.NotNull(plan); Assert.Equal(goalText, plan.Description); Assert.Single(plan.Steps); - Assert.Equal(FunctionCollection.GlobalFunctionsPluginName, plan.Steps[0].PluginName); + Assert.Equal("Global", plan.Steps[0].PluginName); Assert.Equal("Echo", plan.Steps[0].Name); } @@ -263,28 +192,32 @@ public void CanCreatePlanWithFunctionName(string goalText, string planText) [Theory] [InlineData(@" - - + + ", true)] [InlineData(@" - - + + ", false)] public void CanCreatePlanWithInvalidFunctionNodes(string planText, bool allowMissingFunctions) { // Arrange - var functions = new List<(string name, string pluginName, string description, bool isSemantic, string result)>() + var plugins = new KernelPluginCollection() { - ("Echo", "MockPlugin", "Echo an input", true, "Mock Echo Result"), + new KernelPlugin("MockPlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Echo", "Echo an input"), + }), }; - this.CreateKernelAndFunctionCreateMocks(functions, out var kernel); + + var kernel = this.CreateKernel(planText, plugins); // Act if (allowMissingFunctions) { // it should not throw - var plan = planText.ToPlanFromXml(string.Empty, kernel.Functions.GetFunctionCallback(), allowMissingFunctions); + var plan = planText.ToPlanFromXml(string.Empty, kernel.Plugins.GetFunctionCallback(), allowMissingFunctions); // Assert Assert.NotNull(plan); @@ -292,49 +225,52 @@ public void CanCreatePlanWithInvalidFunctionNodes(string planText, bool allowMis Assert.Equal("MockPlugin", plan.Steps[0].PluginName); 
Assert.Equal("Echo", plan.Steps[0].Name); - Assert.Null(plan.Steps[0].Description); + Assert.Equal("Echo an input", plan.Steps[0].Description); - Assert.Equal(plan.GetType().Name, plan.Steps[1].PluginName); + Assert.Equal("MockPlugin", plan.Steps[1].PluginName); Assert.NotEmpty(plan.Steps[1].Name); Assert.Equal("MockPlugin.DoesNotExist", plan.Steps[1].Description); } else { - Assert.Throws(() => planText.ToPlanFromXml(string.Empty, kernel.Functions.GetFunctionCallback(), allowMissingFunctions)); + Assert.Throws(() => planText.ToPlanFromXml(string.Empty, kernel.Plugins.GetFunctionCallback(), allowMissingFunctions)); } } [Theory] - [InlineData("Test the functionFlowRunner", @"Possible result: Test the functionFlowRunner - - - This is some text - ")] - [InlineData("Test the functionFlowRunner", @" - - - This is some text - - - plan end")] - [InlineData("Test the functionFlowRunner", @" - - - This is some text - - - plan end")] + [InlineData("Test the functionFlowRunner", + @"Possible result: Test the functionFlowRunner + + + This is some text + ")] + [InlineData("Test the functionFlowRunner", + @" + + This is some text + + plan end")] + [InlineData("Test the functionFlowRunner", + @" + + This is some text + + plan end")] public void CanCreatePlanWithOtherText(string goalText, string planText) { // Arrange - var functions = new List<(string name, string pluginName, string description, bool isSemantic, string result)>() + var plugins = new KernelPluginCollection() { - ("Echo", "MockPlugin", "Echo an input", true, "Mock Echo Result"), + new KernelPlugin("MockPlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Echo", "Echo an input"), + }), }; - this.CreateKernelAndFunctionCreateMocks(functions, out var kernel); + + var kernel = this.CreateKernel(planText, plugins); // Act - var plan = planText.ToPlanFromXml(goalText, kernel.Functions.GetFunctionCallback()); + var plan = planText.ToPlanFromXml(goalText, kernel.Plugins.GetFunctionCallback()); // Assert Assert.NotNull(plan); @@ -351,14 +287,18 @@ public void CanCreatePlanWithOtherText(string goalText, string planText) public void CanCreatePlanWithOpenApiPlugin(string planText) { // Arrange - var functions = new List<(string name, string pluginName, string description, bool isSemantic, string result)>() + var plugins = new KernelPluginCollection() { - ("codesearchresults_post", "CodeSearch", "Echo an input", true, "Mock Echo Result"), + new KernelPlugin("CodeSearch", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "codesearchresults_post", "Echo an input"), + }), }; - this.CreateKernelAndFunctionCreateMocks(functions, out var kernel); + + var kernel = this.CreateKernel(planText, plugins); // Act - var plan = planText.ToPlanFromXml(string.Empty, kernel.Functions.GetFunctionCallback()); + var plan = planText.ToPlanFromXml(string.Empty, kernel.Plugins.GetFunctionCallback()); // Assert Assert.NotNull(plan); @@ -369,22 +309,27 @@ public void CanCreatePlanWithOpenApiPlugin(string planText) // test that a that is not will just get skipped [Theory] - [InlineData("Test the functionFlowRunner", @" - - Some other tag - - ")] + [InlineData("Test the functionFlowRunner", + @" + + Some other tag + + ")] public void CanCreatePlanWithIgnoredNodes(string goalText, string planText) { // Arrange - var functions = new List<(string name, string pluginName, string description, bool isSemantic, string result)>() + var plugins = new KernelPluginCollection() { - ("Echo", "MockPlugin", "Echo an input", 
true, "Mock Echo Result"), + new KernelPlugin("MockPlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Echo", "Echo an input"), + }), }; - this.CreateKernelAndFunctionCreateMocks(functions, out var kernel); + + var kernel = this.CreateKernel(planText, plugins); // Act - var plan = planText.ToPlanFromXml(goalText, kernel.Functions.GetFunctionCallback()); + var plan = planText.ToPlanFromXml(goalText, kernel.Plugins.GetFunctionCallback()); // Assert Assert.NotNull(plan); @@ -396,4 +341,31 @@ public void CanCreatePlanWithIgnoredNodes(string goalText, string planText) Assert.Equal("MockPlugin", plan.Steps[1].PluginName); Assert.Equal("Echo", plan.Steps[1].Name); } + + private Kernel CreateKernel(string testPlanString, KernelPluginCollection? plugins = null) + { + plugins ??= new KernelPluginCollection(); + + var textResult = new Mock(); + textResult + .Setup(tr => tr.GetCompletionAsync(It.IsAny())) + .ReturnsAsync(testPlanString); + + var textGenerationResult = new List { textResult.Object }; + + var textGeneration = new Mock(); + textGeneration + .Setup(tc => tc.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(textGenerationResult); + + var serviceSelector = new Mock(); + serviceSelector + .Setup(ss => ss.SelectAIService(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns((textGeneration.Object, new PromptExecutionSettings())); + + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(serviceSelector.Object); + + return new Kernel(serviceCollection.BuildServiceProvider(), plugins); + } } diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Sequential/SequentialPlannerTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Sequential/SequentialPlannerTests.cs index 2506fb1f27c1..3cae1725f4a0 100644 --- a/dotnet/src/Planners/Planners.Core.UnitTests/Sequential/SequentialPlannerTests.cs +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Sequential/SequentialPlannerTests.cs @@ -1,17 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. 
-using System.Globalization; -using Microsoft.Extensions.Logging; +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.AI.TextGeneration; using Moq; using Xunit; -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel.Planners.Sequential.UnitTests; -#pragma warning restore IDE0130 // Namespace does not match folder structure +namespace Microsoft.SemanticKernel.Planning.Sequential.UnitTests; public sealed class SequentialPlannerTests { @@ -20,95 +15,19 @@ public sealed class SequentialPlannerTests public async Task ItCanCreatePlanAsync(string goal) { // Arrange - var kernel = new Mock(); - kernel.Setup(x => x.LoggerFactory).Returns(new Mock().Object); - kernel.Setup(x => x.RunAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(async (vars, cancellationToken, functions) => - { - var functionResult = await functions[0].InvokeAsync(kernel.Object, vars, cancellationToken: cancellationToken); - return KernelResult.FromFunctionResults(functionResult.GetValue(), new List { functionResult }); - }); - - var input = new List<(string name, string pluginName, string description, bool isSemantic)>() - { - ("SendEmail", "email", "Send an e-mail", false), - ("GetEmailAddress", "email", "Get an e-mail address", false), - ("Translate", "WriterPlugin", "Translate something", true), - ("Summarize", "SummarizePlugin", "Summarize something", true) - }; - - var functionsView = new List(); - var functions = new Mock(); - foreach (var (name, pluginName, description, isSemantic) in input) - { - var functionView = new FunctionView(name, pluginName, description); - var mockFunction = CreateMockFunction(functionView); - functionsView.Add(functionView); - - mockFunction.Setup(x => - x.InvokeAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((context, settings, cancellationToken) => - { - context.Variables.Update("MOCK FUNCTION CALLED"); - return Task.FromResult(new FunctionResult(name, pluginName, context)); - }); - - functions.Setup(x => x.GetFunction(It.Is(s => s == pluginName), It.Is(s => s == name))) - .Returns(mockFunction.Object); - ISKFunction? 
outFunc = mockFunction.Object; - functions.Setup(x => x.TryGetFunction(It.Is(s => s == pluginName), It.Is(s => s == name), out outFunc)).Returns(true); - } - - functions.Setup(x => x.GetFunctionViews()).Returns(functionsView); - var functionRunner = new Mock(); - var serviceProvider = new Mock(); - var serviceSelector = new Mock(); - kernel.Setup(x => x.LoggerFactory).Returns(new Mock().Object); - - var expectedFunctions = input.Select(x => x.name).ToList(); - var expectedPlugins = input.Select(x => x.pluginName).ToList(); + var plugins = this.CreatePluginCollection(); - var context = new SKContext( - functionRunner.Object, - serviceProvider.Object, - serviceSelector.Object, - new ContextVariables()); + var planString = + @" + + + + + "; - var returnContext = new SKContext( - functionRunner.Object, - serviceProvider.Object, - serviceSelector.Object, - new ContextVariables()); + var kernel = this.CreateKernel(planString, plugins); - var planString = - @" - - - - - -"; - - returnContext.Variables.Update(planString); - - var mockFunctionFlowFunction = new Mock(); - mockFunctionFlowFunction.Setup(x => x.InvokeAsync( - It.IsAny(), - null, - default - )).Callback( - (c, s, ct) => c.Variables.Update("Hello world!") - ).Returns(() => Task.FromResult(new FunctionResult("FunctionName", "PluginName", returnContext, planString))); - - // Mock Plugins - kernel.Setup(x => x.Functions).Returns(functions.Object); - kernel.Setup(x => x.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(context); - - kernel.Setup(x => x.RegisterCustomFunction(It.IsAny())) - .Returns(mockFunctionFlowFunction.Object); - - var planner = new SequentialPlanner(kernel.Object); + var planner = new SequentialPlanner(kernel); // Act var plan = await planner.CreatePlanAsync(goal, default); @@ -116,91 +35,41 @@ public async Task ItCanCreatePlanAsync(string goal) // Assert Assert.Equal(goal, plan.Description); - Assert.Contains( - plan.Steps, - step => - expectedFunctions.Contains(step.Name) && - expectedPlugins.Contains(step.PluginName)); + Assert.Equal(4, plan.Steps.Count); - foreach (var expectedFunction in expectedFunctions) - { - Assert.Contains( - plan.Steps, - step => step.Name == expectedFunction); - } - - foreach (var expectedPlugin in expectedPlugins) - { - Assert.Contains( - plan.Steps, - step => step.PluginName == expectedPlugin); - } + Assert.Contains(plan.Steps, step => plugins.TryGetFunction(step.PluginName, step.Name, out var _)); } [Fact] public async Task EmptyGoalThrowsAsync() { // Arrange - var kernel = new Mock(); + var kernel = this.CreateKernel(string.Empty); - var planner = new SequentialPlanner(kernel.Object); + var planner = new SequentialPlanner(kernel); - // Act - await Assert.ThrowsAsync(async () => await planner.CreatePlanAsync("")); + // Act & Assert + await Assert.ThrowsAsync(async () => await planner.CreatePlanAsync("")); } [Fact] public async Task InvalidXMLThrowsAsync() { // Arrange - var functionRunner = new Mock(); - var serviceProvider = new Mock(); - var serviceSelector = new Mock(); - var kernel = new Mock(); - var functions = new Mock(); + var kernel = this.CreateKernel("notvalid<"); - functions.Setup(x => x.GetFunctionViews()).Returns(new List()); - - var planString = "notvalid<"; - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(planString)); - - var context = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables()); - - var 
mockFunctionFlowFunction = new Mock(); - mockFunctionFlowFunction.Setup(x => x.InvokeAsync( - It.IsAny(), - null, - default - )).Callback( - (c, s, ct) => c.Variables.Update("Hello world!") - ).Returns(() => Task.FromResult(new FunctionResult("FunctionName", "PluginName", returnContext, planString))); - - // Mock Plugins - kernel.Setup(x => x.Functions).Returns(functions.Object); - kernel.Setup(x => x.RunAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(async (vars, cancellationToken, functions) => - { - var functionResult = await functions[0].InvokeAsync(kernel.Object, vars, cancellationToken: cancellationToken); - return KernelResult.FromFunctionResults(functionResult.GetValue(), new List { functionResult }); - }); + var planner = new SequentialPlanner(kernel); - kernel.Setup(x => x.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(context); - - kernel.Setup(x => x.RegisterCustomFunction(It.IsAny())) - .Returns(mockFunctionFlowFunction.Object); - - var planner = new SequentialPlanner(kernel.Object); - - // Act - await Assert.ThrowsAsync(async () => await planner.CreatePlanAsync("goal")); + // Act & Assert + var exception = await Assert.ThrowsAsync(async () => await planner.CreatePlanAsync("goal")); + Assert.True(exception?.InnerException?.Message?.Contains("Failed to parse plan xml strings", StringComparison.InvariantCulture)); } [Fact] public void UsesPromptDelegateWhenProvided() { // Arrange - var kernel = new Mock(); + var kernel = this.CreateKernel(string.Empty); var getPromptTemplateMock = new Mock>(); var config = new SequentialPlannerConfig() { @@ -208,19 +77,56 @@ public void UsesPromptDelegateWhenProvided() }; // Act - var planner = new SequentialPlanner(kernel.Object, config); + var planner = new SequentialPlanner(kernel, config); // Assert getPromptTemplateMock.Verify(x => x(), Times.Once()); } - // Method to create Mock objects - private static Mock CreateMockFunction(FunctionView functionView) + private Kernel CreateKernel(string testPlanString, KernelPluginCollection? 
plugins = null) { - var mockFunction = new Mock(); - mockFunction.Setup(x => x.Describe()).Returns(functionView); - mockFunction.Setup(x => x.Name).Returns(functionView.Name); - mockFunction.Setup(x => x.PluginName).Returns(functionView.PluginName); - return mockFunction; + plugins ??= new KernelPluginCollection(); + + var textResult = new Mock(); + textResult + .Setup(tr => tr.GetCompletionAsync(It.IsAny())) + .ReturnsAsync(testPlanString); + + var textGenerationResult = new List { textResult.Object }; + + var textGeneration = new Mock(); + textGeneration + .Setup(tc => tc.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(textGenerationResult); + + var serviceSelector = new Mock(); + serviceSelector + .Setup(ss => ss.SelectAIService(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns((textGeneration.Object, new PromptExecutionSettings())); + + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(serviceSelector.Object); + + return new Kernel(serviceCollection.BuildServiceProvider(), plugins); + } + + private KernelPluginCollection CreatePluginCollection() + { + return new() + { + new KernelPlugin("email", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "SendEmail", "Send an e-mail"), + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "GetEmailAddress", "Get an e-mail address") + }), + new KernelPlugin("WriterPlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Translate", "Translate something"), + }), + new KernelPlugin("SummarizePlugin", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Summarize", "Summarize something"), + }) + }; } } diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Stepwise/ParseResultTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Stepwise/ParseResultTests.cs index 566c7982905a..ab07fdca8970 100644 --- a/dotnet/src/Planners/Planners.Core.UnitTests/Stepwise/ParseResultTests.cs +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Stepwise/ParseResultTests.cs @@ -1,12 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Microsoft.Extensions.Logging.Abstractions; using Moq; using Xunit; -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel.Planners.Stepwise.UnitTests; -#pragma warning restore IDE0130 // Namespace does not match folder structure +namespace Microsoft.SemanticKernel.Planning.Stepwise.UnitTests; public sealed class ParseResultTests { @@ -24,10 +21,9 @@ public sealed class ParseResultTests public void WhenInputIsFinalAnswerReturnsFinalAnswer(string input, string expected) { // Arrange - var kernel = new Mock(); - kernel.Setup(x => x.LoggerFactory).Returns(NullLoggerFactory.Instance); + var kernel = new Kernel(new Mock().Object); - var planner = new StepwisePlanner(kernel.Object); + var planner = new StepwisePlanner(kernel); // Act var result = planner.ParseResult(input); @@ -76,10 +72,9 @@ public void ParseActionReturnsAction(string input, string expectedThought, strin } // Arrange - var kernel = new Mock(); - kernel.Setup(x => x.LoggerFactory).Returns(NullLoggerFactory.Instance); + var kernel = new Kernel(new Mock().Object); - var planner = new StepwisePlanner(kernel.Object); + var planner = new StepwisePlanner(kernel); // Act var result = planner.ParseResult(input); @@ -89,14 +84,4 @@ public void ParseActionReturnsAction(string input, string expectedThought, strin Assert.Equal(expectedDictionary, result.ActionVariables); Assert.Equal(expectedThought ?? string.Empty, result.Thought); } - - // Method to create Mock objects - private static Mock CreateMockFunction(FunctionView functionView) - { - var mockFunction = new Mock(); - mockFunction.Setup(x => x.Describe()).Returns(functionView); - mockFunction.Setup(x => x.Name).Returns(functionView.Name); - mockFunction.Setup(x => x.PluginName).Returns(functionView.PluginName); - return mockFunction; - } } diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/Stepwise/StepwisePlannerTests.cs b/dotnet/src/Planners/Planners.Core.UnitTests/Stepwise/StepwisePlannerTests.cs index 42fba5e88830..41d175a9eb25 100644 --- a/dotnet/src/Planners/Planners.Core.UnitTests/Stepwise/StepwisePlannerTests.cs +++ b/dotnet/src/Planners/Planners.Core.UnitTests/Stepwise/StepwisePlannerTests.cs @@ -1,12 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Microsoft.Extensions.Logging.Abstractions; using Moq; using Xunit; -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel.Planners.Stepwise.UnitTests; -#pragma warning restore IDE0130 // Namespace does not match folder structure +namespace Microsoft.SemanticKernel.Planning.Stepwise.UnitTests; public sealed class StepwisePlannerTests { @@ -14,8 +11,8 @@ public sealed class StepwisePlannerTests public void UsesPromptDelegateWhenProvided() { // Arrange - var kernel = new Mock(); - kernel.Setup(x => x.LoggerFactory).Returns(NullLoggerFactory.Instance); + var kernel = new Kernel(new Mock().Object); + var getPromptTemplateMock = new Mock>(); var config = new StepwisePlannerConfig() { @@ -23,7 +20,7 @@ public void UsesPromptDelegateWhenProvided() }; // Act - var planner = new StepwisePlanner(kernel.Object, config); + var planner = new StepwisePlanner(kernel, config); // Assert getPromptTemplateMock.Verify(x => x(), Times.Once()); diff --git a/dotnet/src/Planners/Planners.Core.UnitTests/XunitHelpers/TestConsoleLogger.cs b/dotnet/src/Planners/Planners.Core.UnitTests/XunitHelpers/TestConsoleLogger.cs index d71996dccd49..7bee46c51b99 100644 --- a/dotnet/src/Planners/Planners.Core.UnitTests/XunitHelpers/TestConsoleLogger.cs +++ b/dotnet/src/Planners/Planners.Core.UnitTests/XunitHelpers/TestConsoleLogger.cs @@ -2,7 +2,7 @@ using Microsoft.Extensions.Logging; -namespace Microsoft.SemanticKernel.Planners.UnitTests.XunitHelpers; +namespace Microsoft.SemanticKernel.Planning.UnitTests.XunitHelpers; /// /// Basic logger printing to console @@ -16,7 +16,7 @@ internal static class TestConsoleLogger private static ILoggerFactory LogBuilder() { - return Extensions.Logging.LoggerFactory.Create(builder => + return Microsoft.Extensions.Logging.LoggerFactory.Create(builder => { builder.SetMinimumLevel(LogLevel.Trace); // builder.AddFilter("Microsoft", LogLevel.Trace); diff --git a/dotnet/src/Planners/Planners.Core/Action/ActionPlanResponse.cs b/dotnet/src/Planners/Planners.Core/Action/ActionPlanResponse.cs index d902659ccc7c..bd4634128ad0 100644 --- a/dotnet/src/Planners/Planners.Core/Action/ActionPlanResponse.cs +++ b/dotnet/src/Planners/Planners.Core/Action/ActionPlanResponse.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; -namespace Microsoft.SemanticKernel.Planners.Action; +namespace Microsoft.SemanticKernel.Planning.Action; /// /// Plan data structure returned by the basic planner semantic function diff --git a/dotnet/src/Planners/Planners.Core/Action/ActionPlanner.cs b/dotnet/src/Planners/Planners.Core/Action/ActionPlanner.cs index 39a6c0408bf2..63028013ecf0 100644 --- a/dotnet/src/Planners/Planners.Core/Action/ActionPlanner.cs +++ b/dotnet/src/Planners/Planners.Core/Action/ActionPlanner.cs @@ -10,16 +10,11 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Planners.Action; -using Microsoft.SemanticKernel.Planning; +using Microsoft.SemanticKernel.Planning.Action; -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; /// /// Action Planner allows to select one function out of many, to achieve a given goal. 
@@ -30,7 +25,7 @@ namespace Microsoft.SemanticKernel.Planners; /// The rationale is currently available only in the prompt, we might include it in /// the Plan object in future. /// -public sealed class ActionPlanner : IActionPlanner +public sealed class ActionPlanner { private const string StopSequence = "#END-OF-PLAN"; private const string PluginName = "this"; @@ -40,12 +35,20 @@ public sealed class ActionPlanner : IActionPlanner /// private static readonly Regex s_planRegex = new("^[^{}]*(((?'Open'{)[^{}]*)+((?'Close-Open'})[^{}]*)+)*(?(Open)(?!))", RegexOptions.Singleline | RegexOptions.Compiled); + /// Deserialization options for use with . + private static readonly JsonSerializerOptions s_actionPlayResponseOptions = new() + { + AllowTrailingCommas = true, + DictionaryKeyPolicy = null, + DefaultIgnoreCondition = JsonIgnoreCondition.Never, + PropertyNameCaseInsensitive = true, + }; + // Planner semantic function - private readonly ISKFunction _plannerFunction; + private readonly KernelFunction _plannerFunction; - // Context used to access the list of functions in the kernel - private readonly SKContext _context; - private readonly IKernel _kernel; + private readonly ContextVariables _contextVariables; + private readonly Kernel _kernel; private readonly ILogger _logger; // TODO: allow to inject plugin store @@ -55,7 +58,7 @@ public sealed class ActionPlanner : IActionPlanner /// The semantic kernel instance. /// The planner configuration. public ActionPlanner( - IKernel kernel, + Kernel kernel, ActionPlannerConfig? config = null) { Verify.NotNull(kernel); @@ -67,41 +70,51 @@ public ActionPlanner( string promptTemplate = this.Config.GetPromptTemplate?.Invoke() ?? EmbeddedResource.Read("Action.skprompt.txt"); - this._plannerFunction = kernel.CreateSemanticFunction( - pluginName: PluginName, + this._plannerFunction = kernel.CreateFunctionFromPrompt( promptTemplate: promptTemplate, - requestSettings: new AIRequestSettings() + new PromptExecutionSettings() { - ExtensionData = new Dictionary() + ExtensionData = new() { { "StopSequences", new[] { StopSequence } }, { "MaxTokens", this.Config.MaxTokens }, } }); - kernel.ImportFunctions(this, pluginName: PluginName); + kernel.ImportPluginFromObject(this, pluginName: PluginName); // Create context and logger - this._context = kernel.CreateNewContext(); - this._logger = this._kernel.LoggerFactory.CreateLogger(this.GetType()); + this._contextVariables = new ContextVariables(); + this._logger = kernel.LoggerFactory.CreateLogger(this.GetType()) ?? NullLogger.Instance; } - /// - public async Task CreatePlanAsync(string goal, CancellationToken cancellationToken = default) + /// Creates a plan for the specified goal. + /// The goal for which a plan should be created. + /// The to monitor for cancellation requests. The default is . + /// The created plan. + /// is null. + /// is empty or entirely composed of whitespace. + /// A plan could not be created. 
+ public Task CreatePlanAsync(string goal, CancellationToken cancellationToken = default) { - if (string.IsNullOrEmpty(goal)) - { - throw new SKException("The goal specified is empty"); - } + Verify.NotNullOrWhiteSpace(goal); - this._context.Variables.Update(goal); + return PlannerInstrumentation.CreatePlanAsync( + static (ActionPlanner planner, string goal, CancellationToken cancellationToken) => planner.CreatePlanCoreAsync(goal, cancellationToken), + static (Plan plan) => plan.ToSafePlanString(), + this, goal, this._logger, cancellationToken); + } - FunctionResult result = await this._plannerFunction.InvokeAsync(this._context, cancellationToken: cancellationToken).ConfigureAwait(false); + private async Task CreatePlanCoreAsync(string goal, CancellationToken cancellationToken) + { + this._contextVariables.Update(goal); + + FunctionResult result = await this._plannerFunction.InvokeAsync(this._kernel, this._contextVariables, cancellationToken: cancellationToken).ConfigureAwait(false); ActionPlanResponse? planData = this.ParsePlannerResult(result); if (planData == null) { - throw new SKException("The plan deserialized to a null object"); + throw new KernelException("The plan deserialized to a null object"); } // Build and return plan @@ -110,11 +123,12 @@ public async Task CreatePlanAsync(string goal, CancellationToken cancellat FunctionUtils.SplitPluginFunctionName(planData.Plan.Function, out var pluginName, out var functionName); if (!string.IsNullOrEmpty(functionName)) { - var getFunctionCallback = this.Config.GetFunctionCallback ?? this._kernel.Functions.GetFunctionCallback(); + var getFunctionCallback = this.Config.GetFunctionCallback ?? this._kernel.Plugins.GetFunctionCallback(); var pluginFunction = getFunctionCallback(pluginName, functionName); if (pluginFunction != null) { plan = new Plan(goal, pluginFunction); + plan.Steps[0].PluginName = pluginName; } } @@ -125,9 +139,9 @@ public async Task CreatePlanAsync(string goal, CancellationToken cancellat { foreach (KeyValuePair p in planData.Plan.Parameters) { - if (p.Value != null) + if (p.Value?.ToString() is string value) { - plan.Steps[0].Parameters[p.Key] = p.Value.ToString(); + plan.Steps[0].Parameters[p.Key] = value; } } } @@ -141,18 +155,16 @@ public async Task CreatePlanAsync(string goal, CancellationToken cancellat /// excluding functions in the planner itself. /// /// Currently unused. Will be used to handle long lists of functions. - /// Function execution context /// The token to use to request cancellation. /// List of functions, formatted accordingly to the prompt - [SKFunction, Description("List all functions available in the kernel")] + [KernelFunction, Description("List all functions available in the kernel")] public async Task ListOfFunctionsAsync( [Description("The current goal processed by the planner")] string goal, - SKContext context, CancellationToken cancellationToken = default) { // Prepare list using the format used by skprompt.txt var list = new StringBuilder(); - var availableFunctions = await context.Functions.GetFunctionsAsync(this.Config, goal, this._logger, cancellationToken).ConfigureAwait(false); + var availableFunctions = await this._kernel.Plugins.GetFunctionsAsync(this.Config, goal, this._logger, cancellationToken).ConfigureAwait(false); this.PopulateList(list, availableFunctions); return list.ToString(); @@ -164,12 +176,12 @@ public async Task ListOfFunctionsAsync( /// Native function that provides a list of good examples of plans to generate. /// /// The current goal processed by the planner. 
- /// Function execution context. + /// Function execution context variables. /// List of good examples, formatted accordingly to the prompt. - [SKFunction, Description("List a few good examples of plans to generate")] + [KernelFunction, Description("List a few good examples of plans to generate")] public string GoodExamples( [Description("The current goal processed by the planner")] string goal, - SKContext context) + ContextVariables variables) { return @" [EXAMPLE] @@ -205,12 +217,12 @@ No parameters. /// Native function that provides a list of edge case examples of plans to handle. /// /// The current goal processed by the planner. - /// Function execution context. + /// Function execution context variables. /// List of edge case examples, formatted accordingly to the prompt. - [SKFunction, Description("List a few edge case examples of plans to handle")] + [KernelFunction, Description("List a few edge case examples of plans to handle")] public string EdgeCaseExamples( [Description("The current goal processed by the planner")] string goal, - SKContext context) + ContextVariables variables) { return @" [EXAMPLE] @@ -254,35 +266,30 @@ No parameters. /// Instance of object deserialized from extracted JSON. private ActionPlanResponse? ParsePlannerResult(FunctionResult plannerResult) { - Match match = s_planRegex.Match(plannerResult.GetValue()); - - if (match.Success && match.Groups["Close"].Length > 0) + if (plannerResult.GetValue() is string result) { - string planJson = $"{{{match.Groups["Close"]}}}"; - try + Match match = s_planRegex.Match(result); + + if (match.Success && match.Groups["Close"] is { Length: > 0 } close) { - return JsonSerializer.Deserialize(planJson, new JsonSerializerOptions + string planJson = $"{{{close}}}"; + try { - AllowTrailingCommas = true, - DictionaryKeyPolicy = null, - DefaultIgnoreCondition = JsonIgnoreCondition.Never, - PropertyNameCaseInsensitive = true, - }); - } - catch (Exception e) - { - throw new SKException("Plan parsing error, invalid JSON", e); + return JsonSerializer.Deserialize(planJson, s_actionPlayResponseOptions); + } + catch (Exception e) + { + throw new KernelException("Plan parsing error, invalid JSON", e); + } } } - else - { - throw new SKException($"Failed to extract valid json string from planner result: '{plannerResult}'"); - } + + throw new KernelException($"Failed to extract valid json string from planner result: '{plannerResult}'"); } - private void PopulateList(StringBuilder list, IEnumerable functions) + private void PopulateList(StringBuilder list, IEnumerable functions) { - foreach (FunctionView func in functions) + foreach (KernelFunctionMetadata func in functions) { // Function description if (func.Description != null) diff --git a/dotnet/src/Planners/Planners.Core/Action/ActionPlannerConfig.cs b/dotnet/src/Planners/Planners.Core/Action/ActionPlannerConfig.cs index 8c606fe9f3e3..f925d679dbf6 100644 --- a/dotnet/src/Planners/Planners.Core/Action/ActionPlannerConfig.cs +++ b/dotnet/src/Planners/Planners.Core/Action/ActionPlannerConfig.cs @@ -1,9 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; /// /// Configuration for Action planner instances. 
diff --git a/dotnet/src/Planners/Planners.Core/Action/ActionPlannerExtensions.cs b/dotnet/src/Planners/Planners.Core/Action/ActionPlannerExtensions.cs deleted file mode 100644 index 9fea5ad4f7de..000000000000 --- a/dotnet/src/Planners/Planners.Core/Action/ActionPlannerExtensions.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.Logging; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Extension methods for class. -/// -public static class ActionPlannerExtensions -{ - /// - /// Returns decorated instance of with enabled instrumentation. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public static IActionPlanner WithInstrumentation(this IActionPlanner planner, ILoggerFactory? loggerFactory = null) - { - return new InstrumentedActionPlanner(planner, loggerFactory); - } -} diff --git a/dotnet/src/Planners/Planners.Core/Action/IActionPlanner.cs b/dotnet/src/Planners/Planners.Core/Action/IActionPlanner.cs deleted file mode 100644 index d0cb4e2bfe9d..000000000000 --- a/dotnet/src/Planners/Planners.Core/Action/IActionPlanner.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Planning; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Interface for planner that uses a set of semantic functions to select one function out of many and create a plan. -/// -public interface IActionPlanner -{ - /// - /// Create a plan for a goal. - /// - /// The goal to create a plan for. - /// The to monitor for cancellation requests. The default is . - /// The plan. - /// Thrown when the plan cannot be created. - Task CreatePlanAsync(string goal, CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/Planners/Planners.Core/Action/InstrumentedActionPlanner.cs b/dotnet/src/Planners/Planners.Core/Action/InstrumentedActionPlanner.cs deleted file mode 100644 index e29b49670b60..000000000000 --- a/dotnet/src/Planners/Planners.Core/Action/InstrumentedActionPlanner.cs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Planning; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Instrumented planner that uses set of semantic functions to select one function out of many and create a plan. -/// Captures planner-related logs and metrics. -/// -internal sealed class InstrumentedActionPlanner : IActionPlanner -{ - /// - /// Initialize a new instance of the class. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public InstrumentedActionPlanner( - IActionPlanner planner, - ILoggerFactory? loggerFactory = null) - { - this._planner = planner; - this._logger = loggerFactory is not null ? 
loggerFactory.CreateLogger(typeof(InstrumentedActionPlanner)) : NullLogger.Instance; - } - - /// - public async Task CreatePlanAsync(string goal, CancellationToken cancellationToken = default) - { - using var activity = s_activitySource.StartActivity($"{PlannerType}.CreatePlan"); - - this._logger.LogInformation("{PlannerType}: Plan creation started.", PlannerType); - - // Sensitive data, logging as trace, disabled by default - this._logger.LogTrace("{PlannerType}: Plan Goal: {Goal}", PlannerType, goal); - - var stopwatch = new Stopwatch(); - - try - { - stopwatch.Start(); - - var plan = await this._planner.CreatePlanAsync(goal, cancellationToken).ConfigureAwait(false); - - stopwatch.Stop(); - - this._logger.LogInformation("{PlannerType}: Plan creation status: {Status}", PlannerType, "Success"); - - this._logger.LogInformation("{PlannerType}: Created plan: \n {Plan}", PlannerType, plan.ToSafePlanString()); - - // Sensitive data, logging as trace, disabled by default - this._logger.LogTrace("{PlannerType}: Created plan with details: \n {Plan}", PlannerType, plan.ToPlanString()); - - return plan; - } - catch (Exception ex) - { - this._logger.LogInformation("{PlannerType}: Plan creation status: {Status}", PlannerType, "Failed"); - this._logger.LogError(ex, "{PlannerType}: Plan creation exception details: {Message}", PlannerType, ex.Message); - - throw; - } - finally - { - this._logger.LogInformation("{PlannerType}: Plan creation finished in {ExecutionTime}ms.", PlannerType, stopwatch.ElapsedMilliseconds); - - s_createPlanExecutionTime.Record(stopwatch.ElapsedMilliseconds); - } - } - - #region private ================================================================================ - - private const string PlannerType = nameof(ActionPlanner); - - private readonly IActionPlanner _planner; - private readonly ILogger _logger; - - /// - /// Instance of for planner-related activities. - /// - private static readonly ActivitySource s_activitySource = new(typeof(InstrumentedActionPlanner).FullName); - - /// - /// Instance of for planner-related metrics. - /// - private static readonly Meter s_meter = new(typeof(InstrumentedActionPlanner).FullName); - - /// - /// Instance of to record plan creation execution time. - /// - private static readonly Histogram s_createPlanExecutionTime = - s_meter.CreateHistogram( - name: $"SK.{PlannerType}.CreatePlan.ExecutionTime", - unit: "ms", - description: "Execution time of plan creation"); - - #endregion -} diff --git a/dotnet/src/Planners/Planners.Core/Extensions/FunctionViewExtensions.cs b/dotnet/src/Planners/Planners.Core/Extensions/FunctionViewExtensions.cs deleted file mode 100644 index 7d2c5be80baa..000000000000 --- a/dotnet/src/Planners/Planners.Core/Extensions/FunctionViewExtensions.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Linq; - -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the class. -/// -internal static class FunctionViewExtensions -{ - /// - /// Create a manual-friendly string for a function. - /// - /// The function to create a manual-friendly string for. - /// A manual-friendly string for a function. - internal static string ToManualString(this FunctionView function) - { - var inputs = string.Join("\n", function.Parameters.Select(parameter => - { - var defaultValueString = string.IsNullOrEmpty(parameter.DefaultValue) ? 
string.Empty : $" (default value: {parameter.DefaultValue})"; - return $" - {parameter.Name}: {parameter.Description}{defaultValueString}"; - })); - - // description and inputs are indented by 2 spaces - // While each parameter in inputs is indented by 4 spaces - return $@"{function.ToFullyQualifiedName()}: - description: {function.Description} - inputs: -{inputs}"; - } - - /// - /// Create a fully qualified name for a function. - /// - /// The function to create a fully qualified name for. - /// A fully qualified name for a function. - internal static string ToFullyQualifiedName(this FunctionView function) - { - return $"{function.PluginName}.{function.Name}"; - } - - /// - /// Create a string for generating an embedding for a function. - /// - /// The function to create a string for generating an embedding for. - /// A string for generating an embedding for a function. - internal static string ToEmbeddingString(this FunctionView function) - { - var inputs = string.Join("\n", function.Parameters.Select(p => $" - {p.Name}: {p.Description}")); - return $"{function.Name}:\n description: {function.Description}\n inputs:\n{inputs}"; - } -} diff --git a/dotnet/src/Planners/Planners.Core/Extensions/PromptTemplateConfigExtensions.cs b/dotnet/src/Planners/Planners.Core/Extensions/PromptTemplateConfigExtensions.cs index 594e053d4426..e402b3b91da2 100644 --- a/dotnet/src/Planners/Planners.Core/Extensions/PromptTemplateConfigExtensions.cs +++ b/dotnet/src/Planners/Planners.Core/Extensions/PromptTemplateConfigExtensions.cs @@ -1,12 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. -// ReSharper disable once CheckNamespace - Using the namespace of IKernel using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.TemplateEngine; -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; /// /// Extension methods for PromptTemplateConfig @@ -20,11 +16,11 @@ internal static class PromptTemplateConfigExtensions /// Value of max tokens to set internal static void SetMaxTokens(this PromptTemplateConfig config, int maxTokens) { - AIRequestSettings requestSettings = config.GetDefaultRequestSettings() ?? new(); + PromptExecutionSettings executionSettings = config.GetDefaultRequestSettings() ?? new(); if (config.ModelSettings.Count == 0) { - config.ModelSettings.Add(requestSettings); + config.ModelSettings.Add(executionSettings); } - requestSettings.ExtensionData["max_tokens"] = maxTokens; + executionSettings.ExtensionData["max_tokens"] = maxTokens; } } diff --git a/dotnet/src/Planners/Planners.Core/Extensions/ReadOnlyFunctionCollectionPlannerExtensions.cs b/dotnet/src/Planners/Planners.Core/Extensions/ReadOnlyFunctionCollectionPlannerExtensions.cs deleted file mode 100644 index a4e32df5fc40..000000000000 --- a/dotnet/src/Planners/Planners.Core/Extensions/ReadOnlyFunctionCollectionPlannerExtensions.cs +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Memory; - -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Provides extension methods for the implementations for planners. 
-/// -public static class ReadOnlyFunctionCollectionPlannerExtensions -{ - internal const string PlannerMemoryCollectionName = "Planning.SKFunctionsManual"; - - /// - /// Returns a function callback that can be used to retrieve a function from the function provider. - /// - /// The function provider. - /// A function callback that can be used to retrieve a function from the function provider. - public static Func GetFunctionCallback(this IReadOnlyFunctionCollection functions) - { - return (pluginName, functionName) => - { - if (string.IsNullOrEmpty(pluginName)) - { - if (functions.TryGetFunction(functionName, out var pluginFunction)) - { - return pluginFunction; - } - } - else if (functions.TryGetFunction(pluginName, functionName, out var pluginFunction)) - { - return pluginFunction; - } - - return null; - }; - } - - /// - /// Returns a string containing the manual for all available functions. - /// - /// The function provider. - /// The planner config. - /// The semantic query for finding relevant registered functions - /// The logger to use for logging. - /// The to monitor for cancellation requests. The default is . - /// A string containing the manual for all available functions. - public static async Task GetFunctionsManualAsync( - this IReadOnlyFunctionCollection functions, - PlannerConfigBase config, - string? semanticQuery = null, - ILogger? logger = null, - CancellationToken cancellationToken = default) - { - IOrderedEnumerable availableFunctions = await functions.GetFunctionsAsync(config, semanticQuery, logger, cancellationToken).ConfigureAwait(false); - - return string.Join("\n\n", availableFunctions.Select(x => x.ToManualString())); - } - - /// - /// Returns a list of functions that are available to the user based on the semantic query and the excluded plugins and functions. - /// - /// The function provider. - /// The planner config. - /// The semantic query for finding relevant registered functions - /// The logger to use for logging. - /// The to monitor for cancellation requests. The default is . - /// A list of functions that are available to the user based on the semantic query and the excluded plugins and functions. - public static async Task> GetFunctionsAsync( - this IReadOnlyFunctionCollection functions, - PlannerConfigBase config, - string? semanticQuery, - ILogger? logger, - CancellationToken cancellationToken) - { - // Use configured function provider if available, otherwise use the default SKContext function provider. - return config.GetAvailableFunctionsAsync is null ? - await functions.GetAvailableFunctionsAsync(config, semanticQuery, logger, cancellationToken).ConfigureAwait(false) : - await config.GetAvailableFunctionsAsync(config, semanticQuery, cancellationToken).ConfigureAwait(false); - } - - /// - /// Returns a list of functions that are available to the user based on the semantic query and the excluded plugins and functions. - /// - /// The function provider. - /// The planner config. - /// The semantic query for finding relevant registered functions - /// The logger to use for logging. - /// The to monitor for cancellation requests. The default is . - /// A list of functions that are available to the user based on the semantic query and the excluded plugins and functions. - public static async Task> GetAvailableFunctionsAsync( - this IReadOnlyFunctionCollection functions, - PlannerConfigBase config, - string? semanticQuery = null, - ILogger? 
logger = null, - CancellationToken cancellationToken = default) - { - var functionsView = functions.GetFunctionViews(); - - var availableFunctions = functionsView - .Where(s => !config.ExcludedPlugins.Contains(s.PluginName, StringComparer.OrdinalIgnoreCase) - && !config.ExcludedFunctions.Contains(s.Name, StringComparer.OrdinalIgnoreCase)) - .ToList(); - - List? result = null; - var semanticMemoryConfig = config.SemanticMemoryConfig; - if (string.IsNullOrEmpty(semanticQuery) || semanticMemoryConfig.Memory is NullMemory) - { - // If no semantic query is provided, return all available functions. - // If a Memory provider has not been registered, return all available functions. - result = availableFunctions; - } - else - { - result = new List(); - - // Remember functions in memory so that they can be searched. - await RememberFunctionsAsync(semanticMemoryConfig.Memory, availableFunctions, cancellationToken).ConfigureAwait(false); - - // Search for functions that match the semantic query. - var memories = semanticMemoryConfig.Memory.SearchAsync( - PlannerMemoryCollectionName, - semanticQuery!, - semanticMemoryConfig.MaxRelevantFunctions, - semanticMemoryConfig.RelevancyThreshold ?? 0.0, - cancellationToken: cancellationToken); - - // Add functions that were found in the search results. - result.AddRange(await GetRelevantFunctionsAsync(availableFunctions, memories, logger ?? NullLogger.Instance, cancellationToken).ConfigureAwait(false)); - - // Add any missing functions that were included but not found in the search results. - var missingFunctions = semanticMemoryConfig.IncludedFunctions - .Except(result.Select(x => (x.PluginName, x.Name))) - .Join(availableFunctions, f => f, af => (af.PluginName, af.Name), (_, af) => af); - - result.AddRange(missingFunctions); - } - - return result - .OrderBy(x => x.PluginName) - .ThenBy(x => x.Name); - } - - private static async Task> GetRelevantFunctionsAsync( - IEnumerable availableFunctions, - IAsyncEnumerable memories, - ILogger logger, - CancellationToken cancellationToken = default) - { - var relevantFunctions = new ConcurrentBag(); - await foreach (var memoryEntry in memories.WithCancellation(cancellationToken)) - { - var function = availableFunctions.FirstOrDefault(x => x.ToFullyQualifiedName() == memoryEntry.Metadata.Id); - if (function != null) - { - if (logger.IsEnabled(LogLevel.Debug)) - { - logger.LogDebug("Found relevant function. Relevance Score: {0}, Function: {1}", memoryEntry.Relevance, function.ToFullyQualifiedName()); - } - - relevantFunctions.Add(function); - } - } - - return relevantFunctions; - } - - /// - /// Saves all available functions to memory. - /// - /// The memory provided to store the functions to. - /// The available functions to save. - /// The to monitor for cancellation requests. The default is . - private static async Task RememberFunctionsAsync( - ISemanticTextMemory memory, - List availableFunctions, - CancellationToken cancellationToken = default) - { - foreach (var function in availableFunctions) - { - var functionName = function.ToFullyQualifiedName(); - var key = functionName; - var description = string.IsNullOrEmpty(function.Description) ? 
functionName : function.Description; - var textToEmbed = function.ToEmbeddingString(); - - // It'd be nice if there were a saveIfNotExists method on the memory interface - var memoryEntry = await memory.GetAsync(collection: PlannerMemoryCollectionName, key: key, withEmbedding: false, - cancellationToken: cancellationToken).ConfigureAwait(false); - if (memoryEntry == null) - { - // TODO It'd be nice if the minRelevanceScore could be a parameter for each item that was saved to memory - // As folks may want to tune their functions to be more or less relevant. - // Memory now supports these such strategies. - await memory.SaveInformationAsync(collection: PlannerMemoryCollectionName, text: textToEmbed, id: key, description: description, - additionalMetadata: string.Empty, cancellationToken: cancellationToken).ConfigureAwait(false); - } - } - } -} diff --git a/dotnet/src/Planners/Planners.Core/KernelPlanExtensions.cs b/dotnet/src/Planners/Planners.Core/KernelPlanExtensions.cs new file mode 100644 index 000000000000..411aa4646feb --- /dev/null +++ b/dotnet/src/Planners/Planners.Core/KernelPlanExtensions.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Planning; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods for running plans using a kernel +/// +public static class KernelPlanExtensions +{ + /// + /// Run the next step in a plan asynchronously + /// + /// Kernel instance to use + /// Plan to run + /// The to monitor for cancellation requests. The default is . + /// Result of the plan execution + public static Task StepAsync(this Kernel kernel, Plan plan, CancellationToken cancellationToken = default) + { + return kernel.StepAsync(plan.State, plan, cancellationToken); + } + + /// + /// Run the next step in a plan asynchronously + /// + /// Kernel instance to use + /// Input to use + /// Plan to run + /// The to monitor for cancellation requests. The default is . + public static Task StepAsync(this Kernel kernel, string input, Plan plan, CancellationToken cancellationToken = default) + { + return kernel.StepAsync(new ContextVariables(input), plan, cancellationToken); + } + + /// + /// Run the next step in a plan asynchronously + /// + /// Kernel instance to use + /// Input to process + /// Plan to run + /// The to monitor for cancellation requests. The default is . + /// Result of the plan execution + public static Task StepAsync(this Kernel kernel, ContextVariables variables, Plan plan, CancellationToken cancellationToken = default) + { + return plan.RunNextStepAsync(kernel, variables, cancellationToken); + } +} diff --git a/dotnet/src/Planners/Planners.Core/Plan.cs b/dotnet/src/Planners/Planners.Core/Plan.cs new file mode 100644 index 000000000000..98868db2ce07 --- /dev/null +++ b/dotnet/src/Planners/Planners.Core/Plan.cs @@ -0,0 +1,692 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.AI; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Standard Semantic Kernel callable plan. +/// Plan is used to create trees of s. 
+/// +[DebuggerDisplay("{DebuggerDisplay,nq}")] +public sealed class Plan +{ + internal const string MainKey = "INPUT"; + + /// + /// State of the plan + /// + [JsonPropertyName("state")] + [JsonConverter(typeof(ContextVariablesConverter))] + public ContextVariables State { get; } = new(); + + /// + /// Steps of the plan + /// + [JsonPropertyName("steps")] + public IReadOnlyList Steps => this._steps.AsReadOnly(); + + /// + /// Parameters for the plan, used to pass information to the next step + /// + [JsonPropertyName("parameters")] + [JsonConverter(typeof(ContextVariablesConverter))] + public ContextVariables Parameters { get; set; } = new(); + + /// + /// Outputs for the plan, used to pass information to the caller + /// + [JsonPropertyName("outputs")] + public IList Outputs { get; set; } = new List(); + + /// + /// Gets whether the plan has a next step. + /// + [JsonIgnore] + public bool HasNextStep => this.NextStepIndex < this.Steps.Count; + + /// + /// Gets the next step index. + /// + [JsonPropertyName("next_step_index")] + public int NextStepIndex { get; private set; } + + /// + [JsonPropertyName("plugin_name")] + public string PluginName { get; set; } = string.Empty; + + /// + /// Initializes a new instance of the class with a goal description. + /// + /// The goal of the plan used as description. + public Plan(string goal) + { + this.PluginName = nameof(Plan); // TODO markwallace - remove this + this.Name = GetRandomPlanName(); + this.Description = goal; + } + + /// + /// Initializes a new instance of the class with a goal description and steps. + /// + /// The goal of the plan used as description. + /// The steps to add. + public Plan(string goal, params KernelFunction[] steps) : this(goal) + { + this.AddSteps(steps); + } + + /// + /// Initializes a new instance of the class with a goal description and steps. + /// + /// The goal of the plan used as description. + /// The steps to add. + public Plan(string goal, params Plan[] steps) : this(goal) + { + this.AddSteps(steps); + } + + /// + /// Initializes a new instance of the class with a function. + /// + /// The function to execute. + public Plan(KernelFunction function) + { + this.Function = function; + this.Name = function.Name; + this.Description = function.Description; + } + + /// + /// Initializes a new instance of the class with a function and steps. + /// + /// The name of the plan. + /// The name of the plugin. + /// The description of the plan. + /// The index of the next step. + /// The state of the plan. + /// The parameters of the plan. + /// The outputs of the plan. + /// The steps of the plan. + [JsonConstructor] + public Plan( + string name, + string pluginName, + string description, + int nextStepIndex, + ContextVariables state, + ContextVariables parameters, + IList outputs, + IReadOnlyList steps) + { + this.PluginName = pluginName; // TODO markwallace - remove this + this.Name = name; + this.Description = description; + this.NextStepIndex = nextStepIndex; + this.State = state; + this.Parameters = parameters; + this.Outputs = outputs; + this._steps.Clear(); + this.AddSteps(steps.ToArray()); + } + + /// + /// Deserialize a JSON string into a Plan object. + /// TODO: the context should never be null, it's required internally + /// + /// JSON string representation of a Plan + /// The collection of available functions.. + /// Whether to require functions to be registered. Only used when context is not null. + /// An instance of a Plan object. + /// If Context is not supplied, plan will not be able to execute. 
+ public static Plan FromJson(string json, IReadOnlyKernelPluginCollection? plugins = null, bool requireFunctions = true) + { + var plan = JsonSerializer.Deserialize(json, s_includeFieldsOptions) ?? new Plan(string.Empty); + + if (plugins != null) + { + plan = SetAvailablePlugins(plan, plugins, requireFunctions); + } + + return plan; + } + + /// + /// Get JSON representation of the plan. + /// + /// Whether to emit indented JSON + /// Plan serialized using JSON format + public string ToJson(bool indented = false) => + indented ? + JsonSerializer.Serialize(this, JsonOptionsCache.WriteIndented) : + JsonSerializer.Serialize(this); + + /// + /// Adds one or more existing plans to the end of the current plan as steps. + /// + /// The plans to add as steps to the current plan. + /// + /// When you add a plan as a step to the current plan, the steps of the added plan are executed after the steps of the current plan have completed. + /// + public void AddSteps(params Plan[] steps) + { + this._steps.AddRange(steps); + } + + /// + /// Adds one or more new steps to the end of the current plan. + /// + /// The steps to add to the current plan. + /// + /// When you add a new step to the current plan, it is executed after the previous step in the plan has completed. Each step can be a function call or another plan. + /// + public void AddSteps(params KernelFunction[] steps) + { + this._steps.AddRange(steps.Select(step => new Plan(step))); + } + + /// + /// Runs the next step in the plan using the provided kernel instance and variables. + /// + /// The kernel instance to use for executing the plan. + /// The variables to use for the execution of the plan. + /// The to monitor for cancellation requests. The default is . + /// A task representing the asynchronous execution of the plan's next step. + /// + /// This method executes the next step in the plan using the specified kernel instance and context variables. + /// The context variables contain the necessary information for executing the plan, such as the functions and logger. + /// The method returns a task representing the asynchronous execution of the plan's next step. + /// + public Task RunNextStepAsync(Kernel kernel, ContextVariables variables, CancellationToken cancellationToken = default) + { + return this.InvokeNextStepAsync(kernel, variables, cancellationToken); + } + + /// + /// Invoke the next step of the plan + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Context variables to use + /// The to monitor for cancellation requests. The default is . + /// The updated plan + /// If an error occurs while running the plan + public async Task InvokeNextStepAsync(Kernel kernel, ContextVariables variables, CancellationToken cancellationToken = default) + { + if (this.HasNextStep) + { + await this.InternalInvokeNextStepAsync(kernel, variables, cancellationToken).ConfigureAwait(false); + } + + return this; + } + + #region ISKFunction implementation + /// + /// Gets the name of the function. + /// + /// + /// The name is used anywhere the function needs to be identified, such as in plans describing what functions + /// should be invoked when, or as part of lookups in a plugin's function collection. Function names are generally + /// handled in an ordinal case-insensitive manner. + /// + public string Name { get; } + + /// + /// Gets a description of the function. 
+ /// + /// + /// The description may be supplied to a model in order to elaborate on the function's purpose, + /// in case it may be beneficial for the model to recommend invoking the function. + /// + public string Description { get; } + + /// + /// Gets the metadata describing the function. + /// + /// An instance of describing the function + public KernelFunctionMetadata GetMetadata() + { + if (this.Function is not null) + { + return this.Function.Metadata; + } + + // The parameter mapping definitions from Plan -> Function + var stepParameters = this.Steps.SelectMany(s => s.Parameters); + + // The parameter descriptions from the Function + var stepDescriptions = this.Steps.SelectMany(s => s.GetMetadata().Parameters); + + // The parameters for the Plan + var parameters = this.Parameters.Select(p => + { + var matchingParameter = stepParameters.FirstOrDefault(sp => sp.Value.Equals($"${p.Key}", StringComparison.OrdinalIgnoreCase)); + var stepDescription = stepDescriptions.FirstOrDefault(sd => sd.Name.Equals(matchingParameter.Key, StringComparison.OrdinalIgnoreCase)); + + return new KernelParameterMetadata(p.Key) + { + Description = stepDescription?.Description, + DefaultValue = stepDescription?.DefaultValue, + IsRequired = stepDescription?.IsRequired ?? false, + ParameterType = stepDescription?.ParameterType, + Schema = stepDescription?.Schema, + }; + }).ToList(); + + return new(this.Name) + { + PluginName = this.PluginName, + Description = this.Description, + Parameters = parameters + }; + } + + /// + /// Invoke the . + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Plan input + public async Task InvokeAsync( + Kernel kernel, + string input) + { + var contextVariables = new ContextVariables(); + contextVariables.Update(input); + + return await this.InvokeAsync(kernel, contextVariables).ConfigureAwait(false); + } + + /// + /// Invoke the . + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Context variables + /// LLM completion settings (for semantic functions only) + /// The updated context, potentially a new one if context switching is implemented. + /// The to monitor for cancellation requests. The default is . + public async Task InvokeAsync( + Kernel kernel, + ContextVariables? variables = null, + PromptExecutionSettings? executionSettings = null, + CancellationToken cancellationToken = default) + { + variables ??= new ContextVariables(); + var result = new FunctionResult(this.Name, variables); + + if (this.Function is not null) + { + // Merge state with the current context variables. + // Then filter the variables to only those needed for the next step. + // This is done to prevent the function from having access to variables that it shouldn't. + AddStateVariablesToContextVariables(this.State, variables); + + var functionVariables = this.GetNextStepVariables(variables, this); + + // Execute the step + result = await this.Function + .InvokeAsync(kernel, functionVariables, executionSettings, cancellationToken) + .ConfigureAwait(false); + this.UpdateFunctionResultWithOutputs(result); + } + else + { + // loop through steps and execute until completion + while (this.HasNextStep) + { + AddStateVariablesToContextVariables(this.State, variables); + + var stepResult = await this.InternalInvokeNextStepAsync(kernel, variables, cancellationToken).ConfigureAwait(false); + + // If a step was cancelled before invocation + // Return the last result state of the plan. 
+ if (stepResult.IsCancellationRequested) + { + return result; + } + if (stepResult.IsSkipRequested) + { + continue; + } + + this.UpdateContextWithOutputs(variables); + + result = new FunctionResult(this.Name, variables, variables.Input); + this.UpdateFunctionResultWithOutputs(result); + } + } + + return result; + } + + #endregion ISKFunction implementation + + /// + /// Expand variables in the input string. + /// + /// Variables to use for expansion. + /// Input string to expand. + /// Expanded string. + internal string ExpandFromVariables(ContextVariables variables, string input) + { + var result = input; + var matches = s_variablesRegex.Matches(input); + var orderedMatches = matches.Cast().Select(m => m.Groups["var"].Value).Distinct().OrderByDescending(m => m.Length); + + foreach (var varName in orderedMatches) + { + if (variables.TryGetValue(varName, out string? value) || this.State.TryGetValue(varName, out value)) + { + result = result.Replace($"${varName}", value); + } + } + + return result; + } + + /// + /// Invoke the next step of the plan + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Context variables to use + /// The to monitor for cancellation requests. The default is . + /// Next step result + /// If an error occurs while running the plan + private async Task InternalInvokeNextStepAsync(Kernel kernel, ContextVariables variables, CancellationToken cancellationToken = default) + { + if (this.HasNextStep) + { + var step = this.Steps[this.NextStepIndex]; + + // Merge the state with the current context variables for step execution + var functionVariables = this.GetNextStepVariables(variables, step); + + // Execute the step + var result = await step.InvokeAsync(kernel, functionVariables, null, cancellationToken).ConfigureAwait(false); + + var resultValue = (result.TryGetVariableValue(MainKey, out string? value) ? value : string.Empty).Trim(); + + #region Update State + + // Update state with result + this.State.Update(resultValue); + + // Update Plan Result in State with matching outputs (if any) + if (this.Outputs.Intersect(step.Outputs).Any()) + { + if (this.State.TryGetValue(DefaultResultKey, out string? currentPlanResult)) + { + this.State.Set(DefaultResultKey, $"{currentPlanResult}\n{resultValue}"); + } + else + { + this.State.Set(DefaultResultKey, resultValue); + } + } + + // Update state with outputs (if any) + foreach (var item in step.Outputs) + { + if (result.TryGetVariableValue(item, out string? val)) + { + this.State.Set(item, val); + } + else + { + this.State.Set(item, resultValue); + } + } + + #endregion Update State + + this.NextStepIndex++; + + return result; + } + + throw new InvalidOperationException("There isn't a next step"); + } + + /// + /// Set functions for a plan and its steps. + /// + /// Plan to set functions for. + /// The collection of available plugins. + /// Whether to throw an exception if a function is not found. + /// The plan with functions set. 
+ private static Plan SetAvailablePlugins(Plan plan, IReadOnlyKernelPluginCollection plugins, bool requireFunctions = true) + { + if (plan.Steps.Count == 0) + { + Verify.NotNull(plugins); + + if (plugins.TryGetFunction(plan.PluginName, plan.Name, out var planFunction)) + { + plan.Function = planFunction; + } + else if (requireFunctions) + { + throw new KernelException($"Function '{plan.PluginName}.{plan.Name}' not found in function collection"); + } + } + else + { + foreach (var step in plan.Steps) + { + SetAvailablePlugins(step, plugins, requireFunctions); + } + } + + return plan; + } + + /// + /// Add any missing variables from a plan state variables to the context. + /// + private static void AddStateVariablesToContextVariables(ContextVariables vars, ContextVariables contextVariables) + { + // Loop through vars and add anything missing to context + foreach (var item in vars) + { + if (!contextVariables.TryGetValue(item.Key, out string? value) || string.IsNullOrEmpty(value)) + { + contextVariables.Set(item.Key, item.Value); + } + } + } + + /// + /// Update the context with the outputs from the current step. + /// + /// The context variables to update. + /// The updated context variables. + private ContextVariables UpdateContextWithOutputs(ContextVariables variables) + { + var resultString = this.State.TryGetValue(DefaultResultKey, out string? result) ? result : this.State.ToString(); + variables.Update(resultString); + + // copy previous step's variables to the next step + foreach (var item in this._steps[this.NextStepIndex - 1].Outputs) + { + if (this.State.TryGetValue(item, out string? val)) + { + variables.Set(item, val); + } + else + { + variables.Set(item, resultString); + } + } + + return variables; + } + + /// + /// Update the function result with the outputs from the current state. + /// + /// The function result to update. + /// The updated function result. + private FunctionResult? UpdateFunctionResultWithOutputs(FunctionResult? functionResult) + { + if (functionResult is null) + { + return null; + } + + foreach (var output in this.Outputs) + { + if (this.State.TryGetValue(output, out var value)) + { + functionResult.Metadata[output] = value; + } + else if (functionResult.TryGetVariableValue(output, out var val)) + { + functionResult.Metadata[output] = val; + } + } + + return functionResult; + } + + /// + /// Get the variables for the next step in the plan. + /// + /// The current context variables. + /// The next step in the plan. + /// The context variables for the next step in the plan. 
+ private ContextVariables GetNextStepVariables(ContextVariables variables, Plan step) + { + // Priority for Input + // - Parameters (expand from variables if needed) + // - SKContext.Variables + // - Plan.State + // - Empty if sending to another plan + // - Plan.Description + + var input = string.Empty; + if (!string.IsNullOrEmpty(step.Parameters.Input)) + { + input = this.ExpandFromVariables(variables, step.Parameters.Input!); + } + else if (!string.IsNullOrEmpty(variables.Input)) + { + input = variables.Input; + } + else if (!string.IsNullOrEmpty(this.State.Input)) + { + input = this.State.Input; + } + else if (step.Steps.Count > 0) + { + input = string.Empty; + } + else if (!string.IsNullOrEmpty(this.Description)) + { + input = this.Description; + } + + var stepVariables = new ContextVariables(input); + + // Priority for remaining stepVariables is: + // - Function Parameters (pull from variables or state by a key value) + // - Step Parameters (pull from variables or state by a key value) + // - All other variables. These are carried over in case the function wants access to the ambient content. + var functionParameters = step.GetMetadata(); + foreach (var param in functionParameters.Parameters) + { + if (param.Name.Equals(MainKey, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (variables.TryGetValue(param.Name, out string? value)) + { + stepVariables.Set(param.Name, value); + } + else if (this.State.TryGetValue(param.Name, out value) && !string.IsNullOrEmpty(value)) + { + stepVariables.Set(param.Name, value); + } + } + + foreach (var item in step.Parameters) + { + // Don't overwrite variable values that are already set + if (stepVariables.ContainsKey(item.Key)) + { + continue; + } + + var expandedValue = this.ExpandFromVariables(variables, item.Value); + if (!expandedValue.Equals(item.Value, StringComparison.OrdinalIgnoreCase)) + { + stepVariables.Set(item.Key, expandedValue); + } + else if (variables.TryGetValue(item.Key, out string? value)) + { + stepVariables.Set(item.Key, value); + } + else if (this.State.TryGetValue(item.Key, out value)) + { + stepVariables.Set(item.Key, value); + } + else + { + stepVariables.Set(item.Key, expandedValue); + } + } + + foreach (KeyValuePair item in variables) + { + if (!stepVariables.ContainsKey(item.Key)) + { + stepVariables.Set(item.Key, item.Value); + } + } + + return stepVariables; + } + + private static string GetRandomPlanName() => "plan" + Guid.NewGuid().ToString("N"); + + /// Deserialization options for including fields. + private static readonly JsonSerializerOptions s_includeFieldsOptions = new() { IncludeFields = true }; + + private KernelFunction? Function { get; set; } + + private readonly List _steps = new(); + + private static readonly Regex s_variablesRegex = new(@"\$(?\w+)"); + + private const string DefaultResultKey = "PLAN.RESULT"; + + [DebuggerBrowsable(DebuggerBrowsableState.Never)] + private string DebuggerDisplay + { + get + { + string display = this.Description; + + if (!string.IsNullOrWhiteSpace(this.Name)) + { + display = $"{this.Name} ({display})"; + } + + if (this._steps.Count > 0) + { + display += $", Steps = {this._steps.Count}, NextStep = {this.NextStepIndex}"; + } + + return display; + } + } +} diff --git a/dotnet/src/Planners/Planners.Core/PlanExtensions.cs b/dotnet/src/Planners/Planners.Core/PlanExtensions.cs new file mode 100644 index 000000000000..0732a622f365 --- /dev/null +++ b/dotnet/src/Planners/Planners.Core/PlanExtensions.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. 
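// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of the diff above): round-tripping a Plan through JSON and then
// stepping it with the new KernelPlanExtensions.StepAsync helper introduced earlier. It assumes
// a configured Kernel whose Plugins collection contains the functions the plan references; the
// ExecutePlanStepwiseAsync helper name is hypothetical.
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Planning;

public static class PlanSteppingSketch
{
    public static async Task<string> ExecutePlanStepwiseAsync(Kernel kernel, Plan plan)
    {
        // Serialize the plan, then rehydrate it and rebind its steps to the kernel's plugins.
        string json = plan.ToJson(indented: true);
        Plan rehydrated = Plan.FromJson(json, kernel.Plugins);

        // StepAsync advances one step per call, carrying outputs forward in the plan's State.
        while (rehydrated.HasNextStep)
        {
            rehydrated = await kernel.StepAsync(rehydrated);
        }

        return rehydrated.State.ToString();
    }
}
// ---------------------------------------------------------------------------------------------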
+ +using System.Linq; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Extension methods for type. +/// +public static class PlanExtensions +{ + /// + /// Constructs string representation of without sensitive data. + /// + /// Instance of for string construction. + /// Optional indentation. + public static string ToSafePlanString(this Plan plan, string indent = " ") + { + string planString = string.Join("\n", plan.Steps.Select(step => + { + if (step.Steps.Count == 0) + { + string pluginName = step.PluginName; + string stepName = step.Name; + + return $"{indent}{indent}- {string.Join(".", pluginName, stepName)}"; + } + + return step.ToSafePlanString(indent + indent); + })); + + return planString; + } + + /// + /// Constructs string representation of . + /// + /// Instance of for string construction. + /// Optional indentation. + public static string ToPlanString(this Plan plan, string indent = " ") + { + string planString = string.Join("\n", plan.Steps.Select(step => + { + if (step.Steps.Count == 0) + { + string pluginName = step.PluginName; + string stepName = step.Name; + + string parameters = string.Join(" ", step.Parameters.Select(param => $"{param.Key}='{param.Value}'")); + if (!string.IsNullOrEmpty(parameters)) + { + parameters = $" {parameters}"; + } + + string? outputs = step.Outputs.FirstOrDefault(); + if (!string.IsNullOrEmpty(outputs)) + { + outputs = $" => {outputs}"; + } + + return $"{indent}{indent}- {string.Join(".", pluginName, stepName)}{parameters}{outputs}"; + } + + return step.ToPlanString(indent + indent); + })); + + return planString; + } +} diff --git a/dotnet/src/Planners/Planners.Core/PlannerConfigBase.cs b/dotnet/src/Planners/Planners.Core/PlannerConfigBase.cs deleted file mode 100644 index c0d6cf64728d..000000000000 --- a/dotnet/src/Planners/Planners.Core/PlannerConfigBase.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Base class for planner configs -/// -public abstract class PlannerConfigBase -{ - /// - /// Delegate to get the prompt template string. - /// - public Func? GetPromptTemplate { get; set; } - - /// - /// A list of plugins to exclude from the plan creation request. - /// - public HashSet ExcludedPlugins { get; } = new(); - - /// - /// A list of functions to exclude from the plan creation request. - /// - public HashSet ExcludedFunctions { get; } = new(); - - /// - /// Semantic Memory configuration, used to enable function filtering during plan creation. - /// - /// - /// This configuration will be ignored if is set. - /// - public SemanticMemoryConfig SemanticMemoryConfig { get; set; } = new(); - - /// - /// Callback to get the available functions for planning (optional). - /// Use if you want to override the default function lookup behavior. - /// If set, this function takes precedence over . - /// Setting , will be used to filter the results. - /// - public Func>>? GetAvailableFunctionsAsync { get; set; } - - /// - /// Callback to get a function by name (optional). - /// Use if you want to override the default function lookup behavior. - /// - public Func? 
GetFunctionCallback { get; set; } - - /// - /// The maximum total number of tokens to allow in a completion request, - /// which includes the tokens from the prompt and completion - /// - public int MaxTokens { get; set; } -} diff --git a/dotnet/src/Planners/Planners.Core/Planners.Core.csproj b/dotnet/src/Planners/Planners.Core/Planners.Core.csproj index 13899566ae3d..feb175d1c11a 100644 --- a/dotnet/src/Planners/Planners.Core/Planners.Core.csproj +++ b/dotnet/src/Planners/Planners.Core/Planners.Core.csproj @@ -1,14 +1,15 @@ - + Microsoft.SemanticKernel.Planners.Core - Microsoft.SemanticKernel.Planners + Microsoft.SemanticKernel.Planning netstandard2.0 + @@ -57,12 +58,10 @@ - - diff --git a/dotnet/src/Planners/Planners.Core/Sequential/ISequentialPlanner.cs b/dotnet/src/Planners/Planners.Core/Sequential/ISequentialPlanner.cs deleted file mode 100644 index da18d793d580..000000000000 --- a/dotnet/src/Planners/Planners.Core/Sequential/ISequentialPlanner.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Planning; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Interface for planner that uses a set of semantic functions to create a sequential plan. -/// -public interface ISequentialPlanner -{ - /// - /// Create a plan for a goal. - /// - /// The goal to create a plan for. - /// The to monitor for cancellation requests. The default is . - /// The plan. - /// Thrown when the plan cannot be created. - Task CreatePlanAsync(string goal, CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/Planners/Planners.Core/Sequential/InstrumentedSequentialPlanner.cs b/dotnet/src/Planners/Planners.Core/Sequential/InstrumentedSequentialPlanner.cs deleted file mode 100644 index 76773428ba07..000000000000 --- a/dotnet/src/Planners/Planners.Core/Sequential/InstrumentedSequentialPlanner.cs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Planning; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Instrumented planner that uses semantic function to create a sequential plan. -/// Captures planner-related logs and metrics. -/// -internal sealed class InstrumentedSequentialPlanner : ISequentialPlanner -{ - /// - /// Initialize a new instance of the class. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public InstrumentedSequentialPlanner( - ISequentialPlanner planner, - ILoggerFactory? loggerFactory = null) - { - this._planner = planner; - this._logger = loggerFactory is not null ? 
loggerFactory.CreateLogger(typeof(InstrumentedSequentialPlanner)) : NullLogger.Instance; - } - - /// - public async Task CreatePlanAsync(string goal, CancellationToken cancellationToken = default) - { - using var activity = s_activitySource.StartActivity($"{PlannerType}.CreatePlan"); - - this._logger.LogInformation("{PlannerType}: Plan creation started.", PlannerType); - - // Sensitive data, logging as trace, disabled by default - this._logger.LogTrace("{PlannerType}: Plan Goal: {Goal}", PlannerType, goal); - - var stopwatch = new Stopwatch(); - - try - { - stopwatch.Start(); - - var plan = await this._planner.CreatePlanAsync(goal, cancellationToken).ConfigureAwait(false); - - stopwatch.Stop(); - - this._logger.LogInformation("{PlannerType}: Plan creation status: {Status}", PlannerType, "Success"); - - this._logger.LogInformation("{PlannerType}: Created plan: \n {Plan}", PlannerType, plan.ToSafePlanString()); - - // Sensitive data, logging as trace, disabled by default - this._logger.LogTrace("{PlannerType}: Created plan with details: \n {Plan}", PlannerType, plan.ToPlanString()); - - return plan; - } - catch (Exception ex) - { - this._logger.LogInformation("{PlannerType}: Plan creation status: {Status}", PlannerType, "Failed"); - this._logger.LogError(ex, "{PlannerType}: Plan creation exception details: {Message}", PlannerType, ex.Message); - - throw; - } - finally - { - this._logger.LogInformation("{PlannerType}: Plan creation finished in {ExecutionTime}ms.", PlannerType, stopwatch.ElapsedMilliseconds); - - s_createPlanExecutionTime.Record(stopwatch.ElapsedMilliseconds); - } - } - - #region private ================================================================================ - - private const string PlannerType = nameof(SequentialPlanner); - - private readonly ISequentialPlanner _planner; - private readonly ILogger _logger; - - /// - /// Instance of for planner-related activities. - /// - private static readonly ActivitySource s_activitySource = new(typeof(InstrumentedSequentialPlanner).FullName); - - /// - /// Instance of for planner-related metrics. - /// - private static readonly Meter s_meter = new(typeof(InstrumentedSequentialPlanner).FullName); - - /// - /// Instance of to record plan creation execution time. - /// - private static readonly Histogram s_createPlanExecutionTime = - s_meter.CreateHistogram( - name: $"SK.{PlannerType}.CreatePlan.ExecutionTime", - unit: "ms", - description: "Execution time of plan creation"); - - #endregion -} diff --git a/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlanParser.cs b/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlanParser.cs index 5418a6bb9fbc..afe6e2ebcc81 100644 --- a/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlanParser.cs +++ b/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlanParser.cs @@ -4,11 +4,8 @@ using System.Collections.Generic; using System.Text.RegularExpressions; using System.Xml; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Planning; -namespace Microsoft.SemanticKernel.Planners.Sequential; +namespace Microsoft.SemanticKernel.Planning; /// /// Parse sequential plan text into a plan. @@ -49,8 +46,8 @@ internal static class SequentialPlanParser /// The callback to get a plugin function. /// Whether to allow missing functions in the plan on creation. /// The plan. - /// Thrown when the plan xml is invalid. 
- internal static Plan ToPlanFromXml(this string xmlString, string goal, Func getFunctionCallback, bool allowMissingFunctions = false) + /// Thrown when the plan xml is invalid. + internal static Plan ToPlanFromXml(this string xmlString, string goal, Func getFunctionCallback, bool allowMissingFunctions = false) { XmlDocument xmlDoc = new(); try @@ -85,12 +82,12 @@ internal static Plan ToPlanFromXml(this string xmlString, string goal, Func(); var functionResults = new List(); - var view = pluginFunction.Describe(); - foreach (var p in view.Parameters) + var metadata = pluginFunction.Metadata; + foreach (var p in metadata.Parameters) { functionVariables.Set(p.Name, p.DefaultValue); } @@ -170,11 +168,11 @@ internal static Plan ToPlanFromXml(this string xmlString, string goal, Func /// A planner that uses semantic function to create a sequential plan. /// -public sealed class SequentialPlanner : ISequentialPlanner +public sealed class SequentialPlanner { private const string StopSequence = ""; private const string AvailableFunctionsKey = "available_functions"; @@ -25,10 +20,10 @@ public sealed class SequentialPlanner : ISequentialPlanner /// /// Initialize a new instance of the class. /// - /// The semantic kernel instance. + /// The containing services, plugins, and other state for use throughout the operation. /// The planner configuration. public SequentialPlanner( - IKernel kernel, + Kernel kernel, SequentialPlannerConfig? config = null) { Verify.NotNull(kernel); @@ -40,14 +35,13 @@ public SequentialPlanner( // Set up prompt template string promptTemplate = this.Config.GetPromptTemplate?.Invoke() ?? EmbeddedResource.Read("Sequential.skprompt.txt"); - this._functionFlowFunction = kernel.CreateSemanticFunction( + this._functionFlowFunction = kernel.CreateFunctionFromPrompt( promptTemplate: promptTemplate, - pluginName: RestrictedPluginName, description: "Given a request or command or goal generate a step by step plan to " + "fulfill the request using functions. This ability is also known as decision making and function flow", - requestSettings: new AIRequestSettings() + executionSettings: new PromptExecutionSettings() { - ExtensionData = new Dictionary() + ExtensionData = new() { { "Temperature", 0.0 }, { "StopSequences", new[] { StopSequence } }, @@ -56,49 +50,61 @@ public SequentialPlanner( }); this._kernel = kernel; + this._logger = kernel.LoggerFactory.CreateLogger(this.GetType()) ?? NullLogger.Instance; } - /// - public async Task CreatePlanAsync(string goal, CancellationToken cancellationToken = default) + /// Creates a plan for the specified goal. + /// The goal for which a plan should be created. + /// The to monitor for cancellation requests. The default is . + /// The created plan. + /// is null. + /// is empty or entirely composed of whitespace. + /// A plan could not be created. 
+ public Task CreatePlanAsync(string goal, CancellationToken cancellationToken = default) { - if (string.IsNullOrEmpty(goal)) - { - throw new SKException("The goal specified is empty"); - } + Verify.NotNullOrWhiteSpace(goal); - string relevantFunctionsManual = await this._kernel.Functions.GetFunctionsManualAsync(this.Config, goal, null, cancellationToken).ConfigureAwait(false); + return PlannerInstrumentation.CreatePlanAsync( + createPlanAsync: static (SequentialPlanner planner, string goal, CancellationToken cancellationToken) => planner.CreatePlanCoreAsync(goal, cancellationToken), + planToString: static (Plan plan) => plan.ToSafePlanString(), + this, goal, this._logger, cancellationToken); + } + + private async Task CreatePlanCoreAsync(string goal, CancellationToken cancellationToken) + { + string relevantFunctionsManual = await this._kernel.Plugins.GetFunctionsManualAsync(this.Config, goal, null, cancellationToken).ConfigureAwait(false); ContextVariables vars = new(goal) { [AvailableFunctionsKey] = relevantFunctionsManual }; - KernelResult planResult = await this._kernel.RunAsync(this._functionFlowFunction, vars, cancellationToken).ConfigureAwait(false); + FunctionResult planResult = await this._kernel.InvokeAsync(this._functionFlowFunction, vars, cancellationToken).ConfigureAwait(false); string? planResultString = planResult.GetValue()?.Trim(); if (string.IsNullOrWhiteSpace(planResultString)) { - throw new SKException( + throw new KernelException( "Unable to create plan. No response from Function Flow function. " + $"\nGoal:{goal}\nFunctions:\n{relevantFunctionsManual}"); } - var getFunctionCallback = this.Config.GetFunctionCallback ?? this._kernel.Functions.GetFunctionCallback(); + var getFunctionCallback = this.Config.GetFunctionCallback ?? this._kernel.Plugins.GetFunctionCallback(); Plan plan; try { plan = planResultString!.ToPlanFromXml(goal, getFunctionCallback, this.Config.AllowMissingFunctions); } - catch (SKException e) + catch (KernelException e) { - throw new SKException($"Unable to create plan for goal with available functions.\nGoal:{goal}\nFunctions:\n{relevantFunctionsManual}", e); + throw new KernelException($"Unable to create plan for goal with available functions.\nGoal:{goal}\nFunctions:\n{relevantFunctionsManual}", e); } if (plan.Steps.Count == 0) { - throw new SKException($"Not possible to create plan for goal with available functions.\nGoal:{goal}\nFunctions:\n{relevantFunctionsManual}"); + throw new KernelException($"Not possible to create plan for goal with available functions.\nGoal:{goal}\nFunctions:\n{relevantFunctionsManual}"); } return plan; @@ -106,12 +112,13 @@ public async Task CreatePlanAsync(string goal, CancellationToken cancellat private SequentialPlannerConfig Config { get; } - private readonly IKernel _kernel; + private readonly Kernel _kernel; + private readonly ILogger _logger; /// /// the function flow semantic function, which takes a goal and creates an xml plan that can be executed /// - private readonly ISKFunction _functionFlowFunction; + private readonly KernelFunction _functionFlowFunction; /// /// The name to use when creating semantic functions that are restricted from plan creation diff --git a/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlannerConfig.cs b/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlannerConfig.cs index 807b644f4224..919b8400db04 100644 --- a/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlannerConfig.cs +++ b/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlannerConfig.cs @@ 
-1,9 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; /// /// Common configuration for planner instances. diff --git a/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlannerExtensions.cs b/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlannerExtensions.cs deleted file mode 100644 index 50eea1a2666d..000000000000 --- a/dotnet/src/Planners/Planners.Core/Sequential/SequentialPlannerExtensions.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.Logging; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Extension methods for class. -/// -public static class SequentialPlannerExtensions -{ - /// - /// Returns decorated instance of with enabled instrumentation. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public static ISequentialPlanner WithInstrumentation(this ISequentialPlanner planner, ILoggerFactory? loggerFactory = null) - { - return new InstrumentedSequentialPlanner(planner, loggerFactory); - } -} diff --git a/dotnet/src/Planners/Planners.Core/Stepwise/ChatHistoryExtensions.cs b/dotnet/src/Planners/Planners.Core/Stepwise/ChatHistoryExtensions.cs deleted file mode 100644 index 60e810f105b5..000000000000 --- a/dotnet/src/Planners/Planners.Core/Stepwise/ChatHistoryExtensions.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Linq; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using static Microsoft.SemanticKernel.Text.TextChunker; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Extension methods for class. -/// -public static class ChatHistoryExtensions -{ - /// - /// Returns the number of tokens in the chat history. - /// - // The chat history. - // An additional message to include in the token count. - // The index to start skipping messages. - // The number of messages to skip. - // The token counter to use. - internal static int GetTokenCount(this ChatHistory chatHistory, string? additionalMessage = null, int skipStart = 0, int skipCount = 0, TokenCounter? tokenCounter = null) - { - tokenCounter ??= DefaultTokenCounter; - - var messages = string.Join("\n", chatHistory.Where((m, i) => i < skipStart || i >= skipStart + skipCount).Select(m => m.Content)); - - if (!string.IsNullOrEmpty(additionalMessage)) - { - messages = $"{messages}\n{additionalMessage}"; - } - - var tokenCount = tokenCounter(messages); - return tokenCount; - } - - private static int DefaultTokenCounter(string input) - { - return input.Length / 4; - } -} diff --git a/dotnet/src/Planners/Planners.Core/Stepwise/IStepwisePlanner.cs b/dotnet/src/Planners/Planners.Core/Stepwise/IStepwisePlanner.cs deleted file mode 100644 index 484e2ecee57e..000000000000 --- a/dotnet/src/Planners/Planners.Core/Stepwise/IStepwisePlanner.cs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
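For context on the Sequential planner changes above, here is a minimal usage sketch of the reworked surface: the planner now takes a `Kernel` (rather than `IKernel`), lives in the `Microsoft.SemanticKernel.Planning` namespace, and reports failures via `KernelException`. This is illustrative only and not part of the diff; the kernel is assumed to be configured elsewhere, and the return type is assumed to be `Task<Plan>` as in the removed interface.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Planning;

// Hypothetical helper showing the reworked SequentialPlanner surface.
public static class SequentialPlannerUsageSketch
{
    public static async Task<Plan> CreatePlanAsync(Kernel kernel, string goal, CancellationToken cancellationToken = default)
    {
        var planner = new SequentialPlanner(kernel);

        // Per the diff: an empty/whitespace goal now fails argument validation,
        // and an unusable or empty plan surfaces as a KernelException.
        Plan plan = await planner.CreatePlanAsync(goal, cancellationToken);

        Console.WriteLine($"Created plan with {plan.Steps.Count} step(s).");
        return plan;
    }
}
```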
- -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Planning; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Interface for planner that creates a Stepwise plan using Mrkl systems. -/// -public interface IStepwisePlanner -{ - /// - /// Create a plan for a goal. - /// - /// The goal to create a plan for. - /// The plan. - /// Thrown when the plan cannot be created. - Plan CreatePlan(string goal); -} diff --git a/dotnet/src/Planners/Planners.Core/Stepwise/InstrumentedStepwisePlanner.cs b/dotnet/src/Planners/Planners.Core/Stepwise/InstrumentedStepwisePlanner.cs deleted file mode 100644 index afec10f2e985..000000000000 --- a/dotnet/src/Planners/Planners.Core/Stepwise/InstrumentedStepwisePlanner.cs +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Planning; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Instrumented planner that creates a Stepwise plan using Mrkl systems. -/// Captures planner-related logs and metrics. -/// -internal class InstrumentedStepwisePlanner : IStepwisePlanner -{ - /// - /// Initialize a new instance of the class. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public InstrumentedStepwisePlanner( - IStepwisePlanner planner, - ILoggerFactory? loggerFactory = null) - { - this._planner = planner; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(InstrumentedStepwisePlanner)) : NullLogger.Instance; - } - - /// - public Plan CreatePlan(string goal) - { - using var activity = s_activitySource.StartActivity($"{PlannerType}.CreatePlan"); - - this._logger.LogInformation("{PlannerType}: Plan creation started.", PlannerType); - - // Sensitive data, logging as trace, disabled by default - this._logger.LogTrace("{PlannerType}: Plan Goal: {Goal}", PlannerType, goal); - - var stopwatch = new Stopwatch(); - - try - { - stopwatch.Start(); - - var plan = this._planner.CreatePlan(goal); - - stopwatch.Stop(); - - this._logger.LogInformation("{PlannerType}: Plan creation status: {Status}", PlannerType, "Success"); - - return plan; - } - catch (Exception ex) - { - this._logger.LogInformation("{PlannerType}: Plan creation status: {Status}", PlannerType, "Failed"); - this._logger.LogError(ex, "{PlannerType}: Plan creation exception details: {Message}", PlannerType, ex.Message); - - throw; - } - finally - { - this._logger.LogInformation("{PlannerType}: Plan creation finished in {ExecutionTime}ms.", PlannerType, stopwatch.ElapsedMilliseconds); - - s_createPlanExecutionTime.Record(stopwatch.ElapsedMilliseconds); - } - } - - #region private ================================================================================ - - private const string PlannerType = nameof(StepwisePlanner); - - private readonly IStepwisePlanner _planner; - private readonly ILogger _logger; - - /// - /// Instance of for planner-related activities. 
- /// - private static readonly ActivitySource s_activitySource = new(typeof(InstrumentedStepwisePlanner).FullName); - - /// - /// Instance of for planner-related metrics. - /// - private static readonly Meter s_meter = new(typeof(InstrumentedStepwisePlanner).FullName); - - /// - /// Instance of to record plan creation execution time. - /// - private static readonly Histogram s_createPlanExecutionTime = - s_meter.CreateHistogram( - name: $"SK.{PlannerType}.CreatePlan.ExecutionTime", - unit: "ms", - description: "Execution time of plan creation"); - - #endregion -} diff --git a/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlanner.cs b/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlanner.cs index 8adaf550b428..ef99fe7c01f2 100644 --- a/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlanner.cs +++ b/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlanner.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.ComponentModel; +using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; @@ -11,19 +12,11 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Planning; using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TemplateEngine; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; /// /// A planner that creates a Stepwise plan using Mrkl systems. @@ -31,15 +24,15 @@ namespace Microsoft.SemanticKernel.Planners; /// /// An implementation of a Mrkl system as described in https://arxiv.org/pdf/2205.00445.pdf /// -public class StepwisePlanner : IStepwisePlanner +public class StepwisePlanner { /// /// Initialize a new instance of the class. /// - /// The semantic kernel instance. + /// The containing services, plugins, and other state for use throughout the operation. /// Optional configuration object public StepwisePlanner( - IKernel kernel, + Kernel kernel, StepwisePlannerConfig? config = null) { Verify.NotNull(kernel); @@ -54,72 +47,88 @@ public StepwisePlanner( this._manualTemplate = EmbeddedResource.Read("Stepwise.Plugin.RenderFunctionManual.skprompt.txt"); this._questionTemplate = EmbeddedResource.Read("Stepwise.Plugin.RenderQuestion.skprompt.txt"); - // Load or use default PromptConfig + // Load or use default PromptModel this._promptConfig = this.Config.PromptUserConfig ?? 
LoadPromptConfigFromResource(); // Set MaxTokens for the prompt config this._promptConfig.SetMaxTokens(this.Config.MaxCompletionTokens); + ILoggerFactory loggerFactory = this._kernel.LoggerFactory; + // Initialize prompt renderer - this._promptRenderer = new BasicPromptTemplateEngine(this._kernel.LoggerFactory); + this._promptTemplateFactory = new KernelPromptTemplateFactory(loggerFactory); // Import native functions - this._nativeFunctions = this._kernel.ImportFunctions(this, RestrictedPluginName); + this._nativeFunctions = this._kernel.ImportPluginFromObject(this, RestrictedPluginName); // Create context and logger - this._logger = this._kernel.LoggerFactory.CreateLogger(this.GetType()); + this._logger = loggerFactory.CreateLogger(this.GetType()) ?? NullLogger.Instance; } - /// + /// Creates a plan for the specified goal. + /// The goal for which a plan should be created. + /// The created plan. + /// is null. + /// is empty or entirely composed of whitespace. + /// A plan could not be created. public Plan CreatePlan(string goal) { - if (string.IsNullOrEmpty(goal)) - { - throw new SKException("The goal specified is empty"); - } - - Plan plan = new(this._nativeFunctions["ExecutePlan"]); - plan.Parameters.Set("question", goal); - - plan.Outputs.Add("stepCount"); - plan.Outputs.Add("functionCount"); - plan.Outputs.Add("stepsTaken"); - plan.Outputs.Add("iterations"); + Verify.NotNullOrWhiteSpace(goal); - return plan; + Task task = PlannerInstrumentation.CreatePlanAsync( + static (StepwisePlanner planner, string goal, CancellationToken _) => + { + Plan plan = new(planner._nativeFunctions["ExecutePlan"]) + { + PluginName = RestrictedPluginName, + Outputs = { "stepCount", "functionCount", "stepsTaken", "iterations" }, + }; + plan.Parameters.Set("question", goal); + return Task.FromResult(plan); + }, + static (Plan plan) => plan.ToSafePlanString(), + this, goal, this._logger, CancellationToken.None); + + // The instrumentation doesn't do any asynchronous work other than invoke the supplied callback, + // which we know will complete synchronously, so we can safely use GetResult without incurring + // blocking as the operation will have already completed by the time the call returns. + Debug.Assert(task.IsCompleted); +#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits + return task.GetAwaiter().GetResult(); +#pragma warning restore VSTHRD002 } /// /// Execute a plan /// /// The question to answer - /// The context to use + /// The context variables to use /// The to monitor for cancellation requests. The default is . - /// The context with the result - /// No AIService available for getting completions. - [SKFunction, SKName("ExecutePlan"), Description("Execute a plan")] - public async Task ExecutePlanAsync( + /// The result + /// No AIService available for getting completions. 
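The comment above the `task.GetAwaiter().GetResult()` call explains why the synchronous wait is safe: the delegate handed to the instrumentation completes synchronously, so the returned task is already finished by the time it is unwrapped. A standalone sketch of that pattern follows; the names are invented for illustration and this is not Semantic Kernel code.

```csharp
using System;
using System.Diagnostics;
using System.Threading.Tasks;

// Illustration of unwrapping a task that is known to complete synchronously:
// GetAwaiter().GetResult() does not block here, it only returns the computed
// value (or rethrows the stored exception).
internal static class SynchronousTaskSketch
{
    public static int RunSynchronously(Func<Task<int>> factory)
    {
        Task<int> task = factory();

        // The callee is expected to return an already-completed task.
        Debug.Assert(task.IsCompleted);

        return task.GetAwaiter().GetResult();
    }

    public static void Demo()
    {
        int value = RunSynchronously(() => Task.FromResult(42));
        Console.WriteLine(value); // prints 42
    }
}
```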
+ [KernelFunction, Description("Execute a plan")] + public async Task ExecutePlanAsync( [Description("The question to answer")] string question, - SKContext context, + ContextVariables contextVariables, CancellationToken cancellationToken = default) { if (string.IsNullOrEmpty(question)) { - context.Variables.Update("Question not found."); - return context; + contextVariables.Update("Question not found."); + return "Question not found."; } - ChatHistory chatHistory = await this.InitializeChatHistoryAsync(this.CreateChatHistory(this._kernel, out var aiService), aiService, question, context, cancellationToken).ConfigureAwait(false); + ChatHistory chatHistory = await this.InitializeChatHistoryAsync(this._kernel, this.CreateChatHistory(this._kernel, out var aiService), aiService, question, contextVariables, cancellationToken).ConfigureAwait(false); if (aiService is null) { - throw new SKException("No AIService available for getting completions."); + throw new KernelException("No AIService available for getting completions."); } if (chatHistory is null) { - throw new SKException("ChatHistory is null."); + throw new KernelException("ChatHistory is null."); } var startingMessageCount = chatHistory.Count; @@ -134,21 +143,21 @@ async Task GetNextStepAsync() return this.ParseResult(actionText); } - SKContext? TryGetFinalAnswer(SystemStep step, int iterations, SKContext context) + string? TryGetFinalAnswer(SystemStep step, int iterations, ContextVariables variables) { // If a final answer is found, update the context to be returned if (!string.IsNullOrEmpty(step.FinalAnswer)) { this._logger?.LogInformation("Final Answer: {FinalAnswer}", step.FinalAnswer); - context.Variables.Update(step.FinalAnswer); + variables.Update(step.FinalAnswer); stepsTaken.Add(step); // Add additional results to the context - AddExecutionStatsToContext(stepsTaken, context, iterations); + AddExecutionStatsToContextVariables(stepsTaken, variables, iterations); - return context; + return variables.Input; } return null; @@ -272,10 +281,10 @@ bool TryGetThought(SystemStep step) var nextStep = await GetNextStepAsync().ConfigureAwait(false); // If final answer is available, we're done, return the context - var finalContext = TryGetFinalAnswer(nextStep, i + 1, context); - if (finalContext is not null) + var answer = TryGetFinalAnswer(nextStep, i + 1, contextVariables); + if (answer is not null) { - return finalContext; + return answer; } // If we have an observation before running the action, continue to the next iteration @@ -303,24 +312,24 @@ bool TryGetThought(SystemStep step) } } - AddExecutionStatsToContext(stepsTaken, context, this.Config.MaxIterations); - context.Variables.Update(NoFinalAnswerFoundMessage); + AddExecutionStatsToContextVariables(stepsTaken, contextVariables, this.Config.MaxIterations); + contextVariables.Update(NoFinalAnswerFoundMessage); - return context; + return NoFinalAnswerFoundMessage; } #region setup helpers - private async Task InitializeChatHistoryAsync(ChatHistory chatHistory, IAIService aiService, string question, SKContext context, CancellationToken cancellationToken) + private async Task InitializeChatHistoryAsync(Kernel kernel, ChatHistory chatHistory, IAIService aiService, string question, ContextVariables variables, CancellationToken cancellationToken) { - string userManual = await this.GetUserManualAsync(question, context, cancellationToken).ConfigureAwait(false); - string userQuestion = await this.GetUserQuestionAsync(context, cancellationToken).ConfigureAwait(false); + string userManual = 
await this.GetUserManualAsync(kernel, question, variables, cancellationToken).ConfigureAwait(false); + string userQuestion = await this.GetUserQuestionAsync(kernel, variables, cancellationToken).ConfigureAwait(false); - var systemContext = this._kernel.CreateNewContext(); + var systemVariables = new ContextVariables(); - systemContext.Variables.Set("suffix", this.Config.Suffix); - systemContext.Variables.Set("functionDescriptions", userManual); - string systemMessage = await this.GetSystemMessageAsync(systemContext, cancellationToken).ConfigureAwait(false); + systemVariables.Set("suffix", this.Config.Suffix); + systemVariables.Set("functionDescriptions", userManual); + string systemMessage = await this.GetSystemMessageAsync(kernel, systemVariables, cancellationToken).ConfigureAwait(false); chatHistory.AddSystemMessage(systemMessage); chatHistory.AddUserMessage(userQuestion); @@ -328,7 +337,7 @@ private async Task InitializeChatHistoryAsync(ChatHistory chatHisto return chatHistory; } - private ChatHistory CreateChatHistory(IKernel kernel, out IAIService aiService) + private ChatHistory CreateChatHistory(Kernel kernel, out IAIService aiService) { ChatHistory chatHistory; if (TryGetChatCompletion(this._kernel, out var chatCompletion)) @@ -338,26 +347,26 @@ private ChatHistory CreateChatHistory(IKernel kernel, out IAIService aiService) } else { - var textCompletion = this._kernel.GetService(); - aiService = textCompletion; + aiService = this._kernel.GetService(); chatHistory = new ChatHistory(); } return chatHistory; } - private async Task GetUserManualAsync(string question, SKContext context, CancellationToken cancellationToken) + private async Task GetUserManualAsync(Kernel kernel, string question, ContextVariables variables, CancellationToken cancellationToken) { - var descriptions = await this._kernel.Functions.GetFunctionsManualAsync(this.Config, question, this._logger, cancellationToken).ConfigureAwait(false); - context.Variables.Set("functionDescriptions", descriptions); - return await this._promptRenderer.RenderAsync(this._manualTemplate, context, cancellationToken).ConfigureAwait(false); + var descriptions = await this._kernel.Plugins.GetFunctionsManualAsync(this.Config, question, this._logger, cancellationToken).ConfigureAwait(false); + variables.Set("functionDescriptions", descriptions); + var promptTemplate = this._promptTemplateFactory.Create(this._manualTemplate, new PromptTemplateConfig()); + return await promptTemplate.RenderAsync(kernel, variables, cancellationToken).ConfigureAwait(false); } - private Task GetUserQuestionAsync(SKContext context, CancellationToken cancellationToken) - => this._promptRenderer.RenderAsync(this._questionTemplate, context, cancellationToken); + private Task GetUserQuestionAsync(Kernel kernel, ContextVariables variables, CancellationToken cancellationToken) + => this._promptTemplateFactory.Create(this._questionTemplate, new PromptTemplateConfig()).RenderAsync(kernel, variables, cancellationToken); - private Task GetSystemMessageAsync(SKContext context, CancellationToken cancellationToken) - => this._promptRenderer.RenderAsync(this._promptTemplate, context, cancellationToken); + private Task GetSystemMessageAsync(Kernel kernel, ContextVariables variables, CancellationToken cancellationToken) + => this._promptTemplateFactory.Create(this._promptTemplate, new PromptTemplateConfig()).RenderAsync(kernel, variables, cancellationToken); #endregion setup helpers @@ -381,7 +390,7 @@ private Task GetNextStepCompletionAsync(List stepsTaken, Cha if (tokenCount 
>= this.Config.MaxPromptTokens) { - throw new SKException("ChatHistory is too long to get a completion. Try reducing the available functions."); + throw new KernelException("ChatHistory is too long to get a completion. Try reducing the available functions."); } var reducedChatHistory = new ChatHistory(); @@ -403,7 +412,7 @@ private async Task GetCompletionAsync(IAIService aiService, ChatHistory var llmResponse = (await chatCompletion.GenerateMessageAsync(chatHistory, this._promptConfig.GetDefaultRequestSettings(), token).ConfigureAwait(false)); return llmResponse; } - else if (aiService is ITextCompletion textCompletion) + else if (aiService is ITextGeneration textGeneration) { var thoughtProcess = string.Join("\n", chatHistory.Select(m => m.Content)); @@ -415,17 +424,17 @@ private async Task GetCompletionAsync(IAIService aiService, ChatHistory } thoughtProcess = $"{thoughtProcess}\n"; - IReadOnlyList results = await textCompletion.GetCompletionsAsync(thoughtProcess, this._promptConfig.GetDefaultRequestSettings(), token).ConfigureAwait(false); + IReadOnlyList results = await textGeneration.GetCompletionsAsync(thoughtProcess, this._promptConfig.GetDefaultRequestSettings(), token).ConfigureAwait(false); if (results.Count == 0) { - throw new SKException("No completions returned."); + throw new KernelException("No completions returned."); } return await results[0].GetCompletionAsync(token).ConfigureAwait(false); } - throw new SKException("No AIService available for getting completions."); + throw new KernelException("No AIService available for getting completions."); } /// @@ -517,7 +526,7 @@ protected internal virtual SystemStep ParseResult(string input) return $"Could not parse functionName from actionName: {actionName}. Please try again using one of the [AVAILABLE FUNCTIONS]."; } - var getFunctionCallback = this.Config.GetFunctionCallback ?? this._kernel.Functions.GetFunctionCallback(); + var getFunctionCallback = this.Config.GetFunctionCallback ?? this._kernel.Plugins.GetFunctionCallback(); var targetFunction = getFunctionCallback(pluginName, functionName); if (targetFunction == null) @@ -531,14 +540,16 @@ protected internal virtual SystemStep ParseResult(string input) string? result = null; var vars = this.CreateActionContextVariables(actionVariables); - var kernelResult = await this._kernel.RunAsync(targetFunction, vars, cancellationToken).ConfigureAwait(false); - var resultObject = kernelResult.GetValue(); - - var converter = TypeDescriptor.GetConverter(resultObject); + var functionResult = await this._kernel.InvokeAsync(targetFunction, vars, cancellationToken).ConfigureAwait(false); + var resultObject = functionResult.GetValue(); - if (converter.CanConvertTo(typeof(string))) + if (resultObject is not null) { - result = converter.ConvertToString(resultObject); + var converter = TypeDescriptor.GetConverter(resultObject); + if (converter.CanConvertTo(typeof(string))) + { + result = converter.ConvertToString(resultObject); + } } this._logger?.LogTrace("Invoked {FunctionName}. Result: {Result}", targetFunction.Name, result); @@ -547,7 +558,7 @@ protected internal virtual SystemStep ParseResult(string input) } catch (Exception e) when (!e.IsCriticalException()) { - this._logger?.LogError(e, "Something went wrong in system step: {Plugin}.{Function}. Error: {Error}", targetFunction.PluginName, targetFunction.Name, e.Message); + this._logger?.LogError(e, "Something went wrong in system step: {Function}. 
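One behavioral fix in the hunk above: `TypeDescriptor.GetConverter(object)` throws for a null argument, so the function result is now checked before attempting conversion. A standalone restatement of that guard, illustrative only and not the planner code itself:

```csharp
using System.ComponentModel;

// Null-guarded conversion mirroring the change above: a null result simply
// yields null instead of an ArgumentNullException from GetConverter.
internal static class FunctionResultConversionSketch
{
    public static string? TryConvertToString(object? resultObject)
    {
        if (resultObject is null)
        {
            return null;
        }

        var converter = TypeDescriptor.GetConverter(resultObject);
        return converter.CanConvertTo(typeof(string))
            ? converter.ConvertToString(resultObject)
            : null;
    }
}
```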
Error: {Error}", targetFunction.Name, e.Message); throw; } } @@ -574,7 +585,7 @@ private static PromptTemplateConfig LoadPromptConfigFromResource() return !string.IsNullOrEmpty(promptConfigString) ? PromptTemplateConfig.FromJson(promptConfigString) : new PromptTemplateConfig(); } - private static bool TryGetChatCompletion(IKernel kernel, [NotNullWhen(true)] out IChatCompletion? chatCompletion) + private static bool TryGetChatCompletion(Kernel kernel, [NotNullWhen(true)] out IChatCompletion? chatCompletion) { try { @@ -583,7 +594,7 @@ private static bool TryGetChatCompletion(IKernel kernel, [NotNullWhen(true)] out chatCompletion = kernel.GetService(); return true; } - catch (SKException) + catch (KernelException) { chatCompletion = null; } @@ -591,11 +602,11 @@ private static bool TryGetChatCompletion(IKernel kernel, [NotNullWhen(true)] out return false; } - private static void AddExecutionStatsToContext(List stepsTaken, SKContext context, int iterations) + private static void AddExecutionStatsToContextVariables(List stepsTaken, ContextVariables variables, int iterations) { - context.Variables.Set("stepCount", stepsTaken.Count.ToString(CultureInfo.InvariantCulture)); - context.Variables.Set("stepsTaken", JsonSerializer.Serialize(stepsTaken)); - context.Variables.Set("iterations", iterations.ToString(CultureInfo.InvariantCulture)); + variables.Set("stepCount", stepsTaken.Count.ToString(CultureInfo.InvariantCulture)); + variables.Set("stepsTaken", JsonSerializer.Serialize(stepsTaken)); + variables.Set("iterations", iterations.ToString(CultureInfo.InvariantCulture)); Dictionary actionCounts = new(); foreach (var step in stepsTaken) @@ -611,7 +622,7 @@ private static void AddExecutionStatsToContext(List stepsTaken, SKCo var functionCallCountStr = actionCounts.Values.Sum().ToString(CultureInfo.InvariantCulture); - context.Variables.Set("functionCount", $"{functionCallCountStr} ({functionCallListWithCounts})"); + variables.Set("functionCount", $"{functionCallCountStr} ({functionCallListWithCounts})"); } #region private @@ -622,13 +633,13 @@ private static void AddExecutionStatsToContext(List stepsTaken, SKCo private StepwisePlannerConfig Config { get; } // Context used to access the list of functions in the kernel - private readonly IKernel _kernel; - private readonly ILogger? _logger; + private readonly Kernel _kernel; + private readonly ILogger _logger; /// /// Planner native functions /// - private readonly IDictionary _nativeFunctions = new Dictionary(); + private readonly IKernelPlugin _nativeFunctions; /// /// The prompt template to use for the system step @@ -648,7 +659,7 @@ private static void AddExecutionStatsToContext(List stepsTaken, SKCo /// /// The prompt renderer to use for the system step /// - private readonly BasicPromptTemplateEngine _promptRenderer; + private readonly KernelPromptTemplateFactory _promptTemplateFactory; /// /// The prompt config to use for the system step diff --git a/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlannerConfig.cs b/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlannerConfig.cs index 545edf191cb9..3acb6aab1813 100644 --- a/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlannerConfig.cs +++ b/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlannerConfig.cs @@ -1,11 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Microsoft.SemanticKernel.TemplateEngine; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; /// /// Configuration for Stepwise planner instances. diff --git a/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlannerExtensions.cs b/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlannerExtensions.cs deleted file mode 100644 index 9a49f49680c5..000000000000 --- a/dotnet/src/Planners/Planners.Core/Stepwise/StepwisePlannerExtensions.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.Extensions.Logging; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 - -/// -/// Extension methods for class. -/// -public static class StepwisePlannerExtensions -{ - /// - /// Returns decorated instance of with enabled instrumentation. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public static IStepwisePlanner WithInstrumentation(this IStepwisePlanner planner, ILoggerFactory? loggerFactory = null) - { - return new InstrumentedStepwisePlanner(planner, loggerFactory); - } -} diff --git a/dotnet/src/Planners/Planners.Core/Stepwise/SystemStep.cs b/dotnet/src/Planners/Planners.Core/Stepwise/SystemStep.cs index 812db626cca2..178f87534cca 100644 --- a/dotnet/src/Planners/Planners.Core/Stepwise/SystemStep.cs +++ b/dotnet/src/Planners/Planners.Core/Stepwise/SystemStep.cs @@ -3,10 +3,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of Plan -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; /// /// A step in a Stepwise plan. diff --git a/dotnet/src/Planners/Planners.Core/Utils/EmbeddedResource.cs b/dotnet/src/Planners/Planners.Core/Utils/EmbeddedResource.cs index c759b9ef9171..c887f5e35470 100644 --- a/dotnet/src/Planners/Planners.Core/Utils/EmbeddedResource.cs +++ b/dotnet/src/Planners/Planners.Core/Utils/EmbeddedResource.cs @@ -2,11 +2,8 @@ using System.IO; using System.Reflection; -using Microsoft.SemanticKernel.Diagnostics; -#pragma warning disable IDE0130 -namespace Microsoft.SemanticKernel.Planners; -#pragma warning restore IDE0130 +namespace Microsoft.SemanticKernel.Planning; internal static class EmbeddedResource { @@ -15,10 +12,10 @@ internal static class EmbeddedResource internal static string Read(string name) { var assembly = typeof(EmbeddedResource).GetTypeInfo().Assembly; - if (assembly == null) { throw new SKException($"[{s_namespace}] {name} assembly not found"); } + if (assembly == null) { throw new FileNotFoundException($"[{s_namespace}] {name} assembly not found"); } using Stream? resource = assembly.GetManifestResourceStream($"{s_namespace}." 
+ name); - if (resource == null) { throw new SKException($"[{s_namespace}] {name} resource not found"); } + if (resource == null) { throw new FileNotFoundException($"[{s_namespace}] {name} resource not found"); } using var reader = new StreamReader(resource); return reader.ReadToEnd(); diff --git a/dotnet/src/Planners/Planners.Handlebars.UnitTests/.editorconfig b/dotnet/src/Planners/Planners.Handlebars.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Planners/Planners.Handlebars.UnitTests/Handlebars/HandlebarsPlannerTests.cs b/dotnet/src/Planners/Planners.Handlebars.UnitTests/Handlebars/HandlebarsPlannerTests.cs new file mode 100644 index 000000000000..535a6ac7d321 --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars.UnitTests/Handlebars/HandlebarsPlannerTests.cs @@ -0,0 +1,114 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Planning.Handlebars; +using Moq; +using Xunit; + +namespace Microsoft.SemanticKernel.Planners.UnitTests.Handlebars; + +public sealed class HandlebarsPlannerTests +{ + private const string PlanString = + @"```handlebars +{{!-- Step 1: Call Summarize function --}} +{{set ""summary"" (SummarizePlugin-Summarize)}} + +{{!-- Step 2: Call Translate function with the language set to French --}} +{{set ""translatedSummary"" (WriterPlugin-Translate language=""French"" input=(get ""summary""))}} + +{{!-- Step 3: Call GetEmailAddress function with input set to John Doe --}} +{{set ""emailAddress"" (email-GetEmailAddress input=""John Doe"")}} + +{{!-- Step 4: Call SendEmail function with input set to the translated summary and email_address set to the retrieved email address --}} +{{email-SendEmail input=(get ""translatedSummary"") email_address=(get ""emailAddress"")}} +```"; + + [Theory] + [InlineData("Summarize this text, translate it to French and send it to John Doe.")] + public async Task ItCanCreatePlanAsync(string goal) + { + // Arrange + var plugins = this.CreatePluginCollection(); + var kernel = this.CreateKernelWithMockCompletionResult(PlanString, plugins); + var planner = new HandlebarsPlanner(); + + // Act + HandlebarsPlan plan = await planner.CreatePlanAsync(kernel, goal); + + // Assert + Assert.True(!string.IsNullOrEmpty(plan.Prompt)); + Assert.True(!string.IsNullOrEmpty(plan.ToString())); + } + + [Fact] + public async Task EmptyGoalThrowsAsync() + { + // Arrange + var kernel = this.CreateKernelWithMockCompletionResult(PlanString); + + var planner = new HandlebarsPlanner(); + + // Act & Assert + await Assert.ThrowsAsync(async () => await planner.CreatePlanAsync(kernel, string.Empty)); + } + + [Fact] + public async Task InvalidHandlebarsTemplateThrowsAsync() + { + // Arrange + var kernel = this.CreateKernelWithMockCompletionResult("notvalid<"); + + var planner = new HandlebarsPlanner(); + + // Act & Assert + var exception = 
await Assert.ThrowsAsync(async () => await planner.CreatePlanAsync(kernel, "goal")); + Assert.True(exception?.Message?.Contains("Could not find the plan in the results", StringComparison.InvariantCulture)); + } + + private Kernel CreateKernelWithMockCompletionResult(string testPlanString, KernelPluginCollection? plugins = null) + { + plugins ??= new KernelPluginCollection(); + + var chatMessage = new ChatMessageContent(AuthorRole.Assistant, testPlanString); + + var chatCompletion = new Mock(); + chatCompletion + .Setup(cc => cc.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(new List { chatMessage }); + + var serviceSelector = new Mock(); + IChatCompletionService resultService = chatCompletion.Object; + PromptExecutionSettings resultSettings = new(); + serviceSelector + .Setup(ss => ss.TrySelectAIService(It.IsAny(), It.IsAny(), It.IsAny(), out resultService!, out resultSettings!)) + .Returns(true); + + var serviceCollection = new ServiceCollection(); + serviceCollection.AddSingleton(serviceSelector.Object); + serviceCollection.AddSingleton(chatCompletion.Object); + + return new Kernel(serviceCollection.BuildServiceProvider(), plugins); + } + + private KernelPluginCollection CreatePluginCollection() + { + return new() + { + KernelPluginFactory.CreateFromFunctions("email", "Email functions", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "SendEmail", "Send an e-mail"), + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "GetEmailAddress", "Get an e-mail address") + }), + KernelPluginFactory.CreateFromFunctions("WriterPlugin", "Writer functions", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Translate", "Translate something"), + }), + KernelPluginFactory.CreateFromFunctions("SummarizePlugin", "Summarize functions", new[] + { + KernelFunctionFactory.CreateFromMethod(() => "MOCK FUNCTION CALLED", "Summarize", "Summarize something"), + }) + }; + } +} diff --git a/dotnet/src/Planners/Planners.Handlebars.UnitTests/Handlebars/KernelParameterMetadataExtensionsTests.cs b/dotnet/src/Planners/Planners.Handlebars.UnitTests/Handlebars/KernelParameterMetadataExtensionsTests.cs new file mode 100644 index 000000000000..fdd99f73801f --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars.UnitTests/Handlebars/KernelParameterMetadataExtensionsTests.cs @@ -0,0 +1,335 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel.Planning.Handlebars; +using Xunit; + +namespace Microsoft.SemanticKernel.Planners.UnitTests.Handlebars; + +public class KernelParameterMetadataExtensionsTests +{ + [Fact] + public void ReturnsTrueForPrimitiveOrStringTypes() + { + // Arrange + var primitiveTypes = new Type[] { typeof(int), typeof(double), typeof(bool), typeof(char) }; + var stringType = typeof(string); + + // Act and Assert + foreach (var type in primitiveTypes) + { + Assert.True(KernelParameterMetadataExtensions.IsPrimitiveOrStringType(type)); + } + + Assert.True(KernelParameterMetadataExtensions.IsPrimitiveOrStringType(stringType)); + } + + [Fact] + public void ReturnsFalseForNonPrimitiveOrStringTypes() + { + // Arrange + var nonPrimitiveTypes = new Type[] { typeof(object), typeof(DateTime), typeof(List), typeof(HandlebarsParameterTypeMetadata) }; + + // Act and Assert + foreach (var type in nonPrimitiveTypes) + { + Assert.False(KernelParameterMetadataExtensions.IsPrimitiveOrStringType(type)); + } + } + + [Fact] + public void ReturnsEmptySetForPrimitiveOrStringType() + { + // Arrange + var primitiveType = typeof(int); + + // Act + var result = primitiveType.ToHandlebarsParameterTypeMetadata(); + + // Assert + Assert.Empty(result); + } + + [Fact] + public void ReturnsSetWithOneElementForSimpleClassType() + { + // Arrange + var simpleClassType = typeof(SimpleClass); + + // Act + var result = simpleClassType.ToHandlebarsParameterTypeMetadata(); + + // Assert + Assert.Single(result); + Assert.Equal("SimpleClass", result.First().Name); + Assert.True(result.First().IsComplex); + Assert.Equal(2, result.First().Properties.Count); + Assert.Equal("Id", result.First().Properties[0].Name); + Assert.Equal(typeof(int), result.First().Properties[0].ParameterType); + Assert.Equal("Name", result.First().Properties[1].Name); + Assert.Equal(typeof(string), result.First().Properties[1].ParameterType); + } + + [Fact] + public void ReturnsSetWithOneElementForRecursiveClassType() + { + // Arrange + var recursiveClassType = typeof(RecursiveClass); + + // Act + var result = recursiveClassType.ToHandlebarsParameterTypeMetadata(); + + // Assert + Assert.Single(result); + Assert.Equal(nameof(RecursiveClass), result.First().Name); + Assert.True(result.First().IsComplex); + Assert.Equal(2, result.First().Properties.Count); + Assert.Equal(nameof(RecursiveClass.Name), result.First().Properties[0].Name); + Assert.Equal(typeof(string), result.First().Properties[0].ParameterType); + Assert.Equal(nameof(RecursiveClass.Next), result.First().Properties[1].Name); + Assert.Equal(typeof(RecursiveClass), result.First().Properties[1].ParameterType); + } + + [Fact] + public void ReturnsSetWithMultipleElementsForNestedClassType() + { + // Arrange + var nestedClassType = typeof(NestedClass); + + // Act + var result = nestedClassType.ToHandlebarsParameterTypeMetadata(); + + // Assert + Assert.Equal(3, result.Count); + Assert.Contains(result, r => r.Name == "NestedClass"); + Assert.Contains(result, r => r.Name == "SimpleClass"); + Assert.Contains(result, r => r.Name == "AnotherClass"); + + var nestedClass = result.First(r => r.Name == "NestedClass"); + Assert.True(nestedClass.IsComplex); + Assert.Equal(3, nestedClass.Properties.Count); + Assert.Equal("Id", nestedClass.Properties[0].Name); + Assert.Equal(typeof(int), nestedClass.Properties[0].ParameterType); + Assert.Equal("Simple", nestedClass.Properties[1].Name); + Assert.Equal(typeof(SimpleClass), nestedClass.Properties[1].ParameterType); + Assert.Equal("Another", 
nestedClass.Properties[2].Name); + Assert.Equal(typeof(AnotherClass), nestedClass.Properties[2].ParameterType); + + var simpleClass = result.First(r => r.Name == "SimpleClass"); + Assert.True(simpleClass.IsComplex); + Assert.Equal(2, simpleClass.Properties.Count); + Assert.Equal("Id", simpleClass.Properties[0].Name); + Assert.Equal(typeof(int), simpleClass.Properties[0].ParameterType); + Assert.Equal("Name", simpleClass.Properties[1].Name); + Assert.Equal(typeof(string), simpleClass.Properties[1].ParameterType); + + var anotherClass = result.First(r => r.Name == "AnotherClass"); + Assert.True(anotherClass.IsComplex); + Assert.Single(anotherClass.Properties); + Assert.Equal("Value", anotherClass.Properties[0].Name); + Assert.Equal(typeof(double), anotherClass.Properties[0].ParameterType); + + // Should not contain primitive types + Assert.DoesNotContain(result, r => r.Name == "Id"); + Assert.DoesNotContain(result, r => !r.IsComplex); + + // Should not contain empty complex types + Assert.DoesNotContain(result, r => r.IsComplex && r.Properties.Count == 0); + } + + [Fact] + public void ReturnsSetWithOneElementForTaskOfSimpleClassType() + { + // Arrange + var taskOfSimpleClassType = typeof(Task); + + // Act + var result = taskOfSimpleClassType.ToHandlebarsParameterTypeMetadata(); + + // Assert + Assert.Single(result); + Assert.Equal("SimpleClass", result.First().Name); + Assert.True(result.First().IsComplex); + Assert.Equal(2, result.First().Properties.Count); + Assert.Equal("Id", result.First().Properties[0].Name); + Assert.Equal(typeof(int), result.First().Properties[0].ParameterType); + Assert.Equal("Name", result.First().Properties[1].Name); + Assert.Equal(typeof(string), result.First().Properties[1].ParameterType); + } + + [Fact] + public void ReturnsEmptySetForTaskOfPrimitiveOrStringType() + { + // Arrange + var taskOfPrimitiveType = typeof(Task); + var taskOfStringType = typeof(Task); + + // Act + var result1 = taskOfPrimitiveType.ToHandlebarsParameterTypeMetadata(); + var result2 = taskOfStringType.ToHandlebarsParameterTypeMetadata(); + + // Assert + Assert.Empty(result1); + Assert.Empty(result2); + } + + [Fact] + public void ReturnsTrueForPrimitiveOrStringSchemaTypes() + { + // Arrange + var primitiveSchemaTypes = new string[] { "string", "number", "integer", "boolean" }; + + // Act and Assert + foreach (var type in primitiveSchemaTypes) + { + Assert.True(KernelParameterMetadataExtensions.IsPrimitiveOrStringType(type)); + } + } + + [Fact] + public void ReturnsFalseForNonPrimitiveOrStringSchemaTypes() + { + // Arrange + var nonPrimitiveSchemaTypes = new string[] { "object", "array", "any", "null" }; + + // Act and Assert + foreach (var type in nonPrimitiveSchemaTypes) + { + Assert.False(KernelParameterMetadataExtensions.IsPrimitiveOrStringType(type)); + } + } + + [Fact] + public void ReturnsParameterWithParameterTypeForPrimitiveOrStringSchemaType() + { + // Arrange + var schemaTypeMap = new Dictionary + { + {"string", typeof(string)}, + {"integer", typeof(long)}, + {"number", typeof(double)}, + {"boolean", typeof(bool)}, + {"null", typeof(object)} + }; + + foreach (var pair in schemaTypeMap) + { + var schema = KernelJsonSchema.Parse($"{{\"type\": \"{pair.Key}\"}}"); + var parameter = new KernelParameterMetadata("test") { Schema = schema }; + + // Act + var result = parameter.ParseJsonSchema(); + + // Assert + Assert.Equal(pair.Value, result.ParameterType); + } + } + + [Fact] + public void ReturnsParameterWithSchemaForNonPrimitiveOrStringSchemaType() + { + // Arrange + var schema = 
KernelJsonSchema.Parse("{\"type\": \"object\", \"properties\": {\"name\": {\"type\": \"string\"}}}"); + var parameter = new KernelParameterMetadata("test") { Schema = schema }; + + // Act + var result = parameter.ParseJsonSchema(); + + // Assert + Assert.Null(result.ParameterType); + Assert.Equal(schema, result.Schema); + } + + [Fact] + public void ReturnsIndentedJsonStringForJsonElement() + { + // Arrange + var jsonProperties = KernelJsonSchema.Parse("{\"name\": \"Alice\", \"age\": 25}").RootElement; + + // Act + var result = jsonProperties.ToJsonString(); + + // Ensure that the line endings are consistent across different dotnet versions + result = result.Replace("\r\n", "\n", StringComparison.InvariantCulture); + + // Assert + var expected = "{\n \"name\": \"Alice\",\n \"age\": 25\n}"; + Assert.Equal(expected, result); + } + + [Fact] + public void ReturnsParameterNameAndSchemaType() + { + // Arrange + var schema = KernelJsonSchema.Parse("{\"type\": \"object\", \"properties\": {\"name\": {\"type\": \"string\"}}}"); + var parameter = new KernelParameterMetadata("test") { Schema = schema }; + + // Act + var result = parameter.GetSchemaTypeName(); + + // Assert + Assert.Equal("test-object", result); + } + + [Fact] + public void ConvertsReturnParameterMetadataToParameterMetadata() + { + // Arrange + var schema = KernelJsonSchema.Parse("{\"type\": \"object\", \"properties\": {\"name\": {\"type\": \"string\"}}}"); + var returnParameter = new KernelReturnParameterMetadata() { Description = "test", ParameterType = typeof(object), Schema = schema }; + + // Act + var functionName = "Foo"; + var result = returnParameter.ToKernelParameterMetadata(functionName); + + // Assert + Assert.Equal("FooReturns", result.Name); + Assert.Equal("test", result.Description); + Assert.Equal(typeof(object), result.ParameterType); + Assert.Equal(schema, result.Schema); + } + + [Fact] + public void ConvertsParameterMetadataToReturnParameterMetadata() + { + // Arrange + var schema = KernelJsonSchema.Parse("{\"type\": \"object\", \"properties\": {\"name\": {\"type\": \"string\"}}}"); + var parameter = new KernelParameterMetadata("test") { Description = "test", ParameterType = typeof(object), Schema = schema }; + + // Act + var result = parameter.ToKernelReturnParameterMetadata(); + + // Assert + Assert.Equal("test", result.Description); + Assert.Equal(typeof(object), result.ParameterType); + Assert.Equal(schema, result.Schema); + } + + #region Simple helper classes + + private sealed class SimpleClass + { + public int Id { get; set; } + public string Name { get; set; } = string.Empty; + } + + private sealed class AnotherClass + { + public double Value { get; set; } + } + + private static class NestedClass + { + public static int Id { get; set; } + public static SimpleClass Simple { get; set; } = new SimpleClass(); + public static AnotherClass Another { get; set; } = new AnotherClass(); + } + + private sealed class RecursiveClass + { + public string Name { get; set; } = ""; + + public RecursiveClass Next { get; set; } = new(); + } + + #endregion +} diff --git a/dotnet/src/Planners/Planners.Handlebars.UnitTests/Planners.Handlebars.UnitTests.csproj b/dotnet/src/Planners/Planners.Handlebars.UnitTests/Planners.Handlebars.UnitTests.csproj new file mode 100644 index 000000000000..f538fff633c2 --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars.UnitTests/Planners.Handlebars.UnitTests.csproj @@ -0,0 +1,35 @@ + + + + Microsoft.SemanticKernel.Planners.Handlebars.UnitTests + Microsoft.SemanticKernel.Planners.UnitTests + 
net6.0 + LatestMajor + true + enable + enable + false + CA2007,VSTHRD111,SKEXP0060 + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + diff --git a/dotnet/src/Planners/Planners.Handlebars/AssemblyInfo.cs b/dotnet/src/Planners/Planners.Handlebars/AssemblyInfo.cs new file mode 100644 index 000000000000..e105bdb168ac --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0060")] diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/CreatePlanPrompt.handlebars b/dotnet/src/Planners/Planners.Handlebars/Handlebars/CreatePlanPrompt.handlebars new file mode 100644 index 000000000000..ee2305987005 --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/CreatePlanPrompt.handlebars @@ -0,0 +1,205 @@ +{{#message role="system"}}## Instructions +Explain how to achieve the user's goal with the available helpers with a Handlebars template. + +## Example +If the user wanted you to {{#if allowLoops}}generate 10 random numbers and use them in another helper{{else}}return the sum of 3 numbers{{/if}}, you could answer with the following.{{/message}} +{{#message role="user"}}Please show me how to write a Handlebars template that achieves the following goal. + +{{#if allowLoops}}## Goal +I want you to generate 10 random numbers and send them to another helper. +{{else}}## Goal +What's the sum of 5+10+15? +{{~/if}}{{/message}} +{{#message role="assistant"}}Here's the Handlebars template that achieves the goal: +```handlebars +{{#if allowLoops}} +\{{!-- Step 1: Identify key values --}} +\{{set + "count" + 10 +}} +\{{!-- Step 2: Loop using the count --}} +\{{#each + (range + 1 + count + ) +}} + \{{!-- Step 3: Create random number --}} + \{{set + "randomNumber" + (Example{{nameDelimiter}}Random + seed=this + ) + }} + \{{!-- Step 4: Call example helper with random number and print the result to the screen --}} + \{{set + "result" + (Example{{nameDelimiter}}Helper + input=randomNumber + ) + }} + \{{json (concat "The result" " " "is:" " " result)}} +\{{/each}} +{{else}} +\{{!-- Step 1: Initialize the variables --}} +\{{set + "num1" + 5 +}} +\{{set + "num2" + 10 +}} +\{{set + "num3" + 15 +}} +\{{!-- Step 2: Call the Example helper with the variables and store the result --}} +\{{set + "sum" + (Example{{nameDelimiter}}AddNums + num1=num1 + num2=num2 + num3=num3 + ) +}} +\{{!-- Step 3: Print the result using the json helper --}} +\{{json (concat "The sum of " num1 "+" num2 "+" num3 " " "is" " " sum)}} +{{/if}} +```{{/message}} +{{#message role="system"}}Now let's try the real thing.{{/message}} +{{#message role="user"}} +The following helpers are available to you: + +## Built-in block helpers +- `\{{#if}}\{{/if}}` +- `\{{#unless}}\{{/unless}}`{{#if allowLoops}} +- `\{{#each}}\{{/each}}`{{/if}} +- `\{{#with}}\{{/with}}` + +{{#if allowLoops}} +## Loop helpers +If you need to loop through a list of values with `\{{#each}}`, you can use the following helpers: +- `\{{range}}` – Generates a list of integral numbers within a specified range, inclusive of the first and last value. +- `\{{array}}` – Generates an array of values from the given values (zero-indexed). + +IMPORTANT: `range` and `array` are the only supported data structures. 
Others like `hash` are not supported. Also, you cannot use any methods or properties on the built-in data structures. + +## Math helpers +If you need to do basic operations, you can use these two helpers with numerical values: +- `\{{add}}` – Adds two values together. +- `\{{subtract}}` – Subtracts the second value from the first. + +{{/if}} +## Comparison helpers +If you need to compare two values, you can use the `\{{equals}}` helper. +To use the {{#if allowLoops}}math and {{/if}}comparison helpers, you must pass in two positional values. For example, to check if the variable `var` is equal to number `1`, you would use the following helper like so: `\{{#if (equals var 1)}}\{{/if}}`. + +## Variable helpers +If you need to create or retrieve a variable, you can use the following helpers: +- `\{{set}}` – Creates a variable with the given name and value. It does not print anything to the template, so you must use `\{{json}}` to print the value. +- `\{{json}}` – Generates and prints a JSON string from the given value. +- `\{{concat}}` – Concatenates the given values into one string. + +{{#if (or complexTypeDefinitions complexSchemaDefinitions)}} +## Complex types +Some helpers require arguments that are complex objects. The JSON schemas for these complex objects are defined below: + +{{#each complexTypeDefinitions}} +### {{Name}}: +{ + "type": "Object", + "properties": { + {{#each Properties}} + "{{Name}}": { + "type": "{{ParameterType.Name}}", + }, + {{/each}} + } +} + +{{/each}} +{{#each complexSchemaDefinitions}} +### {{@key}}: +{{this}} + +{{/each}} +{{/if}} +## Custom helpers +Lastly, you have the following custom helpers to use. + +{{#each functions}} +### `{{doubleOpen}}{{PluginName}}{{../nameDelimiter}}{{Name}}{{doubleClose}}` +Description: {{Description}} +Inputs: + {{#each Parameters}} + - {{Name}}: + {{~#if ParameterType}} {{ParameterType.Name}} - + {{~else}} + {{~#if Schema}} {{getSchemaTypeName this}} -{{/if}} + {{~/if}} + {{~#if Description}} {{Description}}{{/if}} + {{~#if IsRequired}} (required){{else}} (optional){{/if}} + {{/each}} +Output: +{{~#if ReturnParameter}} + {{~#if ReturnParameter.ParameterType}} {{ReturnParameter.ParameterType.Name}} + {{~else}} + {{~#if ReturnParameter.Schema}} {{getSchemaReturnTypeName ReturnParameter Name}} + {{else}} string{{/if}} + {{~/if}} + {{~#if ReturnParameter.Description}} - {{ReturnParameter.Description}}{{/if}} +{{/if}} + +{{/each}} +IMPORTANT: You can only use the helpers that are listed above. Do not use any other helpers that are not explicitly listed here. For example, do not use `\{{log}}` or any `\{{Example}}` helpers, as they are not supported.{{/message}} +{{#message role="user"}}Please show me how to write a Handlebars template that achieves the following goal with the available helpers. +{{#if lastError}} +{{#message role="system"}} +## Previous attempt +This previous plan failed to achieve the goal: +```handlebars +{{lastPlan}} +``` + +The error was : +``` +{{lastError}} +``` + +Try again to achieve the goal while fixing the error.{{/message}} +{{/if}} + +## Goal +{{goal}}{{/message}} +{{#message role="system"}} +## Tips and reminders +- Add a comment above each step to describe what the step does. +- Each variable should have a well-defined name. +- Be extremely careful about types. For example, if you pass an array to a helper that expects a number, the template will error out. +- Each step should contain only one helper call. + +## Start +Follow these steps to create one Handlebars template to achieve the goal: +1. 
Identify Key Values: + - Read the goal carefully and note any numbers, strings, or conditions that you'll need. Do not truncate or modify any data. + - Use the `\{{set}}` helper to create a variable for each key value. +2. Choose the Right Helpers: + - Use the provided helpers to manipulate the variables you've created. Start with the basic helpers and only use custom helpers if necessary to accomplish the goal. + - Always reference a custom helper by its full name. +3. Don't Create or Assume Unlisted Helpers: + - Only use the helpers provided. Any helper not listed is considered hallucinated and must not be used. + - Do not invent or assume the existence of any functions not explicitly defined above. +4. What if I Need More Helpers? + - If the goal cannot be fully achieved with the provided helpers or you reference a hallucinated helper, print the following message: "{{insufficientFunctionsErrorMessage}}". +5. Keep It Simple: + - Your template should be intelligent and efficient, avoiding unnecessary complexity or redundant steps. +{{#if allowLoops}} - Avoid using loops or block expressions. They are allowed but not always necessary, so try to find a solution that does not use them.{{/if}} +6. No Nested Helpers: + - Do not nest helpers or conditionals inside other helpers. This can cause errors in the template. +7. Output the Result: + - Once you have completed the necessary steps to reach the goal, use the `\{{json}}` helper to output the final result. + - Ensure your template and all steps are enclosed in a ``` handlebars block. + +Remember, the objective is not to use all the helpers available, but to use the correct ones to achieve the desired outcome with a clear and concise template.{{/message}} \ No newline at end of file diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/HandlebarsPlannerExtensions.cs b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/HandlebarsPlannerExtensions.cs new file mode 100644 index 000000000000..278c36f28a16 --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/HandlebarsPlannerExtensions.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Reflection; + +namespace Microsoft.SemanticKernel.Planning.Handlebars; + +/// +/// Extension methods for the interface. +/// +internal static class HandlebarsPlannerExtensions +{ + /// + /// Reads the prompt for the given file name. + /// + /// The handlebars planner. + /// The name of the file to read. + /// The name of the additional namespace. + /// The content of the file as a string. + public static string ReadPrompt(this HandlebarsPlanner planner, string fileName, string? additionalNameSpace = "") + { + using var stream = planner.ReadPromptStream(fileName, additionalNameSpace); + using var reader = new StreamReader(stream); + + return reader.ReadToEnd(); + } + + /// + /// Reads the prompt stream for the given file name. + /// + /// The handlebars planner. + /// The name of the file to read. + /// The name of the additional namespace. + /// The stream for the given file name. + public static Stream ReadPromptStream(this HandlebarsPlanner planner, string fileName, string? additionalNamespace = "") + { + var assembly = Assembly.GetExecutingAssembly(); + var name = planner.GetType().Namespace; + var supplementalNamespace = !string.IsNullOrEmpty(additionalNamespace) ? 
$".{additionalNamespace}" : string.Empty; + var resourceName = $"{name}{supplementalNamespace}.{fileName}"; + + return assembly.GetManifestResourceStream(resourceName)!; + } +} diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/HandlebarsPromptTemplateExtensions.cs b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/HandlebarsPromptTemplateExtensions.cs new file mode 100644 index 000000000000..04683838b751 --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/HandlebarsPromptTemplateExtensions.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using HandlebarsDotNet; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using static Microsoft.SemanticKernel.PromptTemplates.Handlebars.HandlebarsPromptTemplateOptions; + +namespace Microsoft.SemanticKernel.Planning.Handlebars; + +/// +/// Provides extension methods for rendering Handlebars templates in the context of a Semantic Kernel. +/// +internal sealed class HandlebarsPromptTemplateExtensions +{ + public static void RegisterCustomCreatePlanHelpers( + RegisterHelperCallback registerHelper, + HandlebarsPromptTemplateOptions options, + KernelArguments executionContext + ) + { + registerHelper("getSchemaTypeName", static (Context context, Arguments arguments) => + { + KernelParameterMetadata parameter = (KernelParameterMetadata)arguments[0]; + return parameter.GetSchemaTypeName(); + }); + + registerHelper("getSchemaReturnTypeName", static (Context context, Arguments arguments) => + { + KernelReturnParameterMetadata parameter = (KernelReturnParameterMetadata)arguments[0]; + var functionName = arguments[1].ToString(); + return parameter.ToKernelParameterMetadata(functionName).GetSchemaTypeName(); + }); + } +} diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/KernelParameterMetadataExtensions.cs b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/KernelParameterMetadataExtensions.cs new file mode 100644 index 000000000000..05d25f9674aa --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Extensions/KernelParameterMetadataExtensions.cs @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text.Json; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Planning.Handlebars; + +internal static class KernelParameterMetadataExtensions +{ + /// + /// Checks if type is primitive or string + /// + public static bool IsPrimitiveOrStringType(Type type) => type.IsPrimitive || type == typeof(string); + + /// + /// Checks if stringified type is primitive or string + /// + public static bool IsPrimitiveOrStringType(string type) => + type == "string" || type == "number" || type == "integer" || type == "boolean"; + + /// + /// Converts non-primitive types to a data class definition and returns a hash set of complex type metadata. + /// Complex types will become a data class. + /// If there are nested complex types, the nested complex type will also be returned. 
+ /// Example: + /// Complex type: + /// class ComplexType: + /// propertyA: int + /// propertyB: str + /// propertyC: PropertyC + /// + public static HashSet ToHandlebarsParameterTypeMetadata(this Type type) + { + return type.ToHandlebarsParameterTypeMetadata(new HashSet()); + } + + private static HashSet ToHandlebarsParameterTypeMetadata(this Type type, HashSet processedTypes) + { + var parameterTypes = new HashSet(); + if (type.TryGetGenericResultType(out var taskResultType)) + { + var resultTypeProperties = taskResultType.GetProperties(); + if (!IsPrimitiveOrStringType(taskResultType) && resultTypeProperties.Length is not 0) + { + parameterTypes.Add(new HandlebarsParameterTypeMetadata() + { + Name = taskResultType.Name, + IsComplex = true, + Properties = resultTypeProperties.Select(p => new KernelParameterMetadata(p.Name) { ParameterType = p.PropertyType }).ToList() + }); + + processedTypes.Add(taskResultType); + parameterTypes.AddNestedComplexTypes(resultTypeProperties, processedTypes); + } + } + else if (type.IsClass && type != typeof(string)) + { + // Class + var properties = type.GetProperties(); + + parameterTypes.Add(new HandlebarsParameterTypeMetadata() + { + Name = type.Name, + IsComplex = properties.Length is not 0, + Properties = properties.Select(p => new KernelParameterMetadata(p.Name) { ParameterType = p.PropertyType }).ToList() + }); + + processedTypes.Add(type); + parameterTypes.AddNestedComplexTypes(properties, processedTypes); + } + + return parameterTypes; + } + + private static void AddNestedComplexTypes(this HashSet parameterTypes, PropertyInfo[] properties, HashSet processedTypes) + { + // Add nested complex types + foreach (var property in properties) + { + // Only convert the property type if we have not already done so. + if (!processedTypes.Contains(property.PropertyType)) + { + parameterTypes.UnionWith(property.PropertyType.ToHandlebarsParameterTypeMetadata(processedTypes)); + } + } + } + + private static Type GetTypeFromSchema(string schemaType) => + schemaType switch + { + "string" => typeof(string), + "integer" => typeof(long), + "number" => typeof(double), + "boolean" => typeof(bool), + "array" => typeof(object[]), + _ => typeof(object) // default to object for "object", "null", or anything unexpected + }; + + public static KernelParameterMetadata ParseJsonSchema(this KernelParameterMetadata parameter) + { + var schema = parameter.Schema!; + + var type = "object"; + if (schema.RootElement.TryGetProperty("type", out var typeNode)) + { + type = typeNode.Deserialize()!; + } + + if (IsPrimitiveOrStringType(type) || type == "null") + { + return new(parameter) + { + ParameterType = GetTypeFromSchema(type), + Schema = null, + }; + } + + return parameter; + } + + public static string ToJsonString(this JsonElement jsonProperties) + { + return JsonSerializer.Serialize(jsonProperties, JsonOptionsCache.WriteIndented); + } + + public static string GetSchemaTypeName(this KernelParameterMetadata parameter) + { + var schemaType = parameter.Schema?.RootElement.TryGetProperty("type", out var typeElement) is true ? 
typeElement.ToString() : "object"; + return $"{parameter.Name}-{schemaType}"; + } + + public static KernelParameterMetadata ToKernelParameterMetadata(this KernelReturnParameterMetadata parameter, string functionName) => + new($"{functionName}Returns") + { + Description = parameter.Description, + ParameterType = parameter.ParameterType, + Schema = parameter.Schema + }; + + public static KernelReturnParameterMetadata ToKernelReturnParameterMetadata(this KernelParameterMetadata parameter) => + new() + { + Description = parameter.Description, + ParameterType = parameter.ParameterType, + Schema = parameter.Schema + }; +} diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlan.cs b/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlan.cs new file mode 100644 index 000000000000..c6196746156a --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlan.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; +using HandlebarsDotNet; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +namespace Microsoft.SemanticKernel.Planning.Handlebars; + +/// +/// Represents a Handlebars plan. +/// +public sealed class HandlebarsPlan +{ + /// + /// Error message for hallucinated helpers (helpers that are not registered kernel functions or built-in library helpers). + /// + internal const string HallucinatedHelpersErrorMessage = "Template references a helper that cannot be resolved."; + + /// + /// The handlebars template representing the plan. + /// + private readonly string _template; + + /// + /// Gets the prompt template used to generate the plan. + /// + public string? Prompt { get; set; } = null; + + /// + /// Initializes a new instance of the class. + /// + /// A Handlebars template representing the generated plan. + /// Prompt template used to generate the plan. + public HandlebarsPlan(string generatedPlan, string? createPlanPromptTemplate = null) + { + this._template = generatedPlan; + this.Prompt = createPlanPromptTemplate; + } + + /// + /// Print the generated plan, aka handlebars template that was the create plan chat completion result. + /// + /// Handlebars template representing the plan. + public override string ToString() + { + return this._template; + } + + /// + /// Invokes the Handlebars plan. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments. + /// The cancellation token. + /// The plan result. + public Task InvokeAsync( + Kernel kernel, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + var logger = kernel.LoggerFactory.CreateLogger(typeof(HandlebarsPlan)) ?? NullLogger.Instance; + + return PlannerInstrumentation.InvokePlanAsync( + static (HandlebarsPlan plan, Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) + => plan.InvokeCoreAsync(kernel, arguments, cancellationToken), + this, kernel, arguments, logger, cancellationToken); + } + + private async Task InvokeCoreAsync( + Kernel kernel, + KernelArguments? 
arguments = null, + CancellationToken cancellationToken = default) + { + var templateFactory = new HandlebarsPromptTemplateFactory(options: HandlebarsPlanner.PromptTemplateOptions); + var promptTemplateConfig = new PromptTemplateConfig() + { + Template = this._template, + TemplateFormat = HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, + Name = "InvokeHandlebarsPlan", + }; + + var handlebarsTemplate = templateFactory.Create(promptTemplateConfig); + try + { + return await handlebarsTemplate!.RenderAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + } + catch (HandlebarsRuntimeException ex) when (ex.Message.Contains(HallucinatedHelpersErrorMessage)) + { + var hallucinatedHelpers = ex.Message.Substring(HallucinatedHelpersErrorMessage.Length + 1); + throw new KernelException($"[{HandlebarsPlannerErrorCodes.HallucinatedHelpers}] The plan references hallucinated helpers: {hallucinatedHelpers}", ex); + } + } +} diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlanner.cs b/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlanner.cs new file mode 100644 index 000000000000..e0b4debd087f --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlanner.cs @@ -0,0 +1,254 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using HandlebarsDotNet.Helpers.Enums; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +namespace Microsoft.SemanticKernel.Planning.Handlebars; + +/// +/// Represents a Handlebars planner. +/// +public sealed class HandlebarsPlanner +{ + /// + /// Represents static options for all Handlebars Planner prompt templates. + /// + public static readonly HandlebarsPromptTemplateOptions PromptTemplateOptions = new() + { + // Options for built-in Handlebars helpers + Categories = new Category[] { Category.DateTime }, + UseCategoryPrefix = false, + + // Custom helpers + RegisterCustomHelpers = HandlebarsPromptTemplateExtensions.RegisterCustomCreatePlanHelpers, + }; + + /// + /// Initializes a new instance of the class. + /// + /// Configuration options for Handlebars Planner. + public HandlebarsPlanner(HandlebarsPlannerOptions? options = default) + { + this._options = options ?? new HandlebarsPlannerOptions(); + this._templateFactory = new HandlebarsPromptTemplateFactory(options: PromptTemplateOptions); + this._options.ExcludedPlugins.Add("Planner_Excluded"); + } + + /// Creates a plan for the specified goal. + /// The containing services, plugins, and other state for use throughout the operation. + /// The goal for which a plan should be created. + /// The to monitor for cancellation requests. The default is . + /// The created plan. + /// is null. + /// is empty or entirely composed of whitespace. + /// A plan could not be created. + public Task CreatePlanAsync(Kernel kernel, string goal, CancellationToken cancellationToken = default) + { + Verify.NotNullOrWhiteSpace(goal); + + var logger = kernel.LoggerFactory.CreateLogger(typeof(HandlebarsPlanner)) ?? 
NullLogger.Instance; + + return PlannerInstrumentation.CreatePlanAsync( + static (HandlebarsPlanner planner, Kernel kernel, string goal, CancellationToken cancellationToken) + => planner.CreatePlanCoreAsync(kernel, goal, cancellationToken), + this, kernel, goal, logger, cancellationToken); + } + + #region private + + private readonly HandlebarsPlannerOptions _options; + + private readonly HandlebarsPromptTemplateFactory _templateFactory; + + /// + /// Error message if kernel does not contain sufficient functions to create a plan. + /// + private const string InsufficientFunctionsError = "Additional helpers may be required"; + + private async Task CreatePlanCoreAsync(Kernel kernel, string goal, CancellationToken cancellationToken = default) + { + // Get CreatePlan prompt template + var functionsMetadata = await kernel.Plugins.GetFunctionsAsync(this._options, null, null, cancellationToken).ConfigureAwait(false); + var availableFunctions = this.GetAvailableFunctionsManual(functionsMetadata, out var complexParameterTypes, out var complexParameterSchemas); + var createPlanPrompt = await this.GetHandlebarsTemplateAsync(kernel, goal, availableFunctions, complexParameterTypes, complexParameterSchemas, cancellationToken).ConfigureAwait(false); + ChatHistory chatMessages = this.GetChatHistoryFromPrompt(createPlanPrompt); + + // Get the chat completion results + var chatCompletionService = kernel.GetRequiredService(); + var completionResults = await chatCompletionService.GetChatMessageContentAsync(chatMessages, executionSettings: this._options.ExecutionSettings, cancellationToken: cancellationToken).ConfigureAwait(false); + + // Check if plan could not be created due to insufficient functions + if (completionResults.Content is not null && completionResults.Content.IndexOf(InsufficientFunctionsError, StringComparison.OrdinalIgnoreCase) >= 0) + { + var functionNames = availableFunctions.ToList().Select(func => $"{func.PluginName}{this._templateFactory.NameDelimiter}{func.Name}"); + throw new KernelException($"[{HandlebarsPlannerErrorCodes.InsufficientFunctionsForGoal}] Unable to create plan for goal with available functions.\nGoal: {goal}\nAvailable Functions: {string.Join(", ", functionNames)}\nPlanner output:\n{completionResults}"); + } + + Match match = Regex.Match(completionResults.Content, @"```\s*(handlebars)?\s*(.*)\s*```", RegexOptions.Singleline); + if (!match.Success) + { + throw new KernelException($"[{HandlebarsPlannerErrorCodes.InvalidTemplate}] Could not find the plan in the results\nPlanner output:\n{completionResults}"); + } + + var planTemplate = match.Groups[2].Value.Trim(); + planTemplate = MinifyHandlebarsTemplate(planTemplate); + + return new HandlebarsPlan(planTemplate, createPlanPrompt); + } + + private List GetAvailableFunctionsManual( + IEnumerable availableFunctions, + out HashSet complexParameterTypes, + out Dictionary complexParameterSchemas) + { + complexParameterTypes = new(); + complexParameterSchemas = new(); + + var functionsMetadata = new List(); + foreach (var kernelFunction in availableFunctions) + { + // Extract any complex parameter types for isolated render in prompt template + var parametersMetadata = new List(); + foreach (var parameter in kernelFunction.Parameters) + { + var paramToAdd = this.SetComplexTypeDefinition(parameter, complexParameterTypes, complexParameterSchemas); + parametersMetadata.Add(paramToAdd); + } + + var returnParameter = kernelFunction.ReturnParameter.ToKernelParameterMetadata(kernelFunction.Name); + returnParameter = 
this.SetComplexTypeDefinition(returnParameter, complexParameterTypes, complexParameterSchemas); + + // Need to override function metadata in case parameter metadata changed (e.g., converted primitive types from schema objects) + var functionMetadata = new KernelFunctionMetadata(kernelFunction.Name) + { + PluginName = kernelFunction.PluginName, + Description = kernelFunction.Description, + Parameters = parametersMetadata, + ReturnParameter = returnParameter.ToKernelReturnParameterMetadata() + }; + functionsMetadata.Add(functionMetadata); + } + + return functionsMetadata; + } + + // Extract any complex types or schemas for isolated render in prompt template + private KernelParameterMetadata SetComplexTypeDefinition( + KernelParameterMetadata parameter, + HashSet complexParameterTypes, + Dictionary complexParameterSchemas) + { + // TODO (@teresaqhoang): Handle case when schema and ParameterType can exist i.e., when ParameterType = RestApiResponse + if (parameter.ParameterType is not null) + { + // Async return type - need to extract the actual return type and override ParameterType property + var type = parameter.ParameterType; + if (type.TryGetGenericResultType(out var taskResultType)) + { + parameter = new(parameter) { ParameterType = taskResultType }; // Actual Return Type + } + + complexParameterTypes.UnionWith(parameter.ParameterType!.ToHandlebarsParameterTypeMetadata()); + } + else if (parameter.Schema is not null) + { + // Parse the schema to extract any primitive types and set in ParameterType property instead + var parsedParameter = parameter.ParseJsonSchema(); + if (parsedParameter.Schema is not null) + { + complexParameterSchemas[parameter.GetSchemaTypeName()] = parameter.Schema.RootElement.ToJsonString(); + } + + parameter = parsedParameter; + } + + return parameter; + } + + private ChatHistory GetChatHistoryFromPrompt(string prompt) + { + // Extract the chat history from the rendered prompt + string pattern = @"<(user~|system~|assistant~)>(.*?)<\/\1>"; + MatchCollection matches = Regex.Matches(prompt, pattern, RegexOptions.Singleline); + + // Add the chat history to the chat + var chatMessages = new ChatHistory(); + foreach (Match m in matches.Cast()) + { + string role = m.Groups[1].Value; + string message = m.Groups[2].Value; + + switch (role) + { + case "user~": + chatMessages.AddUserMessage(message); + break; + case "system~": + chatMessages.AddSystemMessage(message); + break; + case "assistant~": + chatMessages.AddAssistantMessage(message); + break; + } + } + + return chatMessages; + } + + private async Task GetHandlebarsTemplateAsync( + Kernel kernel, string goal, + List availableFunctions, + HashSet complexParameterTypes, + Dictionary complexParameterSchemas, + CancellationToken cancellationToken) + { + var createPlanPrompt = this.ReadPrompt("CreatePlanPrompt.handlebars"); + var arguments = new KernelArguments() + { + { "functions", availableFunctions}, + { "goal", goal }, + { "nameDelimiter", this._templateFactory.NameDelimiter}, + { "insufficientFunctionsErrorMessage", InsufficientFunctionsError}, + { "allowLoops", this._options.AllowLoops }, + { "complexTypeDefinitions", complexParameterTypes.Count > 0 && complexParameterTypes.Any(p => p.IsComplex) ? complexParameterTypes.Where(p => p.IsComplex) : null}, + { "complexSchemaDefinitions", complexParameterSchemas.Count > 0 ? 
complexParameterSchemas : null}, + { "lastPlan", this._options.LastPlan }, + { "lastError", this._options.LastError } + }; + + var promptTemplateConfig = new PromptTemplateConfig() + { + Template = createPlanPrompt, + TemplateFormat = HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, + Name = "Planner_Excluded-CreateHandlebarsPlan", + }; + + var handlebarsTemplate = this._templateFactory.Create(promptTemplateConfig); + return await handlebarsTemplate!.RenderAsync(kernel, arguments, cancellationToken).ConfigureAwait(true); + } + + private static string MinifyHandlebarsTemplate(string template) + { + // This regex pattern matches '{{', then any characters including newlines (non-greedy), then '}}' + string pattern = @"(\{\{[\s\S]*?}})"; + + // Replace all occurrences of the pattern in the input template + return Regex.Replace(template, pattern, m => + { + // For each match, remove the whitespace within the handlebars, except for spaces + // that separate different items (e.g., 'json' and '(get') + return Regex.Replace(m.Value, @"\s+", " ").Replace(" {", "{").Replace(" }", "}").Replace(" )", ")"); + }); + } + + #endregion +} diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlannerOptions.cs b/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlannerOptions.cs new file mode 100644 index 000000000000..a97bc191ef55 --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/HandlebarsPlannerOptions.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Planning.Handlebars; + +/// +/// Configuration for Handlebars planner instances. +/// +public sealed class HandlebarsPlannerOptions : PlannerOptions +{ + /// + /// The prompt execution settings to use for the planner. + /// + public PromptExecutionSettings? ExecutionSettings { get; set; } + + /// + /// Gets or sets the last plan generated by the planner. + /// + public HandlebarsPlan? LastPlan { get; set; } + + /// + /// Gets or sets the last error that occurred during planning. + /// + public string? LastError { get; set; } + + /// + /// Gets or sets a value indicating whether loops are allowed in the plan. + /// + public bool AllowLoops { get; set; } = true; + + /// + /// Initializes a new instance of the class. + /// + public HandlebarsPlannerOptions( + HandlebarsPlan? lastPlan = default, + string? lastError = default, + bool allowLoops = true + ) + { + this.LastPlan = lastPlan; + this.LastError = lastError; + this.AllowLoops = allowLoops; + } +} diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/Models/HandlebarsParameterTypeMetadata.cs b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Models/HandlebarsParameterTypeMetadata.cs new file mode 100644 index 000000000000..2d845360738b --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Models/HandlebarsParameterTypeMetadata.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Planning.Handlebars; + +internal sealed class HandlebarsParameterTypeMetadata +{ + [JsonPropertyName("name")] + public string Name { get; set; } = string.Empty; + + [JsonPropertyName("isComplexType")] + public bool IsComplex { get; set; } = false; + + /// + /// If this is a complex type, this will contain the properties of the complex type. 
+ /// + [JsonPropertyName("properties")] + public List Properties { get; set; } = new(); + + // Override the Equals method to compare the property values + public override bool Equals(object obj) + { + // Check to make sure the object is the expected type + if (obj is not HandlebarsParameterTypeMetadata other) + { + return false; + } + + // Compare the Name and IsComplex properties + if (this.Name != other.Name || this.IsComplex != other.IsComplex) + { + return false; + } + + // Compare the Properties lists using a helper method + return ArePropertiesEqual(this.Properties, other.Properties); + } + + // A helper method to compare two lists of KernelParameterMetadata + private static bool ArePropertiesEqual(List list1, List list2) + { + // Check if the lists are null or have different lengths + if (list1 == null || list2 == null || list1.Count != list2.Count) + { + return false; + } + + // Compare the elements of the lists by comparing the Name and ParameterType properties + for (int i = 0; i < list1.Count; i++) + { + if (!list1[i].Name.Equals(list2[i].Name, System.StringComparison.Ordinal) || !list1[i].ParameterType!.Equals(list2[i].ParameterType)) + { + return false; + } + } + + // If all elements are equal, return true + return true; + } + + // Override the GetHashCode method to generate a hash code based on the property values + public override int GetHashCode() + { + HashCode hash = default; + hash.Add(this.Name); + hash.Add(this.IsComplex); + foreach (var item in this.Properties) + { + // Combine the Name and ParameterType properties into one hash code + hash.Add( + HashCode.Combine(item.Name, item.ParameterType) + ); + } + + return hash.ToHashCode(); + } +} diff --git a/dotnet/src/Planners/Planners.Handlebars/Handlebars/Models/HandlebarsPlannerErrorCodes.cs b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Models/HandlebarsPlannerErrorCodes.cs new file mode 100644 index 000000000000..adfafe292933 --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Handlebars/Models/HandlebarsPlannerErrorCodes.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Planning.Handlebars; + +/// +/// Enum error codes for Handlebars planner exceptions. +/// +public enum HandlebarsPlannerErrorCodes +{ + /// + /// Error code for hallucinated helpers. + /// + HallucinatedHelpers, + + /// + /// Error code for invalid Handlebars template. + /// + InvalidTemplate, + + /// + /// Error code for insufficient functions to complete the goal. + /// + InsufficientFunctionsForGoal, +} diff --git a/dotnet/src/Planners/Planners.Handlebars/Planners.Handlebars.csproj b/dotnet/src/Planners/Planners.Handlebars/Planners.Handlebars.csproj new file mode 100644 index 000000000000..18176910ac7d --- /dev/null +++ b/dotnet/src/Planners/Planners.Handlebars/Planners.Handlebars.csproj @@ -0,0 +1,39 @@ + + + + + Microsoft.SemanticKernel.Planners.Handlebars + Microsoft.SemanticKernel.Planning + netstandard2.0 + preview + + + + + + + + + Semantic Kernel - Planners + Semantic Kernel Handlebars Planners. + + + + + Always + + + + + + + + + + + + + + + + diff --git a/dotnet/src/Planners/Planners.OpenAI/AssemblyInfo.cs b/dotnet/src/Planners/Planners.OpenAI/AssemblyInfo.cs new file mode 100644 index 000000000000..3ae27a60b721 --- /dev/null +++ b/dotnet/src/Planners/Planners.OpenAI/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
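A minimal usage sketch for the Handlebars planner surface added above (HandlebarsPlanner, HandlebarsPlannerOptions, HandlebarsPlan). Everything outside the planner calls is a placeholder: the kernel setup assumes the OpenAI connector's chat completion builder extension, the model id and API key are dummies, and the catch block shows just one possible way to feed the LastPlan/LastError options back into a second CreatePlanAsync call.

```csharp
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Planning.Handlebars;

// Placeholder setup: any chat-completion-capable Kernel will do.
Kernel kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion(modelId: "gpt-4", apiKey: "<api-key>")
    .Build();

const string goal = "What is the sum of 5, 10, and 15?";

var planner = new HandlebarsPlanner(new HandlebarsPlannerOptions { AllowLoops = true });
HandlebarsPlan plan = await planner.CreatePlanAsync(kernel, goal);

try
{
    // Render (execute) the generated Handlebars template against the kernel.
    Console.WriteLine(await plan.InvokeAsync(kernel));
}
catch (KernelException ex)
{
    // One way to retry: hand the failing template and error text back to the planner
    // so the create-plan prompt can include the "Previous attempt" section.
    var retryPlanner = new HandlebarsPlanner(
        new HandlebarsPlannerOptions(lastPlan: plan, lastError: ex.Message));
    HandlebarsPlan retriedPlan = await retryPlanner.CreatePlanAsync(kernel, goal);
    Console.WriteLine(await retriedPlan.InvokeAsync(kernel));
}
```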
+[assembly: Experimental("SKEXP0061")] diff --git a/dotnet/src/Planners/Planners.OpenAI/Planners.OpenAI.csproj b/dotnet/src/Planners/Planners.OpenAI/Planners.OpenAI.csproj new file mode 100644 index 000000000000..b8a7994070e6 --- /dev/null +++ b/dotnet/src/Planners/Planners.OpenAI/Planners.OpenAI.csproj @@ -0,0 +1,39 @@ + + + + + Microsoft.SemanticKernel.Planners.OpenAI + Microsoft.SemanticKernel.Planning + netstandard2.0 + preview + + + + + + + + + Semantic Kernel - Planners + Semantic Kernel OpenAI Planners. + + + + + + + + + Always + + + Always + + + + + + + + + diff --git a/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlanner.cs b/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlanner.cs new file mode 100644 index 000000000000..9c1bdd484547 --- /dev/null +++ b/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlanner.cs @@ -0,0 +1,363 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Json.More; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// A planner that uses OpenAI function calling in a stepwise manner to fulfill a user goal or question. +/// +public sealed class FunctionCallingStepwisePlanner +{ + /// + /// Initialize a new instance of the class. + /// + /// The planner options. + public FunctionCallingStepwisePlanner( + FunctionCallingStepwisePlannerOptions? options = null) + { + this._options = options ?? new(); + this._generatePlanYaml = this._options.GetInitialPlanPromptTemplate?.Invoke() ?? EmbeddedResource.Read("Stepwise.GeneratePlan.yaml"); + this._stepPrompt = this._options.GetStepPromptTemplate?.Invoke() ?? EmbeddedResource.Read("Stepwise.StepPrompt.txt"); + this._options.ExcludedPlugins.Add(StepwisePlannerPluginName); + } + + /// + /// Execute a plan + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The question to answer + /// The to monitor for cancellation requests. The default is . + /// Result containing the model's response message and chat history. + public Task ExecuteAsync( + Kernel kernel, + string question, + CancellationToken cancellationToken = default) + { + var logger = kernel.LoggerFactory.CreateLogger(this.GetType()) ?? NullLogger.Instance; + + return PlannerInstrumentation.InvokePlanAsync( + static (FunctionCallingStepwisePlanner plan, Kernel kernel, string? question, CancellationToken cancellationToken) + => plan.ExecuteCoreAsync(kernel, question!, cancellationToken), + this, kernel, question, logger, cancellationToken); + } + + #region private + + private async Task ExecuteCoreAsync( + Kernel kernel, + string question, + CancellationToken cancellationToken = default) + { + Verify.NotNullOrWhiteSpace(question); + Verify.NotNull(kernel); + IChatCompletionService chatCompletion = kernel.GetRequiredService(); + ILoggerFactory loggerFactory = kernel.LoggerFactory; + ILogger logger = loggerFactory.CreateLogger(this.GetType()) ?? NullLogger.Instance; + var promptTemplateFactory = new KernelPromptTemplateFactory(loggerFactory); + var stepExecutionSettings = this._options.ExecutionSettings ?? 
new OpenAIPromptExecutionSettings(); + + // Clone the kernel so that we can add planner-specific plugins without affecting the original kernel instance + var clonedKernel = kernel.Clone(); + clonedKernel.ImportPluginFromType(); + + // Create and invoke a kernel function to generate the initial plan + var initialPlan = await this.GeneratePlanAsync(question, clonedKernel, logger, cancellationToken).ConfigureAwait(false); + + var chatHistoryForSteps = await this.BuildChatHistoryForStepAsync(question, initialPlan, clonedKernel, promptTemplateFactory, cancellationToken).ConfigureAwait(false); + + for (int i = 0; i < this._options.MaxIterations; i++) + { + // sleep for a bit to avoid rate limiting + if (i > 0) + { + await Task.Delay(this._options.MinIterationTimeMs, cancellationToken).ConfigureAwait(false); + } + + // For each step, request another completion to select a function for that step + chatHistoryForSteps.AddUserMessage(StepwiseUserMessage); + var chatResult = await this.GetCompletionWithFunctionsAsync(chatHistoryForSteps, clonedKernel, chatCompletion, stepExecutionSettings, logger, cancellationToken).ConfigureAwait(false); + chatHistoryForSteps.Add(chatResult); + + // Check for function response + if (!this.TryGetFunctionResponse(chatResult, out IReadOnlyList? functionResponses, out string? functionResponseError)) + { + // No function response found. Either AI returned a chat message, or something went wrong when parsing the function. + // Log the error (if applicable), then let the planner continue. + if (functionResponseError is not null) + { + chatHistoryForSteps.AddUserMessage(functionResponseError); + } + continue; + } + + // Check for final answer in the function response + foreach (OpenAIFunctionToolCall functionResponse in functionResponses) + { + if (this.TryFindFinalAnswer(functionResponse, out string finalAnswer, out string? finalAnswerError)) + { + if (finalAnswerError is not null) + { + // We found a final answer, but failed to parse it properly. + // Log the error message in chat history and let the planner try again. + chatHistoryForSteps.AddUserMessage(finalAnswerError); + continue; + } + + // Success! We found a final answer, so return the planner result. + return new FunctionCallingStepwisePlannerResult + { + FinalAnswer = finalAnswer, + ChatHistory = chatHistoryForSteps, + Iterations = i + 1, + }; + } + } + + // Look up function in kernel + foreach (OpenAIFunctionToolCall functionResponse in functionResponses) + { + if (clonedKernel.Plugins.TryGetFunctionAndArguments(functionResponse, out KernelFunction? pluginFunction, out KernelArguments? arguments)) + { + try + { + // Execute function and add to result to chat history + var result = (await clonedKernel.InvokeAsync(pluginFunction, arguments, cancellationToken).ConfigureAwait(false)).GetValue(); + chatHistoryForSteps.AddMessage(AuthorRole.Tool, ParseObjectAsString(result), metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, functionResponse.Id } }); + } + catch (Exception ex) when (!ex.IsCriticalException()) + { + chatHistoryForSteps.AddMessage(AuthorRole.Tool, ex.Message, metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, functionResponse.Id } }); + chatHistoryForSteps.AddUserMessage($"Failed to execute function {functionResponse.FullyQualifiedName}. Try something else!"); + } + } + else + { + chatHistoryForSteps.AddUserMessage($"Function {functionResponse.FullyQualifiedName} does not exist in the kernel. 
Try something else!"); + } + } + } + + // We've completed the max iterations, but the model hasn't returned a final answer. + return new FunctionCallingStepwisePlannerResult + { + FinalAnswer = string.Empty, + ChatHistory = chatHistoryForSteps, + Iterations = this._options.MaxIterations, + }; + } + + private async Task GetCompletionWithFunctionsAsync( + ChatHistory chatHistory, + Kernel kernel, + IChatCompletionService chatCompletion, + OpenAIPromptExecutionSettings openAIExecutionSettings, + ILogger logger, + CancellationToken cancellationToken) + { + openAIExecutionSettings.ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions; + + await this.ValidateTokenCountAsync(chatHistory, kernel, logger, openAIExecutionSettings, cancellationToken).ConfigureAwait(false); + return await chatCompletion.GetChatMessageContentAsync(chatHistory, openAIExecutionSettings, kernel, cancellationToken).ConfigureAwait(false); + } + + private async Task GetFunctionsManualAsync(Kernel kernel, ILogger logger, CancellationToken cancellationToken) + { + return await kernel.Plugins.GetJsonSchemaFunctionsManualAsync(this._options, null, logger, false, OpenAIFunction.NameSeparator, cancellationToken).ConfigureAwait(false); + } + + // Create and invoke a kernel function to generate the initial plan + private async Task GeneratePlanAsync(string question, Kernel kernel, ILogger logger, CancellationToken cancellationToken) + { + var generatePlanFunction = kernel.CreateFunctionFromPromptYaml(this._generatePlanYaml); + string functionsManual = await this.GetFunctionsManualAsync(kernel, logger, cancellationToken).ConfigureAwait(false); + var generatePlanArgs = new KernelArguments + { + [NameDelimiterKey] = OpenAIFunction.NameSeparator, + [AvailableFunctionsKey] = functionsManual, + [GoalKey] = question + }; + var generatePlanResult = await kernel.InvokeAsync(generatePlanFunction, generatePlanArgs, cancellationToken).ConfigureAwait(false); + return generatePlanResult.GetValue() ?? throw new KernelException("Failed get a completion for the plan."); + } + + private async Task BuildChatHistoryForStepAsync( + string goal, + string initialPlan, + Kernel kernel, + KernelPromptTemplateFactory promptTemplateFactory, + CancellationToken cancellationToken) + { + var chatHistory = new ChatHistory(); + + // Add system message with context about the initial goal/plan + var arguments = new KernelArguments + { + [GoalKey] = goal, + [InitialPlanKey] = initialPlan + }; + var systemMessage = await promptTemplateFactory.Create(new PromptTemplateConfig(this._stepPrompt)).RenderAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + chatHistory.AddSystemMessage(systemMessage); + + return chatHistory; + } + + private bool TryGetFunctionResponse(ChatMessageContent chatMessage, [NotNullWhen(true)] out IReadOnlyList? functionResponses, out string? errorMessage) + { + OpenAIChatMessageContent? openAiChatMessage = chatMessage as OpenAIChatMessageContent; + Verify.NotNull(openAiChatMessage, nameof(openAiChatMessage)); + + functionResponses = null; + errorMessage = null; + try + { + functionResponses = openAiChatMessage.GetOpenAIFunctionToolCalls(); + } + catch (JsonException) + { + errorMessage = "That function call is invalid. Try something else!"; + } + + return functionResponses is { Count: > 0 }; + } + + private bool TryFindFinalAnswer(OpenAIFunctionToolCall functionResponse, out string finalAnswer, out string? 
errorMessage) + { + finalAnswer = string.Empty; + errorMessage = null; + + if (functionResponse.PluginName == "UserInteraction" && functionResponse.FunctionName == "SendFinalAnswer") + { + if (functionResponse.Arguments is { Count: > 0 } arguments && arguments.TryGetValue("answer", out object? valueObj)) + { + finalAnswer = ParseObjectAsString(valueObj); + } + else + { + errorMessage = "Returned answer in incorrect format. Try again!"; + } + return true; + } + return false; + } + + private static string ParseObjectAsString(object? valueObj) + { + string resultStr = string.Empty; + + if (valueObj is RestApiOperationResponse apiResponse) + { + resultStr = apiResponse.Content as string ?? string.Empty; + } + else if (valueObj is string valueStr) + { + resultStr = valueStr; + } + else if (valueObj is JsonElement valueElement) + { + if (valueElement.ValueKind == JsonValueKind.String) + { + resultStr = valueElement.GetString() ?? ""; + } + else + { + resultStr = valueElement.ToJsonString(); + } + } + else + { + resultStr = JsonSerializer.Serialize(valueObj); + } + + return resultStr; + } + + private async Task ValidateTokenCountAsync( + ChatHistory chatHistory, + Kernel kernel, + ILogger logger, + OpenAIPromptExecutionSettings openAIExecutionSettings, + CancellationToken cancellationToken) + { + if (this._options.MaxPromptTokens is not null) + { + string functionManual = string.Empty; + + // If using functions, get the functions manual to include in token count estimate + if (openAIExecutionSettings.ToolCallBehavior == ToolCallBehavior.EnableKernelFunctions) + { + functionManual = await this.GetFunctionsManualAsync(kernel, logger, cancellationToken).ConfigureAwait(false); + } + + var tokenCount = chatHistory.GetTokenCount(additionalMessage: functionManual); + if (tokenCount >= this._options.MaxPromptTokens) + { + throw new KernelException("ChatHistory is too long to get a completion. Try reducing the available functions."); + } + } + } + + /// + /// The options for the planner + /// + private readonly FunctionCallingStepwisePlannerOptions _options; + + /// + /// The prompt YAML for generating the initial stepwise plan. + /// + private readonly string _generatePlanYaml; + + /// + /// The prompt (system message) for performing the steps. + /// + private readonly string _stepPrompt; + + /// + /// The name to use when creating semantic functions that are restricted from plan creation + /// + private const string StepwisePlannerPluginName = "StepwisePlanner_Excluded"; + + /// + /// The user message to add to the chat history for each step of the plan. + /// + private const string StepwiseUserMessage = "Perform the next step of the plan if there is more work to do. When you have reached a final answer, use the UserInteraction-SendFinalAnswer function to communicate this back to the user."; + + // Context variable keys + private const string AvailableFunctionsKey = "available_functions"; + private const string InitialPlanKey = "initial_plan"; + private const string GoalKey = "goal"; + private const string NameDelimiterKey = "name_delimiter"; + + #endregion private + + /// + /// Plugin used by the to interact with the caller. + /// + public sealed class UserInteraction + { + /// + /// This function is used by the to indicate when the final answer has been found. + /// + /// The final answer for the plan. + [KernelFunction] + [Description("This function is used to send the final answer of a plan to the user.")] +#pragma warning disable IDE0060 // Remove unused parameter. 
The parameter is purely an indication to the LLM and is not intended to be used. + public string SendFinalAnswer([Description("The final answer")] string answer) +#pragma warning restore IDE0060 + { + return "Thanks"; + } + } +} diff --git a/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlannerOptions.cs b/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlannerOptions.cs new file mode 100644 index 000000000000..777781cce016 --- /dev/null +++ b/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlannerOptions.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Configuration for Stepwise planner instances. +/// +public sealed class FunctionCallingStepwisePlannerOptions : PlannerOptions +{ + /// + /// Initializes a new instance of the + /// + public FunctionCallingStepwisePlannerOptions() { } + + /// + /// The maximum total number of tokens to allow in a completion request, + /// which includes the tokens from the prompt and completion + /// + public int? MaxTokens { get; set; } + + /// + /// The ratio of tokens to allocate to the completion request. (prompt / (prompt + completion)) + /// + public double MaxTokensRatio { get; set; } = 0.1; + + internal int? MaxCompletionTokens => (this.MaxTokens is null) ? null : (int)(this.MaxTokens * this.MaxTokensRatio); + internal int? MaxPromptTokens => (this.MaxTokens is null) ? null : (int)(this.MaxTokens * (1 - this.MaxTokensRatio)); + + /// + /// Delegate to get the prompt template YAML for the initial plan generation phase. + /// + public Func? GetInitialPlanPromptTemplate { get; set; } + + /// + /// Delegate to get the prompt template string (system message) for the step execution phase. + /// + public Func? GetStepPromptTemplate { get; set; } + + /// + /// The maximum number of iterations to allow in a plan. + /// + public int MaxIterations { get; set; } = 15; + + /// + /// The minimum time to wait between iterations in milliseconds. + /// + public int MinIterationTimeMs { get; set; } + + /// + /// The prompt execution settings to use for the step execution phase. + /// + public OpenAIPromptExecutionSettings? ExecutionSettings { get; set; } +} diff --git a/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlannerResult.cs b/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlannerResult.cs new file mode 100644 index 000000000000..c4cfc3635bcc --- /dev/null +++ b/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlannerResult.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Planning; + +/// +/// Result produced by the . +/// +public class FunctionCallingStepwisePlannerResult +{ + /// + /// Final result message of the plan. + /// + public string FinalAnswer { get; internal set; } = string.Empty; + + /// + /// Chat history containing the planning process. + /// + public ChatHistory? ChatHistory { get; internal set; } + + /// + /// Number of iterations performed by the planner. 
+ /// + public int Iterations { get; internal set; } = 0; +} diff --git a/dotnet/src/Planners/Planners.OpenAI/Stepwise/GeneratePlan.yaml b/dotnet/src/Planners/Planners.OpenAI/Stepwise/GeneratePlan.yaml new file mode 100644 index 000000000000..7793437aea74 --- /dev/null +++ b/dotnet/src/Planners/Planners.OpenAI/Stepwise/GeneratePlan.yaml @@ -0,0 +1,32 @@ +template_format: semantic-kernel +template: | + + You are an expert at generating plans from a given GOAL. Think step by step and determine a plan to satisfy the specified GOAL using only the FUNCTIONS provided to you. You can also make use of your own knowledge while forming an answer but you must not use functions that are not provided. Once you have come to a final answer, use the UserInteraction{{$name_delimiter}}SendFinalAnswer function to communicate this back to the user. + + [FUNCTIONS] + + {{$available_functions}} + + [END FUNCTIONS] + + To create the plan, follow these steps: + 0. Each step should be something that is capable of being done by the list of available functions. + 1. Steps can use output from one or more previous steps as input, if appropriate. + 2. The plan should be as short as possible. + + {{$goal}} +description: Generate a step-by-step plan to satisfy a given goal +name: GeneratePlan +input_variables: + - name: available_functions + description: A list of functions that can be used to generate the plan + - name: goal + description: The goal to satisfy +execution_settings: + default: + temperature: 0.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [] diff --git a/dotnet/src/Planners/Planners.OpenAI/Stepwise/StepPrompt.txt b/dotnet/src/Planners/Planners.OpenAI/Stepwise/StepPrompt.txt new file mode 100644 index 000000000000..1299d55d62e3 --- /dev/null +++ b/dotnet/src/Planners/Planners.OpenAI/Stepwise/StepPrompt.txt @@ -0,0 +1,6 @@ +Original request: {{$goal}} + +You are in the process of helping the user fulfill this request using the following plan: +{{$initial_plan}} + +The user will ask you for help with each step. \ No newline at end of file diff --git a/dotnet/src/Planners/Planners.OpenAI/Utils/EmbeddedResource.cs b/dotnet/src/Planners/Planners.OpenAI/Utils/EmbeddedResource.cs new file mode 100644 index 000000000000..c887f5e35470 --- /dev/null +++ b/dotnet/src/Planners/Planners.OpenAI/Utils/EmbeddedResource.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Reflection; + +namespace Microsoft.SemanticKernel.Planning; + +internal static class EmbeddedResource +{ + private static readonly string? s_namespace = typeof(EmbeddedResource).Namespace; + + internal static string Read(string name) + { + var assembly = typeof(EmbeddedResource).GetTypeInfo().Assembly; + if (assembly == null) { throw new FileNotFoundException($"[{s_namespace}] {name} assembly not found"); } + + using Stream? resource = assembly.GetManifestResourceStream($"{s_namespace}." + name); + if (resource == null) { throw new FileNotFoundException($"[{s_namespace}] {name} resource not found"); } + + using var reader = new StreamReader(resource); + return reader.ReadToEnd(); + } +} diff --git a/dotnet/src/Plugins/Plugins.Core/AssemblyInfo.cs b/dotnet/src/Plugins/Plugins.Core/AssemblyInfo.cs new file mode 100644 index 000000000000..0aef47e394f8 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.Core/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. 
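A hedged usage sketch for the FunctionCallingStepwisePlanner and its options shown above. The kernel setup and model id are placeholders, `MyPlugin` is a hypothetical plugin name standing in for whatever functions the planner should be allowed to call, and the option values are arbitrary examples rather than recommended defaults.

```csharp
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Planning;

// Placeholder setup: requires an OpenAI chat completion service that supports tool calls.
Kernel kernel = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion(modelId: "gpt-4", apiKey: "<api-key>")
    .Build();

// Import the plugins the planner may invoke (hypothetical plugin name).
// kernel.ImportPluginFromType<MyPlugin>();

var planner = new FunctionCallingStepwisePlanner(new FunctionCallingStepwisePlannerOptions
{
    MaxIterations = 10,        // stop after 10 steps even if no final answer was produced
    MinIterationTimeMs = 1500, // pause between steps to avoid rate limiting
});

FunctionCallingStepwisePlannerResult result =
    await planner.ExecuteAsync(kernel, "What time is it in Seattle, and what is that hour plus 10?");

Console.WriteLine($"{result.FinalAnswer} (completed in {result.Iterations} iterations)");
```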
+ +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0050")] diff --git a/dotnet/src/Plugins/Plugins.Core/ConversationSummaryPlugin.cs b/dotnet/src/Plugins/Plugins.Core/ConversationSummaryPlugin.cs index 01fca19a6e78..0c540e64b915 100644 --- a/dotnet/src/Plugins/Plugins.Core/ConversationSummaryPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Core/ConversationSummaryPlugin.cs @@ -3,129 +3,102 @@ using System.Collections.Generic; using System.ComponentModel; using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Plugins.Core; /// -/// Semantic plugin that enables conversations summarization. +/// Semantic plugin that enables conversations summarization. /// -/// -/// -/// var kernel = new KernelBuilder().Build(); -/// kernel.ImportFunctions(new ConversationSummaryPlugin(kernel)); -/// -/// public class ConversationSummaryPlugin { /// - /// The max tokens to process in a single semantic function call. + /// The max tokens to process in a single prompt function call. /// private const int MaxTokens = 1024; - private readonly ISKFunction _summarizeConversationFunction; - private readonly ISKFunction _conversationActionItemsFunction; - private readonly ISKFunction _conversationTopicsFunction; + private readonly KernelFunction _summarizeConversationFunction; + private readonly KernelFunction _conversationActionItemsFunction; + private readonly KernelFunction _conversationTopicsFunction; /// /// Initializes a new instance of the class. /// - /// Kernel instance - public ConversationSummaryPlugin(IKernel kernel) + public ConversationSummaryPlugin() { - this._summarizeConversationFunction = kernel.CreateSemanticFunction( - SemanticFunctionConstants.SummarizeConversationDefinition, - pluginName: nameof(ConversationSummaryPlugin), - description: "Given a section of a conversation transcript, summarize the part of the conversation.", - requestSettings: new AIRequestSettings() + PromptExecutionSettings settings = new() + { + ExtensionData = new() { - ExtensionData = new Dictionary() - { - { "Temperature", 0.1 }, - { "TopP", 0.5 }, - { "MaxTokens", MaxTokens } - } - }); + { "Temperature", 0.1 }, + { "TopP", 0.5 }, + { "MaxTokens", MaxTokens } + } + }; + + this._summarizeConversationFunction = KernelFunctionFactory.CreateFromPrompt( + PromptFunctionConstants.SummarizeConversationDefinition, + description: "Given a section of a conversation transcript, summarize the part of the conversation.", + executionSettings: settings); - this._conversationActionItemsFunction = kernel.CreateSemanticFunction( - SemanticFunctionConstants.GetConversationActionItemsDefinition, - pluginName: nameof(ConversationSummaryPlugin), + this._conversationActionItemsFunction = KernelFunctionFactory.CreateFromPrompt( + PromptFunctionConstants.GetConversationActionItemsDefinition, description: "Given a section of a conversation transcript, identify action items.", - requestSettings: new AIRequestSettings() - { - ExtensionData = new Dictionary() - { - { "Temperature", 0.1 }, - { "TopP", 0.5 }, - { "MaxTokens", MaxTokens } - } - }); + executionSettings: settings); - this._conversationTopicsFunction = kernel.CreateSemanticFunction( - SemanticFunctionConstants.GetConversationTopicsDefinition, - pluginName: nameof(ConversationSummaryPlugin), + this._conversationTopicsFunction = KernelFunctionFactory.CreateFromPrompt( + 
PromptFunctionConstants.GetConversationTopicsDefinition, description: "Analyze a conversation transcript and extract key topics worth remembering.", - requestSettings: new AIRequestSettings() - { - ExtensionData = new Dictionary() - { - { "Temperature", 0.1 }, - { "TopP", 0.5 }, - { "MaxTokens", MaxTokens } - } - }); + executionSettings: settings); } /// /// Given a long conversation transcript, summarize the conversation. /// /// A long conversation transcript. - /// The SKContext for function execution. - [SKFunction, Description("Given a long conversation transcript, summarize the conversation.")] - public Task SummarizeConversationAsync( + /// The containing services, plugins, and other state for use throughout the operation. + [KernelFunction, Description("Given a long conversation transcript, summarize the conversation.")] + public Task SummarizeConversationAsync( [Description("A long conversation transcript.")] string input, - SKContext context) - { - List lines = TextChunker.SplitPlainTextLines(input, MaxTokens); - List paragraphs = TextChunker.SplitPlainTextParagraphs(lines, MaxTokens); - - return this._summarizeConversationFunction - .AggregatePartitionedResultsAsync(paragraphs, context); - } + Kernel kernel) => + ProcessAsync(this._summarizeConversationFunction, input, kernel); /// /// Given a long conversation transcript, identify action items. /// /// A long conversation transcript. - /// The SKContext for function execution. - [SKFunction, Description("Given a long conversation transcript, identify action items.")] - public Task GetConversationActionItemsAsync( + /// The containing services, plugins, and other state for use throughout the operation. + [KernelFunction, Description("Given a long conversation transcript, identify action items.")] + public Task GetConversationActionItemsAsync( [Description("A long conversation transcript.")] string input, - SKContext context) - { - List lines = TextChunker.SplitPlainTextLines(input, MaxTokens); - List paragraphs = TextChunker.SplitPlainTextParagraphs(lines, MaxTokens); - - return this._conversationActionItemsFunction - .AggregatePartitionedResultsAsync(paragraphs, context); - } + Kernel kernel) => + ProcessAsync(this._conversationActionItemsFunction, input, kernel); /// /// Given a long conversation transcript, identify topics. /// /// A long conversation transcript. - /// The SKContext for function execution. - [SKFunction, Description("Given a long conversation transcript, identify topics worth remembering.")] - public Task GetConversationTopicsAsync( + /// The containing services, plugins, and other state for use throughout the operation. + [KernelFunction, Description("Given a long conversation transcript, identify topics worth remembering.")] + public Task GetConversationTopicsAsync( [Description("A long conversation transcript.")] string input, - SKContext context) + Kernel kernel) => + ProcessAsync(this._conversationTopicsFunction, input, kernel); + + private static async Task ProcessAsync(KernelFunction func, string input, Kernel kernel) { List lines = TextChunker.SplitPlainTextLines(input, MaxTokens); List paragraphs = TextChunker.SplitPlainTextParagraphs(lines, MaxTokens); - return this._conversationTopicsFunction - .AggregatePartitionedResultsAsync(paragraphs, context); + string[] results = new string[paragraphs.Count]; + + for (int i = 0; i < results.Length; i++) + { + // The first parameter is the input text. 
+ results[i] = (await func.InvokeAsync(kernel, new() { ["input"] = paragraphs[i] }).ConfigureAwait(false)) + .GetValue() ?? string.Empty; + } + + return string.Join("\n", results); } } diff --git a/dotnet/src/Plugins/Plugins.Core/FileIOPlugin.cs b/dotnet/src/Plugins/Plugins.Core/FileIOPlugin.cs index c50ed4e9302c..52a780344ff6 100644 --- a/dotnet/src/Plugins/Plugins.Core/FileIOPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Core/FileIOPlugin.cs @@ -11,12 +11,6 @@ namespace Microsoft.SemanticKernel.Plugins.Core; /// /// Read and write from a file. /// -/// -/// Usage: kernel.ImportFunctions(new FileIOPlugin(), "file"); -/// Examples: -/// {{file.readAsync $path }} => "hello world" -/// {{file.writeAsync}} -/// public sealed class FileIOPlugin { /// @@ -27,7 +21,7 @@ public sealed class FileIOPlugin /// /// Source file /// File content - [SKFunction, Description("Read a file")] + [KernelFunction, Description("Read a file")] public async Task ReadAsync([Description("Source file")] string path) { using var reader = File.OpenText(path); @@ -43,7 +37,7 @@ public async Task ReadAsync([Description("Source file")] string path) /// The destination file path /// The file content to write /// An awaitable task - [SKFunction, Description("Write a file")] + [KernelFunction, Description("Write a file")] public async Task WriteAsync( [Description("Destination file")] string path, [Description("File content")] string content) diff --git a/dotnet/src/Plugins/Plugins.Core/HttpPlugin.cs b/dotnet/src/Plugins/Plugins.Core/HttpPlugin.cs index e8f5124c4ba7..1df7bce7e7e1 100644 --- a/dotnet/src/Plugins/Plugins.Core/HttpPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Core/HttpPlugin.cs @@ -4,22 +4,14 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel.Http; namespace Microsoft.SemanticKernel.Plugins.Core; /// /// A plugin that provides HTTP functionality. /// -/// -/// Usage: kernel.ImportFunctions(new HttpPlugin(), "http"); -/// Examples: -/// SKContext.Variables["url"] = "https://www.bing.com" -/// {{http.getAsync $url}} -/// {{http.postAsync $url}} -/// {{http.putAsync $url}} -/// {{http.deleteAsync $url}} -/// [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1054:URI-like parameters should not be strings", Justification = "Semantic Kernel operates on strings")] public sealed class HttpPlugin @@ -29,7 +21,7 @@ public sealed class HttpPlugin /// /// Initializes a new instance of the class. /// - public HttpPlugin() : this(new HttpClient(NonDisposableHttpClientHandler.Instance, disposeHandler: false)) + public HttpPlugin() : this(null) { } @@ -40,8 +32,9 @@ public HttpPlugin() : this(new HttpClient(NonDisposableHttpClientHandler.Instanc /// /// assumes ownership of the instance and will dispose it when the plugin is disposed. /// - public HttpPlugin(HttpClient client) => - this._client = client; + [ActivatorUtilitiesConstructor] + public HttpPlugin(HttpClient? client = null) => + this._client = client ?? HttpClientProvider.GetHttpClient(); /// /// Sends an HTTP GET request to the specified URI and returns the response body as a string. @@ -49,7 +42,7 @@ public HttpPlugin(HttpClient client) => /// URI of the request /// The token to use to request cancellation. /// The response body as a string. 
- [SKFunction, Description("Makes a GET request to a uri")] + [KernelFunction, Description("Makes a GET request to a uri")] public Task GetAsync( [Description("The URI of the request")] string uri, CancellationToken cancellationToken = default) => @@ -62,7 +55,7 @@ public Task GetAsync( /// The body of the request /// The token to use to request cancellation. /// The response body as a string. - [SKFunction, Description("Makes a POST request to a uri")] + [KernelFunction, Description("Makes a POST request to a uri")] public Task PostAsync( [Description("The URI of the request")] string uri, [Description("The body of the request")] string body, @@ -76,7 +69,7 @@ public Task PostAsync( /// The body of the request /// The token to use to request cancellation. /// The response body as a string. - [SKFunction, Description("Makes a PUT request to a uri")] + [KernelFunction, Description("Makes a PUT request to a uri")] public Task PutAsync( [Description("The URI of the request")] string uri, [Description("The body of the request")] string body, @@ -89,7 +82,7 @@ public Task PutAsync( /// URI of the request /// The token to use to request cancellation. /// The response body as a string. - [SKFunction, Description("Makes a DELETE request to a uri")] + [KernelFunction, Description("Makes a DELETE request to a uri")] public Task DeleteAsync( [Description("The URI of the request")] string uri, CancellationToken cancellationToken = default) => @@ -103,7 +96,7 @@ public Task DeleteAsync( private async Task SendRequestAsync(string uri, HttpMethod method, HttpContent? requestContent, CancellationToken cancellationToken) { using var request = new HttpRequestMessage(method, uri) { Content = requestContent }; - request.Headers.Add("User-Agent", Telemetry.HttpUserAgent); + request.Headers.Add("User-Agent", HttpHeaderValues.UserAgent); using var response = await this._client.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); return await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); } diff --git a/dotnet/src/Plugins/Plugins.Core/MathPlugin.cs b/dotnet/src/Plugins/Plugins.Core/MathPlugin.cs index c8f0a84d8978..f76390c22e20 100644 --- a/dotnet/src/Plugins/Plugins.Core/MathPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Core/MathPlugin.cs @@ -7,32 +7,29 @@ namespace Microsoft.SemanticKernel.Plugins.Core; /// /// MathPlugin provides a set of functions to make Math calculations. /// -/// -/// Usage: kernel.ImportFunctions(new MathPlugin(), "math"); -/// Examples: -/// {{math.Add}} => Returns the sum of FirstNumber and SecondNumber (provided in the SKContext) -/// public sealed class MathPlugin { /// - /// Returns the Addition result of initial and amount values provided. + /// Returns the addition result of initial and amount values provided. /// - /// Initial value to which to add the specified amount + /// Initial value to which to add the specified amount. /// The amount to add as a string. /// The resulting sum as a string. - [SKFunction, Description("Adds an amount to a value")] + [KernelFunction, Description("Adds an amount to a value")] + [return: Description("The sum")] public int Add( [Description("The value to add")] int value, [Description("Amount to add")] int amount) => value + amount; /// - /// Returns the Sum of two SKContext numbers provided. + /// Returns the subtraction result of initial and amount values provided. 
/// - /// Initial value from which to subtract the specified amount + /// Initial value from which to subtract the specified amount. /// The amount to subtract as a string. /// The resulting subtraction as a string. - [SKFunction, Description("Subtracts an amount from a value")] + [KernelFunction, Description("Subtracts an amount from a value")] + [return: Description("The difference")] public int Subtract( [Description("The value to subtract")] int value, [Description("Amount to subtract")] int amount) => diff --git a/dotnet/src/Plugins/Plugins.Core/Plugins.Core.csproj b/dotnet/src/Plugins/Plugins.Core/Plugins.Core.csproj index 04ac3a26b1d6..fc446022d6b6 100644 --- a/dotnet/src/Plugins/Plugins.Core/Plugins.Core.csproj +++ b/dotnet/src/Plugins/Plugins.Core/Plugins.Core.csproj @@ -5,6 +5,7 @@ Microsoft.SemanticKernel.Plugins.Core $(AssemblyName) netstandard2.0 + alpha diff --git a/dotnet/src/Plugins/Plugins.Core/PromptFunctionConstants.cs b/dotnet/src/Plugins/Plugins.Core/PromptFunctionConstants.cs new file mode 100644 index 000000000000..03c482283862 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.Core/PromptFunctionConstants.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Plugins.Core; + +internal static class PromptFunctionConstants +{ + internal const string SummarizeConversationDefinition = + @"BEGIN CONTENT TO SUMMARIZE: +{{$INPUT}} + +END CONTENT TO SUMMARIZE. + +Summarize the conversation in 'CONTENT TO SUMMARIZE', identifying main points of discussion and any conclusions that were reached. +Do not incorporate other general knowledge. +Summary is in plain text, in complete sentences, with no markup or tags. + +BEGIN SUMMARY: +"; + + internal const string GetConversationActionItemsDefinition = + @"You are an action item extractor. You will be given chat history and need to make note of action items mentioned in the chat. +Extract action items from the content if there are any. If there are no action, return nothing. If a single field is missing, use an empty string. +Return the action items in json. + +Possible statuses for action items are: Open, Closed, In Progress. + +EXAMPLE INPUT WITH ACTION ITEMS: + +John Doe said: ""I will record a demo for the new feature by Friday"" +I said: ""Great, thanks John. We may not use all of it but it's good to get it out there."" + +EXAMPLE OUTPUT: +{ + ""actionItems"": [ + { + ""owner"": ""John Doe"", + ""actionItem"": ""Record a demo for the new feature"", + ""dueDate"": ""Friday"", + ""status"": ""Open"", + ""notes"": """" + } + ] +} + +EXAMPLE INPUT WITHOUT ACTION ITEMS: + +John Doe said: ""Hey I'm going to the store, do you need anything?"" +I said: ""No thanks, I'm good."" + +EXAMPLE OUTPUT: +{ + ""action_items"": [] +} + +CONTENT STARTS HERE. + +{{$INPUT}} + +CONTENT STOPS HERE. + +OUTPUT:"; + + internal const string GetConversationTopicsDefinition = + @"Analyze the following extract taken from a conversation transcript and extract key topics. +- Topics only worth remembering. +- Be brief. Short phrases. +- Can use broken English. +- Conciseness is very important. +- Topics can include names of memories you want to recall. +- NO LONG SENTENCES. SHORT PHRASES. +- Return in JSON +[Input] +My name is Macbeth. I used to be King of Scotland, but I died. My wife's name is Lady Macbeth and we were married for 15 years. We had no children. Our beloved dog Toby McDuff was a famous hunter of rats in the forest. +My tragic story was immortalized by Shakespeare in a play. 
+[Output] +{ + ""topics"": [ + ""Macbeth"", + ""King of Scotland"", + ""Lady Macbeth"", + ""Dog"", + ""Toby McDuff"", + ""Shakespeare"", + ""Play"", + ""Tragedy"" + ] +} ++++++ +[Input] +{{$INPUT}} +[Output]"; +} diff --git a/dotnet/src/Plugins/Plugins.Core/SemanticFunctionConstants.cs b/dotnet/src/Plugins/Plugins.Core/SemanticFunctionConstants.cs deleted file mode 100644 index eaa18c28c93b..000000000000 --- a/dotnet/src/Plugins/Plugins.Core/SemanticFunctionConstants.cs +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Plugins.Core; - -internal static class SemanticFunctionConstants -{ - internal const string SummarizeConversationDefinition = - @"BEGIN CONTENT TO SUMMARIZE: -{{$INPUT}} - -END CONTENT TO SUMMARIZE. - -Summarize the conversation in 'CONTENT TO SUMMARIZE', identifying main points of discussion and any conclusions that were reached. -Do not incorporate other general knowledge. -Summary is in plain text, in complete sentences, with no markup or tags. - -BEGIN SUMMARY: -"; - - internal const string GetConversationActionItemsDefinition = - @"You are an action item extractor. You will be given chat history and need to make note of action items mentioned in the chat. -Extract action items from the content if there are any. If there are no action, return nothing. If a single field is missing, use an empty string. -Return the action items in json. - -Possible statuses for action items are: Open, Closed, In Progress. - -EXAMPLE INPUT WITH ACTION ITEMS: - -John Doe said: ""I will record a demo for the new feature by Friday"" -I said: ""Great, thanks John. We may not use all of it but it's good to get it out there."" - -EXAMPLE OUTPUT: -{ - ""actionItems"": [ - { - ""owner"": ""John Doe"", - ""actionItem"": ""Record a demo for the new feature"", - ""dueDate"": ""Friday"", - ""status"": ""Open"", - ""notes"": """" - } - ] -} - -EXAMPLE INPUT WITHOUT ACTION ITEMS: - -John Doe said: ""Hey I'm going to the store, do you need anything?"" -I said: ""No thanks, I'm good."" - -EXAMPLE OUTPUT: -{ - ""action_items"": [] -} - -CONTENT STARTS HERE. - -{{$INPUT}} - -CONTENT STOPS HERE. - -OUTPUT:"; - - internal const string GetConversationTopicsDefinition = - @"Analyze the following extract taken from a conversation transcript and extract key topics. -- Topics only worth remembering. -- Be brief. Short phrases. -- Can use broken English. -- Conciseness is very important. -- Topics can include names of memories you want to recall. -- NO LONG SENTENCES. SHORT PHRASES. -- Return in JSON -[Input] -My name is Macbeth. I used to be King of Scotland, but I died. My wife's name is Lady Macbeth and we were married for 15 years. We had no children. Our beloved dog Toby McDuff was a famous hunter of rats in the forest. -My tragic story was immortalized by Shakespeare in a play. -[Output] -{ - ""topics"": [ - ""Macbeth"", - ""King of Scotland"", - ""Lady Macbeth"", - ""Dog"", - ""Toby McDuff"", - ""Shakespeare"", - ""Play"", - ""Tragedy"" - ] -} -+++++ -[Input] -{{$INPUT}} -[Output]"; -} diff --git a/dotnet/src/Plugins/Plugins.Core/TextPlugin.cs b/dotnet/src/Plugins/Plugins.Core/TextPlugin.cs index 2ba5d1d5d2f6..c145a7e8bfa9 100644 --- a/dotnet/src/Plugins/Plugins.Core/TextPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Core/TextPlugin.cs @@ -8,107 +8,65 @@ namespace Microsoft.SemanticKernel.Plugins.Core; /// /// TextPlugin provides a set of functions to manipulate strings. 
/// -/// -/// Usage: kernel.ImportFunctions(new TextPlugin(), "text"); -/// -/// Examples: -/// SKContext.Variables["input"] = " hello world " -/// {{text.trim $input}} => "hello world" -/// {{text.trimStart $input} => "hello world " -/// {{text.trimEnd $input} => " hello world" -/// SKContext.Variables["input"] = "hello world" -/// {{text.uppercase $input}} => "HELLO WORLD" -/// SKContext.Variables["input"] = "HELLO WORLD" -/// {{text.lowercase $input}} => "hello world" -/// public sealed class TextPlugin { /// /// Trim whitespace from the start and end of a string. /// - /// - /// SKContext.Variables["input"] = " hello world " - /// {{text.trim $input}} => "hello world" - /// /// The string to trim. /// The trimmed string. - [SKFunction, Description("Trim whitespace from the start and end of a string.")] + [KernelFunction, Description("Trim whitespace from the start and end of a string.")] public string Trim(string input) => input.Trim(); /// /// Trim whitespace from the start of a string. /// - /// - /// SKContext.Variables["input"] = " hello world " - /// {{text.trimStart $input} => "hello world " - /// /// The string to trim. /// The trimmed string. - [SKFunction, Description("Trim whitespace from the start of a string.")] + [KernelFunction, Description("Trim whitespace from the start of a string.")] public string TrimStart(string input) => input.TrimStart(); /// /// Trim whitespace from the end of a string. /// - /// - /// SKContext.Variables["input"] = " hello world " - /// {{text.trimEnd $input} => " hello world" - /// /// The string to trim. /// The trimmed string. - [SKFunction, Description("Trim whitespace from the end of a string.")] + [KernelFunction, Description("Trim whitespace from the end of a string.")] public string TrimEnd(string input) => input.TrimEnd(); /// /// Convert a string to uppercase. /// - /// - /// SKContext.Variables["input"] = "hello world" - /// {{text.uppercase $input}} => "HELLO WORLD" - /// /// The string to convert. /// An object that supplies culture-specific casing rules. /// The converted string. - [SKFunction, Description("Convert a string to uppercase.")] + [KernelFunction, Description("Convert a string to uppercase.")] public string Uppercase(string input, CultureInfo? cultureInfo = null) => input.ToUpper(cultureInfo); /// /// Convert a string to lowercase. /// - /// - /// SKContext.Variables["input"] = "HELLO WORLD" - /// {{text.lowercase $input}} => "hello world" - /// /// The string to convert. /// An object that supplies culture-specific casing rules. /// The converted string. - [SKFunction, Description("Convert a string to lowercase.")] + [KernelFunction, Description("Convert a string to lowercase.")] public string Lowercase(string input, CultureInfo? cultureInfo = null) => input.ToLower(cultureInfo); /// /// Get the length of a string. Returns 0 if null or empty /// - /// - /// SKContext.Variables["input"] = "HELLO WORLD" - /// {{text.length $input}} => "11" - /// /// The string to get length. /// The length size of string (0) if null or empty. - [SKFunction, Description("Get the length of a string.")] + [KernelFunction, Description("Get the length of a string.")] public int Length(string input) => input?.Length ?? 0; /// /// Concatenate two strings into one /// - /// - /// text = "HELLO " - /// SKContext.Variables["input2"] = "WORLD" - /// Result: "HELLO WORLD" - /// /// First input to concatenate with /// Second input to concatenate with /// Concatenation result from both inputs. 
- [SKFunction, Description("Concat two strings into one.")] + [KernelFunction, Description("Concat two strings into one.")] public string Concat( [Description("First input to concatenate with")] string input, [Description("Second input to concatenate with")] string input2) => @@ -119,7 +77,7 @@ public string Concat( /// /// Input string to echo. /// The input string. - [SKFunction, Description("Echo the input string. Useful for capturing plan input for use in multiple functions.")] + [KernelFunction, Description("Echo the input string. Useful for capturing plan input for use in multiple functions.")] public string Echo( [Description("Input string to echo.")] string text) { diff --git a/dotnet/src/Plugins/Plugins.Core/TimePlugin.cs b/dotnet/src/Plugins/Plugins.Core/TimePlugin.cs index 4ec697ae7cee..94b46ad8d296 100644 --- a/dotnet/src/Plugins/Plugins.Core/TimePlugin.cs +++ b/dotnet/src/Plugins/Plugins.Core/TimePlugin.cs @@ -8,31 +8,6 @@ namespace Microsoft.SemanticKernel.Plugins.Core; /// /// TimePlugin provides a set of functions to get the current time and date. /// -/// -/// Usage: kernel.ImportFunctions(new TimePlugin(), "time"); -/// Examples: -/// {{time.date}} => Sunday, 12 January, 2031 -/// {{time.today}} => Sunday, 12 January, 2031 -/// {{time.now}} => Sunday, January 12, 2031 9:15 PM -/// {{time.utcNow}} => Sunday, January 13, 2031 5:15 AM -/// {{time.time}} => 09:15:07 PM -/// {{time.year}} => 2031 -/// {{time.month}} => January -/// {{time.monthNumber}} => 01 -/// {{time.day}} => 12 -/// {{time.dayOfMonth}} => 12 -/// {{time.dayOfWeek}} => Sunday -/// {{time.hour}} => 9 PM -/// {{time.hourNumber}} => 21 -/// {{time.daysAgo $days}} => Sunday, January 12, 2025 9:15 PM -/// {{time.lastMatchingDay $dayName}} => Sunday, 7 May, 2023 -/// {{time.minute}} => 15 -/// {{time.minutes}} => 15 -/// {{time.second}} => 7 -/// {{time.seconds}} => 7 -/// {{time.timeZoneOffset}} => -08:00 -/// {{time.timeZoneName}} => PST -/// /// /// Note: the time represents the time on the hw/vm/machine where the kernel is running. /// TODO: import and use user's timezone @@ -46,7 +21,7 @@ public sealed class TimePlugin /// {{time.date}} => Sunday, 12 January, 2031 /// /// The current date - [SKFunction, Description("Get the current date")] + [KernelFunction, Description("Get the current date")] public string Date(IFormatProvider? formatProvider = null) => // Example: Sunday, 12 January, 2025 DateTimeOffset.Now.ToString("D", formatProvider); @@ -58,7 +33,7 @@ public string Date(IFormatProvider? formatProvider = null) => /// {{time.today}} => Sunday, 12 January, 2031 /// /// The current date - [SKFunction, Description("Get the current date")] + [KernelFunction, Description("Get the current date")] public string Today(IFormatProvider? formatProvider = null) => // Example: Sunday, 12 January, 2025 this.Date(formatProvider); @@ -70,7 +45,7 @@ public string Today(IFormatProvider? formatProvider = null) => /// {{time.now}} => Sunday, January 12, 2025 9:15 PM /// /// The current date and time in the local time zone - [SKFunction, Description("Get the current date and time in the local time zone")] + [KernelFunction, Description("Get the current date and time in the local time zone")] public string Now(IFormatProvider? formatProvider = null) => // Sunday, January 12, 2025 9:15 PM DateTimeOffset.Now.ToString("f", formatProvider); @@ -82,7 +57,7 @@ public string Now(IFormatProvider? 
formatProvider = null) => /// {{time.utcNow}} => Sunday, January 13, 2025 5:15 AM /// /// The current UTC date and time - [SKFunction, Description("Get the current UTC date and time")] + [KernelFunction, Description("Get the current UTC date and time")] public string UtcNow(IFormatProvider? formatProvider = null) => // Sunday, January 13, 2025 5:15 AM DateTimeOffset.UtcNow.ToString("f", formatProvider); @@ -94,7 +69,7 @@ public string UtcNow(IFormatProvider? formatProvider = null) => /// {{time.time}} => 09:15:07 PM /// /// The current time - [SKFunction, Description("Get the current time")] + [KernelFunction, Description("Get the current time")] public string Time(IFormatProvider? formatProvider = null) => // Example: 09:15:07 PM DateTimeOffset.Now.ToString("hh:mm:ss tt", formatProvider); @@ -106,7 +81,7 @@ public string Time(IFormatProvider? formatProvider = null) => /// {{time.year}} => 2025 /// /// The current year - [SKFunction, Description("Get the current year")] + [KernelFunction, Description("Get the current year")] public string Year(IFormatProvider? formatProvider = null) => // Example: 2025 DateTimeOffset.Now.ToString("yyyy", formatProvider); @@ -118,7 +93,7 @@ public string Year(IFormatProvider? formatProvider = null) => /// {time.month}} => January /// /// The current month name - [SKFunction, Description("Get the current month name")] + [KernelFunction, Description("Get the current month name")] public string Month(IFormatProvider? formatProvider = null) => // Example: January DateTimeOffset.Now.ToString("MMMM", formatProvider); @@ -130,7 +105,7 @@ public string Month(IFormatProvider? formatProvider = null) => /// {{time.monthNumber}} => 01 /// /// The current month number - [SKFunction, Description("Get the current month number")] + [KernelFunction, Description("Get the current month number")] public string MonthNumber(IFormatProvider? formatProvider = null) => // Example: 01 DateTimeOffset.Now.ToString("MM", formatProvider); @@ -142,7 +117,7 @@ public string MonthNumber(IFormatProvider? formatProvider = null) => /// {{time.day}} => 12 /// /// The current day of the month - [SKFunction, Description("Get the current day of the month")] + [KernelFunction, Description("Get the current day of the month")] public string Day(IFormatProvider? formatProvider = null) => // Example: 12 DateTimeOffset.Now.ToString("dd", formatProvider); @@ -150,15 +125,11 @@ public string Day(IFormatProvider? formatProvider = null) => /// /// Get the date a provided number of days in the past /// - /// - /// SKContext.Variables["input"] = "3" - /// {{time.daysAgo}} => Sunday, January 12, 2025 9:15 PM - /// /// The date the provided number of days before today - [SKFunction] + [KernelFunction] [Description("Get the date offset by a provided number of days from today")] - public string DaysAgo([Description("The number of days to offset from today"), SKName("input")] double daysOffset, IFormatProvider? formatProvider = null) => - DateTimeOffset.Now.AddDays(-daysOffset).ToString("D", formatProvider); + public string DaysAgo([Description("The number of days to offset from today")] double input, IFormatProvider? 
formatProvider = null) => + DateTimeOffset.Now.AddDays(-input).ToString("D", formatProvider); /// /// Get the current day of the week @@ -167,7 +138,7 @@ public string DaysAgo([Description("The number of days to offset from today"), S /// {{time.dayOfWeek}} => Sunday /// /// The current day of the week - [SKFunction, Description("Get the current day of the week")] + [KernelFunction, Description("Get the current day of the week")] public string DayOfWeek(IFormatProvider? formatProvider = null) => // Example: Sunday DateTimeOffset.Now.ToString("dddd", formatProvider); @@ -179,7 +150,7 @@ public string DayOfWeek(IFormatProvider? formatProvider = null) => /// {{time.hour}} => 9 PM /// /// The current clock hour - [SKFunction, Description("Get the current clock hour")] + [KernelFunction, Description("Get the current clock hour")] public string Hour(IFormatProvider? formatProvider = null) => // Example: 9 PM DateTimeOffset.Now.ToString("h tt", formatProvider); @@ -191,7 +162,7 @@ public string Hour(IFormatProvider? formatProvider = null) => /// {{time.hourNumber}} => 21 /// /// The current clock 24-hour number - [SKFunction, Description("Get the current clock 24-hour number")] + [KernelFunction, Description("Get the current clock 24-hour number")] public string HourNumber(IFormatProvider? formatProvider = null) => // Example: 21 DateTimeOffset.Now.ToString("HH", formatProvider); @@ -204,10 +175,10 @@ public string HourNumber(IFormatProvider? formatProvider = null) => /// /// The date of the last instance of this day name /// dayName is not a recognized name of a day of the week - [SKFunction] + [KernelFunction] [Description("Get the date of the last day matching the supplied week day name in English. Example: Che giorno era 'Martedi' scorso -> dateMatchingLastDayName 'Tuesday' => Tuesday, 16 May, 2023")] public string DateMatchingLastDayName( - [Description("The day name to match"), SKName("input")] DayOfWeek dayName, + [Description("The day name to match")] DayOfWeek input, IFormatProvider? formatProvider = null) { DateTimeOffset dateTime = DateTimeOffset.Now; @@ -216,7 +187,7 @@ public string DateMatchingLastDayName( for (int i = 1; i <= 7; ++i) { dateTime = dateTime.AddDays(-1); - if (dateTime.DayOfWeek == dayName) + if (dateTime.DayOfWeek == input) { break; } @@ -232,7 +203,7 @@ public string DateMatchingLastDayName( /// {{time.minute}} => 15 /// /// The minutes on the current hour - [SKFunction, Description("Get the minutes on the current hour")] + [KernelFunction, Description("Get the minutes on the current hour")] public string Minute(IFormatProvider? formatProvider = null) => // Example: 15 DateTimeOffset.Now.ToString("mm", formatProvider); @@ -244,7 +215,7 @@ public string Minute(IFormatProvider? formatProvider = null) => /// {{time.second}} => 7 /// /// The seconds on the current minute - [SKFunction, Description("Get the seconds on the current minute")] + [KernelFunction, Description("Get the seconds on the current minute")] public string Second(IFormatProvider? formatProvider = null) => // Example: 07 DateTimeOffset.Now.ToString("ss", formatProvider); @@ -256,7 +227,7 @@ public string Second(IFormatProvider? formatProvider = null) => /// {{time.timeZoneOffset}} => -08:00 /// /// The local time zone offset from UTC - [SKFunction, Description("Get the local time zone offset from UTC")] + [KernelFunction, Description("Get the local time zone offset from UTC")] public string TimeZoneOffset(IFormatProvider? 
formatProvider = null) => // Example: -08:00 DateTimeOffset.Now.ToString("%K", formatProvider); @@ -271,7 +242,7 @@ public string TimeZoneOffset(IFormatProvider? formatProvider = null) => /// Note: this is the "current" timezone and it can change over the year, e.g. from PST to PDT /// /// The local time zone name - [SKFunction, Description("Get the local time zone name")] + [KernelFunction, Description("Get the local time zone name")] public string TimeZoneName() => // Example: PST // Note: this is the "current" timezone and it can change over the year, e.g. from PST to PDT diff --git a/dotnet/src/Plugins/Plugins.Core/WaitPlugin.cs b/dotnet/src/Plugins/Plugins.Core/WaitPlugin.cs index a5f10d4a19a6..2c11325d7fcf 100644 --- a/dotnet/src/Plugins/Plugins.Core/WaitPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Core/WaitPlugin.cs @@ -3,25 +3,27 @@ using System; using System.ComponentModel; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; namespace Microsoft.SemanticKernel.Plugins.Core; /// /// WaitPlugin provides a set of functions to wait before making the rest of operations. /// -/// -/// Usage: kernel.ImportFunctions(new WaitPlugin(), "wait"); -/// Examples: -/// {{wait.seconds 10}} => Wait 10 seconds -/// public sealed class WaitPlugin { private readonly TimeProvider _timeProvider; + /// + /// Initializes a new instance of the class. + /// + public WaitPlugin() : this(null) { } + /// /// Initializes a new instance of the class. /// /// An optional time provider. If not provided, a default time provider will be used. + [ActivatorUtilitiesConstructor] public WaitPlugin(TimeProvider? timeProvider = null) => this._timeProvider = timeProvider ?? TimeProvider.System; @@ -31,7 +33,7 @@ public WaitPlugin(TimeProvider? timeProvider = null) => /// /// {{wait.seconds 10}} (Wait 10 seconds) /// - [SKFunction, Description("Wait a given amount of seconds")] + [KernelFunction, Description("Wait a given amount of seconds")] public Task SecondsAsync([Description("The number of seconds to wait")] decimal seconds) => this._timeProvider.Delay(TimeSpan.FromSeconds((double)Math.Max(seconds, 0))); } diff --git a/dotnet/src/Plugins/Plugins.Document/AssemblyInfo.cs b/dotnet/src/Plugins/Plugins.Document/AssemblyInfo.cs new file mode 100644 index 000000000000..5fbb5f0788f9 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.Document/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0051")] diff --git a/dotnet/src/Plugins/Plugins.Document/DocumentPlugin.cs b/dotnet/src/Plugins/Plugins.Document/DocumentPlugin.cs index 4800c0ce67e6..8e8270fba328 100644 --- a/dotnet/src/Plugins/Plugins.Document/DocumentPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Document/DocumentPlugin.cs @@ -7,7 +7,6 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Plugins.Document.FileSystem; namespace Microsoft.SemanticKernel.Plugins.Document; @@ -38,17 +37,6 @@ namespace Microsoft.SemanticKernel.Plugins.Document; /// public sealed class DocumentPlugin { - /// - /// parameter names. - /// - public static class Parameters - { - /// - /// Document file path. 
- /// - public const string FilePath = "filePath"; - } - private readonly IDocumentConnector _documentConnector; private readonly IFileSystemConnector _fileSystemConnector; private readonly ILogger _logger; @@ -63,13 +51,13 @@ public DocumentPlugin(IDocumentConnector documentConnector, IFileSystemConnector { this._documentConnector = documentConnector ?? throw new ArgumentNullException(nameof(documentConnector)); this._fileSystemConnector = fileSystemConnector ?? throw new ArgumentNullException(nameof(fileSystemConnector)); - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(DocumentPlugin)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(DocumentPlugin)) ?? NullLogger.Instance; } /// - /// Read all text from a document, using as the file path. + /// Read all text from a document, using the filePath argument as the file path. /// - [SKFunction, Description("Read all text from a document")] + [KernelFunction, Description("Read all text from a document")] public async Task ReadTextAsync( [Description("Path to the file to read")] string filePath, CancellationToken cancellationToken = default) @@ -80,9 +68,9 @@ public async Task ReadTextAsync( } /// - /// Append the text in to a document. If the document doesn't exist, it will be created. + /// Append the text specified by the text argument to a document. If the document doesn't exist, it will be created. /// - [SKFunction, Description("Append text to a document. If the document doesn't exist, it will be created.")] + [KernelFunction, Description("Append text to a document. If the document doesn't exist, it will be created.")] public async Task AppendTextAsync( [Description("Text to append")] string text, [Description("Destination file path")] string filePath, diff --git a/dotnet/src/Plugins/Plugins.Document/OpenXml/Extensions/WordprocessingDocumentEx.cs b/dotnet/src/Plugins/Plugins.Document/OpenXml/Extensions/WordprocessingDocumentEx.cs index 81d8838532ea..0097bac47a4f 100644 --- a/dotnet/src/Plugins/Plugins.Document/OpenXml/Extensions/WordprocessingDocumentEx.cs +++ b/dotnet/src/Plugins/Plugins.Document/OpenXml/Extensions/WordprocessingDocumentEx.cs @@ -5,7 +5,7 @@ using DocumentFormat.OpenXml.Packaging; using DocumentFormat.OpenXml.Wordprocessing; -namespace Microsoft.SemanticKernel.Plugins.Document.OpenXml.Extensions; +namespace Microsoft.SemanticKernel.Plugins.Document.OpenXml; /// /// Extension methods for DocumentFormat.OpenXml.Packaging.WordprocessingDocument diff --git a/dotnet/src/Plugins/Plugins.Document/OpenXml/WordDocumentConnector.cs b/dotnet/src/Plugins/Plugins.Document/OpenXml/WordDocumentConnector.cs index 8d745536d915..143d6b2c396c 100644 --- a/dotnet/src/Plugins/Plugins.Document/OpenXml/WordDocumentConnector.cs +++ b/dotnet/src/Plugins/Plugins.Document/OpenXml/WordDocumentConnector.cs @@ -3,7 +3,6 @@ using System.IO; using DocumentFormat.OpenXml; using DocumentFormat.OpenXml.Packaging; -using Microsoft.SemanticKernel.Plugins.Document.OpenXml.Extensions; namespace Microsoft.SemanticKernel.Plugins.Document.OpenXml; diff --git a/dotnet/src/Plugins/Plugins.Document/Plugins.Document.csproj b/dotnet/src/Plugins/Plugins.Document/Plugins.Document.csproj index fdc9d46c39be..8ab3de7f1875 100644 --- a/dotnet/src/Plugins/Plugins.Document/Plugins.Document.csproj +++ b/dotnet/src/Plugins/Plugins.Document/Plugins.Document.csproj @@ -5,9 +5,11 @@ Microsoft.SemanticKernel.Plugins.Document $(AssemblyName) netstandard2.0 + alpha + diff --git 
a/dotnet/src/Plugins/Plugins.Memory/AssemblyInfo.cs b/dotnet/src/Plugins/Plugins.Memory/AssemblyInfo.cs new file mode 100644 index 000000000000..9defed7a14a8 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.Memory/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0052")] diff --git a/dotnet/src/Plugins/Plugins.Memory/Collections/MinHeap.cs b/dotnet/src/Plugins/Plugins.Memory/Collections/MinHeap.cs index 3726c5b0a76e..cf711e13c93d 100644 --- a/dotnet/src/Plugins/Plugins.Memory/Collections/MinHeap.cs +++ b/dotnet/src/Plugins/Plugins.Memory/Collections/MinHeap.cs @@ -3,9 +3,8 @@ using System; using System.Collections.Generic; using System.Diagnostics; -using Microsoft.SemanticKernel.Diagnostics; -namespace Microsoft.SemanticKernel.Plugins.Memory.Collections; +namespace Microsoft.SemanticKernel.Memory; /// /// Implements the classic 'heap' data structure. By default, the item with the lowest value is at the top of the heap. diff --git a/dotnet/src/Plugins/Plugins.Memory/Collections/ScoredValue.cs b/dotnet/src/Plugins/Plugins.Memory/Collections/ScoredValue.cs index ebdb360b69f9..b04cc0b2ff0d 100644 --- a/dotnet/src/Plugins/Plugins.Memory/Collections/ScoredValue.cs +++ b/dotnet/src/Plugins/Plugins.Memory/Collections/ScoredValue.cs @@ -2,8 +2,9 @@ using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; -namespace Microsoft.SemanticKernel.Plugins.Memory.Collections; +namespace Microsoft.SemanticKernel.Memory; /// /// Structure for storing data which can be scored. @@ -78,7 +79,7 @@ public static implicit operator ScoredValue(KeyValuePair src) } /// - public override bool Equals(object obj) + public override bool Equals([NotNullWhen(true)] object? obj) { return (obj is ScoredValue other) && this.Equals(other); } diff --git a/dotnet/src/Plugins/Plugins.Memory/Collections/TopNCollection.cs b/dotnet/src/Plugins/Plugins.Memory/Collections/TopNCollection.cs index 9a8b874cd2c8..04886b41a8f3 100644 --- a/dotnet/src/Plugins/Plugins.Memory/Collections/TopNCollection.cs +++ b/dotnet/src/Plugins/Plugins.Memory/Collections/TopNCollection.cs @@ -3,7 +3,7 @@ using System.Collections; using System.Collections.Generic; -namespace Microsoft.SemanticKernel.Plugins.Memory.Collections; +namespace Microsoft.SemanticKernel.Memory; /// /// A collector for Top N matches. Keeps only the best N matches by Score. diff --git a/dotnet/src/Plugins/Plugins.Memory/MemoryBuilder.cs b/dotnet/src/Plugins/Plugins.Memory/MemoryBuilder.cs deleted file mode 100644 index 646f4232cacb..000000000000 --- a/dotnet/src/Plugins/Plugins.Memory/MemoryBuilder.cs +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Memory; - -namespace Microsoft.SemanticKernel.Plugins.Memory; - -/// -/// A builder for Memory plugin. -/// -public sealed class MemoryBuilder -{ - private Func? _memoryStoreFactory = null; - private Func? 
_embeddingGenerationFactory = null; - private IDelegatingHandlerFactory _httpHandlerFactory = NullHttpHandlerFactory.Instance; - private ILoggerFactory _loggerFactory = NullLoggerFactory.Instance; - - /// - /// Build a new instance of using the settings passed so far. - /// - /// Instance of . - public ISemanticTextMemory Build() - { - var memoryStore = this._memoryStoreFactory?.Invoke() ?? - throw new SKException($"{nameof(IMemoryStore)} dependency was not provided. Use {nameof(WithMemoryStore)} method."); - - var embeddingGeneration = this._embeddingGenerationFactory?.Invoke() ?? - throw new SKException($"{nameof(ITextEmbeddingGeneration)} dependency was not provided. Use {nameof(WithTextEmbeddingGeneration)} method."); - - return new SemanticTextMemory(memoryStore, embeddingGeneration); - } - - /// - /// Add a logger factory. - /// - /// The to use for logging. If null, no logging will be performed. - /// Updated Memory builder including the logger factory. - public MemoryBuilder WithLoggerFactory(ILoggerFactory loggerFactory) - { - Verify.NotNull(loggerFactory); - this._loggerFactory = loggerFactory; - return this; - } - - /// - /// Add a http handler factory. - /// - /// Http handler factory to add. - /// Updated Memory builder including the http handler factory. - public MemoryBuilder WithHttpHandlerFactory(IDelegatingHandlerFactory httpHandlerFactory) - { - Verify.NotNull(httpHandlerFactory); - this._httpHandlerFactory = httpHandlerFactory; - return this; - } - - /// - /// Add memory store. - /// - /// Store to add. - /// Updated Memory builder including the memory store. - public MemoryBuilder WithMemoryStore(IMemoryStore store) - { - Verify.NotNull(store); - this._memoryStoreFactory = () => store; - return this; - } - - /// - /// Add memory store factory. - /// - /// The store factory. - /// Updated Memory builder including the memory store. - public MemoryBuilder WithMemoryStore(Func factory) where TStore : IMemoryStore - { - Verify.NotNull(factory); - this._memoryStoreFactory = () => factory(this._loggerFactory); - return this; - } - - /// - /// Add memory store factory. - /// - /// The store factory. - /// Updated Memory builder including the memory store. - public MemoryBuilder WithMemoryStore(Func factory) where TStore : IMemoryStore - { - Verify.NotNull(factory); - this._memoryStoreFactory = () => factory(this._loggerFactory, this._httpHandlerFactory); - return this; - } - - /// - /// Add text embedding generation. - /// - /// The text embedding generation. - /// Updated Memory builder including the text embedding generation. - public MemoryBuilder WithTextEmbeddingGeneration(ITextEmbeddingGeneration textEmbeddingGeneration) - { - Verify.NotNull(textEmbeddingGeneration); - this._embeddingGenerationFactory = () => textEmbeddingGeneration; - return this; - } - - /// - /// Add text embedding generation. - /// - /// The text embedding generation factory. - /// Updated Memory builder including the text embedding generation. 
- public MemoryBuilder WithTextEmbeddingGeneration( - Func factory) where TEmbeddingGeneration : ITextEmbeddingGeneration - { - Verify.NotNull(factory); - this._embeddingGenerationFactory = () => factory(this._loggerFactory, this._httpHandlerFactory); - return this; - } -} diff --git a/dotnet/src/Plugins/Plugins.Memory/Plugins.Memory.csproj b/dotnet/src/Plugins/Plugins.Memory/Plugins.Memory.csproj index 0634d2b185bf..0ceee02fafc3 100644 --- a/dotnet/src/Plugins/Plugins.Memory/Plugins.Memory.csproj +++ b/dotnet/src/Plugins/Plugins.Memory/Plugins.Memory.csproj @@ -5,6 +5,7 @@ Microsoft.SemanticKernel.Plugins.Memory $(AssemblyName) netstandard2.0 + alpha @@ -22,7 +23,6 @@ - diff --git a/dotnet/src/Plugins/Plugins.Memory/TextMemoryPlugin.cs b/dotnet/src/Plugins/Plugins.Memory/TextMemoryPlugin.cs index c7e9a6ea229a..ec9257bf7cbe 100644 --- a/dotnet/src/Plugins/Plugins.Memory/TextMemoryPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Memory/TextMemoryPlugin.cs @@ -2,12 +2,13 @@ using System.Collections.Generic; using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text.Json; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel.Memory; namespace Microsoft.SemanticKernel.Plugins.Memory; @@ -15,31 +16,30 @@ namespace Microsoft.SemanticKernel.Plugins.Memory; /// /// TextMemoryPlugin provides a plugin to save or recall information from the long or short term memory. /// -/// -/// Usage: kernel.ImportFunctions(new TextMemoryPlugin(), "memory"); -/// Examples: -/// SKContext.Variables["input"] = "what is the capital of France?" -/// {{memory.recall $input }} => "Paris" -/// +[Experimental("SKEXP0003")] public sealed class TextMemoryPlugin { /// - /// Name of the context variable used to specify which memory collection to use. + /// Name used to specify the input text. + /// + public const string InputParam = "input"; + /// + /// Name used to specify which memory collection to use. /// public const string CollectionParam = "collection"; /// - /// Name of the context variable used to specify memory search relevance score. + /// Name used to specify memory search relevance score. /// public const string RelevanceParam = "relevance"; /// - /// Name of the context variable used to specify a unique key associated with stored information. + /// Name used to specify a unique key associated with stored information. /// public const string KeyParam = "key"; /// - /// Name of the context variable used to specify the number of memories to recall + /// Name used to specify the number of memories to recall /// public const string LimitParam = "limit"; @@ -48,37 +48,38 @@ public sealed class TextMemoryPlugin private const int DefaultLimit = 1; private readonly ISemanticTextMemory _memory; + private readonly ILogger _logger; /// /// Creates a new instance of the TextMemoryPlugin /// - public TextMemoryPlugin(ISemanticTextMemory memory) + public TextMemoryPlugin( + ISemanticTextMemory memory, + ILoggerFactory? loggerFactory = null) { this._memory = memory; + this._logger = loggerFactory?.CreateLogger(typeof(TextMemoryPlugin)) ?? NullLogger.Instance; } /// /// Key-based lookup for a specific memory /// - /// Memories collection associated with the memory to retrieve /// The key associated with the memory to retrieve. - /// The to use for logging. If null, no logging will be performed. 
+ /// Memories collection associated with the memory to retrieve /// The to monitor for cancellation requests. The default is . - /// - /// SKContext.Variables[TextMemoryPlugin.KeyParam] = "countryInfo1" - /// {{memory.retrieve }} - /// - [SKFunction, Description("Key-based lookup for a specific memory")] + [KernelFunction, Description("Key-based lookup for a specific memory")] public async Task RetrieveAsync( - [SKName(CollectionParam), Description("Memories collection associated with the memory to retrieve"), DefaultValue(DefaultCollection)] string? collection, - [SKName(KeyParam), Description("The key associated with the memory to retrieve")] string key, - ILoggerFactory? loggerFactory, + [Description("The key associated with the memory to retrieve")] string key, + [Description("Memories collection associated with the memory to retrieve")] string? collection = DefaultCollection, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(collection); Verify.NotNullOrWhiteSpace(key); - loggerFactory?.CreateLogger(typeof(TextMemoryPlugin)).LogDebug("Recalling memory with key '{0}' from collection '{1}'", key, collection); + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Recalling memory with key '{0}' from collection '{1}'", key, collection); + } var memory = await this._memory.GetAsync(collection, key, cancellationToken: cancellationToken).ConfigureAwait(false); @@ -88,32 +89,29 @@ public async Task RetrieveAsync( /// /// Semantic search and return up to N memories related to the input text /// - /// - /// SKContext.Variables["input"] = "what is the capital of France?" - /// {{memory.recall $input }} => "Paris" - /// /// The input text to find related memories for. /// Memories collection to search. /// The relevance score, from 0.0 to 1.0, where 1.0 means perfect match. /// The maximum number of relevant memories to recall. - /// The to use for logging. If null, no logging will be performed. /// The to monitor for cancellation requests. The default is . - [SKFunction, Description("Semantic search and return up to N memories related to the input text")] + [KernelFunction, Description("Semantic search and return up to N memories related to the input text")] public async Task RecallAsync( [Description("The input text to find related memories for")] string input, - [SKName(CollectionParam), Description("Memories collection to search"), DefaultValue(DefaultCollection)] string collection, - [SKName(RelevanceParam), Description("The relevance score, from 0.0 to 1.0, where 1.0 means perfect match"), DefaultValue(DefaultRelevance)] double? relevance, - [SKName(LimitParam), Description("The maximum number of relevant memories to recall"), DefaultValue(DefaultLimit)] int? limit, - ILoggerFactory? loggerFactory, + [Description("Memories collection to search")] string collection = DefaultCollection, + [Description("The relevance score, from 0.0 to 1.0, where 1.0 means perfect match")] double? relevance = DefaultRelevance, + [Description("The maximum number of relevant memories to recall")] int? limit = DefaultLimit, CancellationToken cancellationToken = default) { + Verify.NotNullOrWhiteSpace(input); Verify.NotNullOrWhiteSpace(collection); + relevance ??= DefaultRelevance; limit ??= DefaultLimit; - ILogger? 
logger = loggerFactory?.CreateLogger(typeof(TextMemoryPlugin)); - - logger?.LogDebug("Searching memories in collection '{0}', relevance '{1}'", collection, relevance); + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Searching memories in collection '{0}', relevance '{1}'", collection, relevance); + } // Search memory List memories = await this._memory @@ -123,39 +121,37 @@ public async Task RecallAsync( if (memories.Count == 0) { - logger?.LogWarning("Memories not found in collection: {0}", collection); + if (this._logger.IsEnabled(LogLevel.Warning)) + { + this._logger.LogWarning("Memories not found in collection: {0}", collection); + } return string.Empty; } - logger?.LogTrace("Done looking for memories in collection '{0}')", collection); return limit == 1 ? memories[0].Metadata.Text : JsonSerializer.Serialize(memories.Select(x => x.Metadata.Text)); } /// /// Save information to semantic memory /// - /// - /// SKContext.Variables["input"] = "the capital of France is Paris" - /// SKContext.Variables[TextMemoryPlugin.KeyParam] = "countryInfo1" - /// {{memory.save $input }} - /// /// The information to save - /// Memories collection associated with the information to save /// The key associated with the information to save - /// The to use for logging. If null, no logging will be performed. + /// Memories collection associated with the information to save /// The to monitor for cancellation requests. The default is . - [SKFunction, Description("Save information to semantic memory")] + [KernelFunction, Description("Save information to semantic memory")] public async Task SaveAsync( [Description("The information to save")] string input, - [SKName(CollectionParam), Description("Memories collection associated with the information to save"), DefaultValue(DefaultCollection)] string collection, - [SKName(KeyParam), Description("The key associated with the information to save")] string key, - ILoggerFactory? loggerFactory, + [Description("The key associated with the information to save")] string key, + [Description("Memories collection associated with the information to save")] string collection = DefaultCollection, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(collection); Verify.NotNullOrWhiteSpace(key); - loggerFactory?.CreateLogger(typeof(TextMemoryPlugin)).LogDebug("Saving memory to collection '{0}'", collection); + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Saving memory to collection '{0}'", collection); + } await this._memory.SaveInformationAsync(collection, text: input, id: key, cancellationToken: cancellationToken).ConfigureAwait(false); } @@ -163,25 +159,22 @@ public async Task SaveAsync( /// /// Remove specific memory /// - /// - /// SKContext.Variables[TextMemoryPlugin.KeyParam] = "countryInfo1" - /// {{memory.remove }} - /// - /// Memories collection associated with the information to save /// The key associated with the information to save - /// The to use for logging. If null, no logging will be performed. + /// Memories collection associated with the information to save /// The to monitor for cancellation requests. The default is . 
- [SKFunction, Description("Remove specific memory")] + [KernelFunction, Description("Remove specific memory")] public async Task RemoveAsync( - [SKName(CollectionParam), Description("Memories collection associated with the information to save"), DefaultValue(DefaultCollection)] string collection, - [SKName(KeyParam), Description("The key associated with the information to save")] string key, - ILoggerFactory? loggerFactory, + [Description("The key associated with the information to save")] string key, + [Description("Memories collection associated with the information to save")] string collection = DefaultCollection, CancellationToken cancellationToken = default) { Verify.NotNullOrWhiteSpace(collection); Verify.NotNullOrWhiteSpace(key); - loggerFactory?.CreateLogger(typeof(TextMemoryPlugin)).LogDebug("Removing memory from collection '{0}'", collection); + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Removing memory from collection '{0}'", collection); + } await this._memory.RemoveAsync(collection, key, cancellationToken: cancellationToken).ConfigureAwait(false); } diff --git a/dotnet/src/Plugins/Plugins.Memory/VolatileMemoryStore.cs b/dotnet/src/Plugins/Plugins.Memory/VolatileMemoryStore.cs index 0cb4ddba93b9..5dddcec51bf0 100644 --- a/dotnet/src/Plugins/Plugins.Memory/VolatileMemoryStore.cs +++ b/dotnet/src/Plugins/Plugins.Memory/VolatileMemoryStore.cs @@ -9,11 +9,8 @@ using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Plugins.Memory.Collections; -namespace Microsoft.SemanticKernel.Plugins.Memory; +namespace Microsoft.SemanticKernel.Memory; /// /// A simple volatile memory embeddings store. @@ -46,7 +43,7 @@ public Task DeleteCollectionAsync(string collectionName, CancellationToken cance { if (!this._store.TryRemove(collectionName, out _)) { - return Task.FromException(new SKException($"Could not delete collection {collectionName}")); + return Task.FromException(new KernelException($"Could not delete collection {collectionName}")); } return Task.CompletedTask; @@ -65,7 +62,7 @@ public Task UpsertAsync(string collectionName, MemoryRecord record, Canc } else { - return Task.FromException(new SKException($"Attempted to access a memory collection that does not exist: {collectionName}")); + return Task.FromException(new KernelException($"Attempted to access a memory collection that does not exist: {collectionName}")); } return Task.FromResult(record.Key); @@ -120,7 +117,7 @@ public Task RemoveAsync(string collectionName, string key, CancellationToken can { if (this.TryGetCollection(collectionName, out var collectionDict)) { - collectionDict.TryRemove(key, out MemoryRecord _); + collectionDict.TryRemove(key, out _); } return Task.CompletedTask; diff --git a/dotnet/src/Plugins/Plugins.MsGraph/AssemblyInfo.cs b/dotnet/src/Plugins/Plugins.MsGraph/AssemblyInfo.cs new file mode 100644 index 000000000000..2e9f2af2242d --- /dev/null +++ b/dotnet/src/Plugins/Plugins.MsGraph/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
+[assembly: Experimental("SKEXP0053")] diff --git a/dotnet/src/Plugins/Plugins.MsGraph/CalendarPlugin.cs b/dotnet/src/Plugins/Plugins.MsGraph/CalendarPlugin.cs index 314122196722..78d424d9690d 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/CalendarPlugin.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/CalendarPlugin.cs @@ -10,7 +10,6 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Plugins.MsGraph.Diagnostics; using Microsoft.SemanticKernel.Plugins.MsGraph.Models; @@ -21,47 +20,6 @@ namespace Microsoft.SemanticKernel.Plugins.MsGraph; /// public sealed class CalendarPlugin { - /// - /// parameter names. - /// - public static class Parameters - { - /// - /// Event start as DateTimeOffset. - /// - public const string Start = "start"; - - /// - /// Event end as DateTimeOffset. - /// - public const string End = "end"; - - /// - /// Event's location. - /// - public const string Location = "location"; - - /// - /// Event's content. - /// - public const string Content = "content"; - - /// - /// Event's attendees, separated by ',' or ';'. - /// - public const string Attendees = "attendees"; - - /// - /// The name of the top parameter used to limit the number of results returned in the response. - /// - public const string MaxResults = "maxResults"; - - /// - /// The name of the skip parameter used to skip a certain number of results in the response. - /// - public const string Skip = "skip"; - } - private readonly ICalendarConnector _connector; private readonly ILogger _logger; private static readonly JsonSerializerOptions s_options = new() @@ -69,6 +27,7 @@ public static class Parameters WriteIndented = false, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, }; + private static readonly char[] s_separator = { ',', ';' }; /// /// Initializes a new instance of the class. @@ -80,34 +39,34 @@ public CalendarPlugin(ICalendarConnector connector, ILoggerFactory? loggerFactor Ensure.NotNull(connector, nameof(connector)); this._connector = connector; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(CalendarPlugin)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(CalendarPlugin)) ?? NullLogger.Instance; } /// - /// Add an event to my calendar using as the subject. + /// Add an event to my calendar. /// - [SKFunction, Description("Add an event to my calendar.")] + [KernelFunction, Description("Add an event to my calendar.")] public async Task AddEventAsync( - [Description("Event subject"), SKName("input")] string subject, + [Description("Event subject")] string input, [Description("Event start date/time as DateTimeOffset")] DateTimeOffset start, [Description("Event end date/time as DateTimeOffset")] DateTimeOffset end, [Description("Event location (optional)")] string? location = null, [Description("Event content/body (optional)")] string? content = null, [Description("Event attendees, separated by ',' or ';'.")] string? 
attendees = null) { - if (string.IsNullOrWhiteSpace(subject)) + if (string.IsNullOrWhiteSpace(input)) { - throw new ArgumentException($"{nameof(subject)} variable was null or whitespace", nameof(subject)); + throw new ArgumentException($"{nameof(input)} variable was null or whitespace", nameof(input)); } CalendarEvent calendarEvent = new() { - Subject = subject, + Subject = input, Start = start, End = end, Location = location, Content = content, - Attendees = attendees is not null ? attendees.Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries) : Enumerable.Empty(), + Attendees = attendees is not null ? attendees.Split(s_separator, StringSplitOptions.RemoveEmptyEntries) : Enumerable.Empty(), }; // Sensitive data, logging as trace, disabled by default @@ -118,7 +77,7 @@ public async Task AddEventAsync( /// /// Get calendar events with specified optional clauses used to query for messages. /// - [SKFunction, Description("Get calendar events.")] + [KernelFunction, Description("Get calendar events.")] public async Task GetCalendarEventsAsync( [Description("Optional limit of the number of events to retrieve.")] int? maxResults = 10, [Description("Optional number of events to skip before retrieving results.")] int? skip = 0, diff --git a/dotnet/src/Plugins/Plugins.MsGraph/CloudDrivePlugin.cs b/dotnet/src/Plugins/Plugins.MsGraph/CloudDrivePlugin.cs index f38aebf8519e..934a207ebb8e 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/CloudDrivePlugin.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/CloudDrivePlugin.cs @@ -7,7 +7,6 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Plugins.MsGraph.Diagnostics; namespace Microsoft.SemanticKernel.Plugins.MsGraph; @@ -17,17 +16,6 @@ namespace Microsoft.SemanticKernel.Plugins.MsGraph; /// public sealed class CloudDrivePlugin { - /// - /// parameter names. - /// - public static class Parameters - { - /// - /// Document file path. - /// - public const string DestinationPath = "destinationPath"; - } - private readonly ICloudDriveConnector _connector; private readonly ILogger _logger; @@ -41,7 +29,7 @@ public CloudDrivePlugin(ICloudDriveConnector connector, ILoggerFactory? loggerFa Ensure.NotNull(connector, nameof(connector)); this._connector = connector; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(CloudDrivePlugin)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(CloudDrivePlugin)) ?? NullLogger.Instance; } /// @@ -50,7 +38,7 @@ public CloudDrivePlugin(ICloudDriveConnector connector, ILoggerFactory? loggerFa /// The path to the file. /// A cancellation token to observe while waiting for the task to complete. /// A string containing the file content. - [SKFunction, Description("Get the contents of a file in a cloud drive.")] + [KernelFunction, Description("Get the contents of a file in a cloud drive.")] public async Task GetFileContentAsync( [Description("Path to file")] string filePath, CancellationToken cancellationToken = default) @@ -70,7 +58,7 @@ public async Task GetFileContentAsync( /// The path to the file. /// The remote path to store the file. /// A cancellation token to observe while waiting for the task to complete. 
- [SKFunction, Description("Upload a small file to OneDrive (less than 4MB).")] + [KernelFunction, Description("Upload a small file to OneDrive (less than 4MB).")] public async Task UploadFileAsync( [Description("Path to file")] string filePath, [Description("Remote path to store the file")] string destinationPath, @@ -93,7 +81,7 @@ public async Task UploadFileAsync( /// The path to the file. /// A cancellation token to observe while waiting for the task to complete. /// A string containing the sharable link. - [SKFunction, Description("Create a sharable link to a file stored in a cloud drive.")] + [KernelFunction, Description("Create a sharable link to a file stored in a cloud drive.")] public async Task CreateLinkAsync( [Description("Path to file")] string filePath, CancellationToken cancellationToken = default) diff --git a/dotnet/src/Plugins/Plugins.MsGraph/Connectors/Client/MsGraphClientLoggingHandler.cs b/dotnet/src/Plugins/Plugins.MsGraph/Connectors/Client/MsGraphClientLoggingHandler.cs index 88196b4a77ff..9efe68358de4 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/Connectors/Client/MsGraphClientLoggingHandler.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/Connectors/Client/MsGraphClientLoggingHandler.cs @@ -54,14 +54,14 @@ protected override async Task SendAsync(HttpRequestMessage request.Headers.Add(ClientRequestIdHeaderName, Guid.NewGuid().ToString()); this.LogHttpMessage(request.Headers, request.RequestUri, "REQUEST"); HttpResponseMessage response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false); - this.LogHttpMessage(response.Headers, response.RequestMessage.RequestUri, "RESPONSE"); + this.LogHttpMessage(response.Headers, response.RequestMessage?.RequestUri, "RESPONSE"); return response; } /// /// Log the headers and URI of an HTTP message. /// - private void LogHttpMessage(HttpHeaders headers, Uri uri, string prefix) + private void LogHttpMessage(HttpHeaders headers, Uri? uri, string prefix) { if (this._logger.IsEnabled(LogLevel.Debug)) { @@ -69,7 +69,7 @@ private void LogHttpMessage(HttpHeaders headers, Uri uri, string prefix) message.AppendLine($"{prefix} {uri}"); foreach (string headerName in this._headerNamesToLog) { - if (headers.TryGetValues(headerName, out IEnumerable values)) + if (headers.TryGetValues(headerName, out IEnumerable? 
values)) { message.AppendLine($"{headerName}: {string.Join(", ", values)}"); } diff --git a/dotnet/src/Plugins/Plugins.MsGraph/Connectors/CredentialManagers/LocalUserMSALCredentialManager.cs b/dotnet/src/Plugins/Plugins.MsGraph/Connectors/CredentialManagers/LocalUserMSALCredentialManager.cs index 2031ee521dc2..c6d9f61ddf02 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/Connectors/CredentialManagers/LocalUserMSALCredentialManager.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/Connectors/CredentialManagers/LocalUserMSALCredentialManager.cs @@ -93,7 +93,7 @@ public async Task GetTokenAsync(string clientId, string tenantId, string { IPublicClientApplication newPublicApp = PublicClientApplicationBuilder.Create(clientId) .WithRedirectUri(redirectUri.ToString()) - .WithTenantId(tenantId) + .WithAuthority(AzureCloudInstance.AzurePublic, tenantId) .Build(); this._cacheHelper.RegisterCache(newPublicApp.UserTokenCache); return newPublicApp; diff --git a/dotnet/src/Plugins/Plugins.MsGraph/Connectors/MicrosoftToDoConnector.cs b/dotnet/src/Plugins/Plugins.MsGraph/Connectors/MicrosoftToDoConnector.cs index 615657388836..1c37d98dab7f 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/Connectors/MicrosoftToDoConnector.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/Connectors/MicrosoftToDoConnector.cs @@ -7,7 +7,6 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Graph; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Plugins.MsGraph.Connectors.Diagnostics; using Microsoft.SemanticKernel.Plugins.MsGraph.Models; using TaskStatus = Microsoft.Graph.TaskStatus; @@ -50,7 +49,7 @@ public MicrosoftToDoConnector(GraphServiceClient graphServiceClient) if (result == null) { - throw new SKException("Could not find default task list."); + throw new KernelException("Could not find default task list."); } return new TaskManagementTaskList(result.Id, result.DisplayName); diff --git a/dotnet/src/Plugins/Plugins.MsGraph/Connectors/OneDriveConnector.cs b/dotnet/src/Plugins/Plugins.MsGraph/Connectors/OneDriveConnector.cs index c9a086e3ca2a..ff2b541807e1 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/Connectors/OneDriveConnector.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/Connectors/OneDriveConnector.cs @@ -7,8 +7,8 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Graph; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Plugins.MsGraph.Connectors.Diagnostics; + namespace Microsoft.SemanticKernel.Plugins.MsGraph.Connectors; /// @@ -138,7 +138,7 @@ public async Task CreateShareLinkAsync(string filePath, string type = "v string? result = (await response.GetResponseObjectAsync().ConfigureAwait(false)).Link?.WebUrl; if (string.IsNullOrWhiteSpace(result)) { - throw new SKException("Shareable file link was null or whitespace."); + throw new KernelException("Shareable file link was null or whitespace."); } return result!; diff --git a/dotnet/src/Plugins/Plugins.MsGraph/Diagnostics/NullableAttributes.cs b/dotnet/src/Plugins/Plugins.MsGraph/Diagnostics/NullableAttributes.cs deleted file mode 100644 index 5ca5ef37dc19..000000000000 --- a/dotnet/src/Plugins/Plugins.MsGraph/Diagnostics/NullableAttributes.cs +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. 
-// See the LICENSE file in the project root for more information.// This was copied from https://github.com/dotnet/runtime/blob/39b9607807f29e48cae4652cd74735182b31182e/src/libraries/System.Private.CoreLib/src/System/Diagnostics/CodeAnalysis/NullableAttributes.cs -// and updated to have the scope of the attributes be internal. - -#pragma warning disable IDE0130 // Namespace does not match folder structure -// ReSharper disable once CheckNamespace -namespace System.Diagnostics.CodeAnalysis; -#pragma warning restore IDE0130 - -#if !NETCOREAPP - -/// Specifies that null is allowed as an input even if the corresponding type disallows it. -[AttributeUsage(AttributeTargets.Field | AttributeTargets.Parameter | AttributeTargets.Property, Inherited = false)] -internal sealed class AllowNullAttribute : Attribute -{ -} - -/// Specifies that null is disallowed as an input even if the corresponding type allows it. -[AttributeUsage(AttributeTargets.Field | AttributeTargets.Parameter | AttributeTargets.Property, Inherited = false)] -internal sealed class DisallowNullAttribute : Attribute -{ -} - -/// Specifies that an output may be null even if the corresponding type disallows it. -[AttributeUsage(AttributeTargets.Field | AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.ReturnValue, Inherited = false)] -internal sealed class MaybeNullAttribute : Attribute -{ -} - -/// Specifies that an output will not be null even if the corresponding type allows it. -[AttributeUsage(AttributeTargets.Field | AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.ReturnValue, Inherited = false)] -internal sealed class NotNullAttribute : Attribute -{ -} - -/// Specifies that when a method returns , the parameter may be null even if the corresponding type disallows it. -[AttributeUsage(AttributeTargets.Parameter, Inherited = false)] -internal sealed class MaybeNullWhenAttribute : Attribute -{ - /// Initializes the attribute with the specified return value condition. - /// - /// The return value condition. If the method returns this value, the associated parameter may be null. - /// - public MaybeNullWhenAttribute(bool returnValue) => this.ReturnValue = returnValue; - - /// Gets the return value condition. - public bool ReturnValue { get; } -} - -/// Specifies that when a method returns , the parameter will not be null even if the corresponding type allows it. -[AttributeUsage(AttributeTargets.Parameter, Inherited = false)] -internal sealed class NotNullWhenAttribute : Attribute -{ - /// Initializes the attribute with the specified return value condition. - /// - /// The return value condition. If the method returns this value, the associated parameter will not be null. - /// - public NotNullWhenAttribute(bool returnValue) => this.ReturnValue = returnValue; - - /// Gets the return value condition. - public bool ReturnValue { get; } -} - -/// Specifies that the output will be non-null if the named parameter is non-null. -[AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.ReturnValue, AllowMultiple = true, Inherited = false)] -internal sealed class NotNullIfNotNullAttribute : Attribute -{ - /// Initializes the attribute with the associated parameter name. - /// - /// The associated parameter name. The output will be non-null if the argument to the parameter specified is non-null. - /// - public NotNullIfNotNullAttribute(string parameterName) => this.ParameterName = parameterName; - - /// Gets the associated parameter name. 
- public string ParameterName { get; } -} - -/// Applied to a method that will never return under any circumstance. -[AttributeUsage(AttributeTargets.Method, Inherited = false)] -internal sealed class DoesNotReturnAttribute : Attribute -{ -} - -/// Specifies that the method will not return if the associated Boolean parameter is passed the specified value. -[AttributeUsage(AttributeTargets.Parameter, Inherited = false)] -internal sealed class DoesNotReturnIfAttribute : Attribute -{ - /// Initializes the attribute with the specified parameter value. - /// - /// The condition parameter value. Code after the method will be considered unreachable by diagnostics if the argument to - /// the associated parameter matches this value. - /// - public DoesNotReturnIfAttribute(bool parameterValue) => this.ParameterValue = parameterValue; - - /// Gets the condition parameter value. - public bool ParameterValue { get; } -} - -#endif - -#if !NETCOREAPP || NETCOREAPP3_1 - -/// Specifies that the method or property will ensure that the listed field and property members have not-null values. -[AttributeUsage(AttributeTargets.Method | AttributeTargets.Property, Inherited = false, AllowMultiple = true)] -internal sealed class MemberNotNullAttribute : Attribute -{ - /// Initializes the attribute with a field or property member. - /// - /// The field or property member that is promised to be not-null. - /// - [SuppressMessage("Design", "CA1019:Define accessors for attribute arguments")] - public MemberNotNullAttribute(string member) => this.Members = new[] { member }; - - /// Initializes the attribute with the list of field and property members. - /// - /// The list of field and property members that are promised to be not-null. - /// - public MemberNotNullAttribute(params string[] members) => this.Members = members; - - /// Gets field or property member names. - public string[] Members { get; } -} - -/// Specifies that the method or property will ensure that the listed field and property members have not-null values when returning with the specified return value condition. -[AttributeUsage(AttributeTargets.Method | AttributeTargets.Property, Inherited = false, AllowMultiple = true)] -internal sealed class MemberNotNullWhenAttribute : Attribute -{ - /// Initializes the attribute with the specified return value condition and a field or property member. - /// - /// The return value condition. If the method returns this value, the associated parameter will not be null. - /// - /// - /// The field or property member that is promised to be not-null. - /// - [SuppressMessage("Design", "CA1019:Define accessors for attribute arguments")] - public MemberNotNullWhenAttribute(bool returnValue, string member) - { - this.ReturnValue = returnValue; - this.Members = new[] { member }; - } - - /// Initializes the attribute with the specified return value condition and list of field and property members. - /// - /// The return value condition. If the method returns this value, the associated parameter will not be null. - /// - /// - /// The list of field and property members that are promised to be not-null. - /// - public MemberNotNullWhenAttribute(bool returnValue, params string[] members) - { - this.ReturnValue = returnValue; - this.Members = members; - } - - /// Gets the return value condition. - public bool ReturnValue { get; } - - /// Gets field or property member names. 
- public string[] Members { get; } -} - -#endif diff --git a/dotnet/src/Plugins/Plugins.MsGraph/EmailPlugin.cs b/dotnet/src/Plugins/Plugins.MsGraph/EmailPlugin.cs index d20351690cc6..4e502ae51278 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/EmailPlugin.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/EmailPlugin.cs @@ -9,7 +9,6 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Plugins.MsGraph.Diagnostics; using Microsoft.SemanticKernel.Plugins.MsGraph.Models; @@ -20,32 +19,6 @@ namespace Microsoft.SemanticKernel.Plugins.MsGraph; /// public sealed class EmailPlugin { - /// - /// parameter names. - /// - public static class Parameters - { - /// - /// Email recipients, separated by ',' or ';'. - /// - public const string Recipients = "recipients"; - - /// - /// Email subject. - /// - public const string Subject = "subject"; - - /// - /// The name of the top parameter used to limit the number of results returned in the response. - /// - public const string MaxResults = "maxResults"; - - /// - /// The name of the skip parameter used to skip a certain number of results in the response. - /// - public const string Skip = "skip"; - } - private readonly IEmailConnector _connector; private readonly ILogger _logger; private static readonly JsonSerializerOptions s_options = new() @@ -53,6 +26,7 @@ public static class Parameters WriteIndented = false, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, }; + private static readonly char[] s_separator = { ',', ';' }; /// /// Initializes a new instance of the class. @@ -64,20 +38,20 @@ public EmailPlugin(IEmailConnector connector, ILoggerFactory? loggerFactory = nu Ensure.NotNull(connector, nameof(connector)); this._connector = connector; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(EmailPlugin)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(EmailPlugin)) ?? NullLogger.Instance; } /// /// Get my email address. /// - [SKFunction, Description("Gets the email address for me.")] + [KernelFunction, Description("Gets the email address for me.")] public async Task GetMyEmailAddressAsync() => await this._connector.GetMyEmailAddressAsync().ConfigureAwait(false); /// - /// Send an email using as the body. + /// Send an email. /// - [SKFunction, Description("Send an email to one or more recipients.")] + [KernelFunction, Description("Send an email to one or more recipients.")] public async Task SendEmailAsync( [Description("Email content/body")] string content, [Description("Recipients of the email, separated by ',' or ';'.")] string recipients, @@ -96,14 +70,14 @@ public async Task SendEmailAsync( // Sensitive data, logging as trace, disabled by default this._logger.LogTrace("Sending email to '{0}' with subject '{1}'", recipients, subject); - string[] recipientList = recipients.Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries); + string[] recipientList = recipients.Split(s_separator, StringSplitOptions.RemoveEmptyEntries); await this._connector.SendEmailAsync(subject, content, recipientList, cancellationToken).ConfigureAwait(false); } /// /// Get email messages with specified optional clauses used to query for messages. 
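Aside, not part of the diff: the plugin hunks in this area all converge on the same shape. [KernelFunction] replaces [SKFunction], the separator characters passed to Split are cached in a static readonly array instead of being allocated per call, and the logger is resolved with a null-coalescing expression. A minimal sketch of that shape, assuming the Microsoft.SemanticKernel 1.x package; ISampleConnector and SamplePlugin are invented names for illustration, not repository types.

using System;
using System.ComponentModel;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.SemanticKernel;

// Hypothetical connector interface, present only to make the sketch self-contained.
public interface ISampleConnector
{
    void Send(string[] recipients);
}

public sealed class SamplePlugin
{
    // Cached once instead of allocating a new char[] on every Split call.
    private static readonly char[] s_separator = { ',', ';' };

    private readonly ISampleConnector _connector;
    private readonly ILogger _logger;

    public SamplePlugin(ISampleConnector connector, ILoggerFactory? loggerFactory = null)
    {
        this._connector = connector ?? throw new ArgumentNullException(nameof(connector));
        // Null-coalescing replaces the verbose "is not null ? ... : NullLogger.Instance" form.
        this._logger = loggerFactory?.CreateLogger(typeof(SamplePlugin)) ?? NullLogger.Instance;
    }

    [KernelFunction, Description("Send a message to one or more recipients.")]
    public void Send(
        [Description("Message recipients, separated by ',' or ';'.")] string recipients)
    {
        string[] recipientList = recipients.Split(s_separator, StringSplitOptions.RemoveEmptyEntries);
        this._logger.LogTrace("Sending to {Count} recipients", recipientList.Length);
        this._connector.Send(recipientList);
    }
}

Caching the separator array is what the new s_separator fields in CalendarPlugin and EmailPlugin do; it avoids rebuilding the same char[] on every invocation.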
/// - [SKFunction, Description("Get email messages.")] + [KernelFunction, Description("Get email messages.")] public async Task GetEmailMessagesAsync( [Description("Optional limit of the number of message to retrieve.")] int? maxResults = 10, [Description("Optional number of message to skip before retrieving results.")] int? skip = 0, diff --git a/dotnet/src/Plugins/Plugins.MsGraph/OrganizationHierarchyPlugin.cs b/dotnet/src/Plugins/Plugins.MsGraph/OrganizationHierarchyPlugin.cs index 156212199fa0..a38274d3bd29 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/OrganizationHierarchyPlugin.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/OrganizationHierarchyPlugin.cs @@ -32,7 +32,7 @@ public OrganizationHierarchyPlugin(IOrganizationHierarchyConnector connector) /// /// An optional to observe while waiting for the task to complete. /// A JSON string containing the email addresses of the direct reports of the current user. - [SKFunction, Description("Get my direct report's email addresses.")] + [KernelFunction, Description("Get my direct report's email addresses.")] public async Task GetMyDirectReportsEmailAsync(CancellationToken cancellationToken = default) => JsonSerializer.Serialize(await this._connector.GetDirectReportsEmailAsync(cancellationToken).ConfigureAwait(false)); @@ -41,7 +41,7 @@ public async Task GetMyDirectReportsEmailAsync(CancellationToken cancell /// /// An optional to observe while waiting for the task to complete. /// A string containing the email address of the manager of the current user. - [SKFunction, Description("Get my manager's email address.")] + [KernelFunction, Description("Get my manager's email address.")] public async Task GetMyManagerEmailAsync(CancellationToken cancellationToken = default) => await this._connector.GetManagerEmailAsync(cancellationToken).ConfigureAwait(false); @@ -50,7 +50,7 @@ public async Task GetMyManagerEmailAsync(CancellationToken cancellationT /// /// An optional to observe while waiting for the task to complete. /// A string containing the name of the manager of the current user. - [SKFunction, Description("Get my manager's name.")] + [KernelFunction, Description("Get my manager's name.")] public async Task GetMyManagerNameAsync(CancellationToken cancellationToken = default) => await this._connector.GetManagerNameAsync(cancellationToken).ConfigureAwait(false); } diff --git a/dotnet/src/Plugins/Plugins.MsGraph/Plugins.MsGraph.csproj b/dotnet/src/Plugins/Plugins.MsGraph/Plugins.MsGraph.csproj index 52004dbd4c09..c77934124df6 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/Plugins.MsGraph.csproj +++ b/dotnet/src/Plugins/Plugins.MsGraph/Plugins.MsGraph.csproj @@ -5,9 +5,11 @@ Microsoft.SemanticKernel.Plugins.MsGraph $(AssemblyName) netstandard2.0 + alpha + diff --git a/dotnet/src/Plugins/Plugins.MsGraph/TaskListPlugin.cs b/dotnet/src/Plugins/Plugins.MsGraph/TaskListPlugin.cs index e530072de005..3a548ae80fca 100644 --- a/dotnet/src/Plugins/Plugins.MsGraph/TaskListPlugin.cs +++ b/dotnet/src/Plugins/Plugins.MsGraph/TaskListPlugin.cs @@ -8,7 +8,6 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Orchestration; using Microsoft.SemanticKernel.Plugins.MsGraph.Diagnostics; using Microsoft.SemanticKernel.Plugins.MsGraph.Models; @@ -19,22 +18,6 @@ namespace Microsoft.SemanticKernel.Plugins.MsGraph; /// public sealed class TaskListPlugin { - /// - /// parameter names. - /// - public static class Parameters - { - /// - /// Task reminder as DateTimeOffset. 
- /// - public const string Reminder = "reminder"; - - /// - /// Whether to include completed tasks. - /// - public const string IncludeCompleted = "includeCompleted"; - } - private readonly ITaskManagementConnector _connector; private readonly ILogger _logger; @@ -48,7 +31,7 @@ public TaskListPlugin(ITaskManagementConnector connector, ILoggerFactory? logger Ensure.NotNull(connector, nameof(connector)); this._connector = connector; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(TaskListPlugin)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(TaskListPlugin)) ?? NullLogger.Instance; } /// @@ -72,7 +55,7 @@ public static DateTimeOffset GetNextDayOfWeek(DayOfWeek dayOfWeek, TimeSpan time /// /// Add a task to a To-Do list with an optional reminder. /// - [SKFunction, Description("Add a task to a task list with an optional reminder.")] + [KernelFunction, Description("Add a task to a task list with an optional reminder.")] public async Task AddTaskAsync( [Description("Title of the task.")] string title, [Description("Reminder for the task in DateTimeOffset (optional)")] string? reminder = null, @@ -98,7 +81,7 @@ public async Task AddTaskAsync( /// /// Get tasks from the default task list. /// - [SKFunction, Description("Get tasks from the default task list.")] + [KernelFunction, Description("Get tasks from the default task list.")] public async Task GetDefaultTasksAsync( [Description("Whether to include completed tasks (optional)")] string includeCompleted = "false", CancellationToken cancellationToken = default) diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Core/FileIOPluginTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Core/FileIOPluginTests.cs index 45e8aa69c1ec..70b7aac4a4fc 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Core/FileIOPluginTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Core/FileIOPluginTests.cs @@ -21,14 +21,8 @@ public void ItCanBeInstantiated() [Fact] public void ItCanBeImported() { - // Arrange - var kernel = new KernelBuilder().Build(); - - // Act - var functions = kernel.ImportFunctions(new FileIOPlugin(), "fileIO"); - - // Assert no exception occurs e.g. due to reflection - Assert.NotNull(functions); + // Act - Assert no exception occurs e.g. due to reflection + Assert.NotNull(KernelPluginFactory.CreateFromType("fileIO")); } [Fact] @@ -37,7 +31,7 @@ public async Task ItCanReadAsync() // Arrange var plugin = new FileIOPlugin(); var path = Path.GetTempFileName(); - File.WriteAllText(path, "hello world"); + await File.WriteAllTextAsync(path, "hello world"); // Act var result = await plugin.ReadAsync(path); diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Core/HttpPluginTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Core/HttpPluginTests.cs index 6c49006dbd54..3ca7765db480 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Core/HttpPluginTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Core/HttpPluginTests.cs @@ -34,12 +34,8 @@ public void ItCanBeInstantiated() [Fact] public void ItCanBeImported() { - // Arrange - var kernel = KernelBuilder.Create(); - var plugin = new HttpPlugin(); - // Act - Assert no exception occurs e.g. 
due to reflection - kernel.ImportFunctions(plugin, "http"); + Assert.NotNull(KernelPluginFactory.CreateFromType("http")); } [Fact] diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Core/MathPluginTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Core/MathPluginTests.cs index 184c9890c500..a38a8c8b20a6 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Core/MathPluginTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Core/MathPluginTests.cs @@ -21,11 +21,8 @@ public void ItCanBeInstantiated() [Fact] public void ItCanBeImported() { - // Arrange - var kernel = new KernelBuilder().Build(); - // Act - Assert no exception occurs e.g. due to reflection - kernel.ImportFunctions(new MathPlugin(), "math"); + Assert.NotNull(KernelPluginFactory.CreateFromType("math")); } [Theory] @@ -43,7 +40,11 @@ public async Task AddWhenValidParametersShouldSucceedAsync(int initialValue, int var target = new MathPlugin(); // Act - var result = await FunctionHelpers.CallViaKernelAsync(target, "Add", ("input", initialValue), ("amount", amount)); + var result = await KernelPluginFactory.CreateFromObject(target)["Add"].InvokeAsync(new(), new() + { + ["value"] = initialValue, + ["amount"] = amount, + }); // Assert Assert.Equal(expectedResult, result.GetValue()); @@ -64,7 +65,11 @@ public async Task SubtractWhenValidParametersShouldSucceedAsync(int initialValue var target = new MathPlugin(); // Act - var result = await FunctionHelpers.CallViaKernelAsync(target, "Subtract", ("input", initialValue), ("amount", amount)); // Assert + var result = await KernelPluginFactory.CreateFromObject(target)["Subtract"].InvokeAsync(new(), new() + { + ["value"] = initialValue, + ["amount"] = amount, + }); // Assert Assert.Equal(expectedResult, result.GetValue()); @@ -85,10 +90,15 @@ public async Task SubtractWhenValidParametersShouldSucceedAsync(int initialValue public async Task AddWhenInvalidInitialValueShouldThrowAsync(string initialValue) { // Arrange - var target = new MathPlugin(); + KernelFunction func = KernelPluginFactory.CreateFromType()["Add"]; // Act - var ex = await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(target, "Add", ("input", initialValue), ("amount", "1"))); + var ex = await Assert.ThrowsAsync(() => + func.InvokeAsync(new(), new() + { + ["value"] = initialValue, + ["amount"] = "1", + })); // Assert AssertExtensions.AssertIsArgumentOutOfRange(ex, "value", initialValue); @@ -109,11 +119,15 @@ public async Task AddWhenInvalidInitialValueShouldThrowAsync(string initialValue public async Task AddWhenInvalidAmountShouldThrowAsync(string amount) { // Arrange - var target = new MathPlugin(); + KernelFunction func = KernelPluginFactory.CreateFromType()["Add"]; // Act - var ex = await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(target, "Add", ("input", "1"), ("amount", amount))); - + var ex = await Assert.ThrowsAsync(() => + func.InvokeAsync(new(), new() + { + ["value"] = "1", + ["amount"] = amount, + })); // Assert AssertExtensions.AssertIsArgumentOutOfRange(ex, "amount", amount); } @@ -133,10 +147,15 @@ public async Task AddWhenInvalidAmountShouldThrowAsync(string amount) public async Task SubtractWhenInvalidInitialValueShouldThrowAsync(string initialValue) { // Arrange - var target = new MathPlugin(); + KernelFunction func = KernelPluginFactory.CreateFromType()["Subtract"]; // Act - var ex = await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(target, "Subtract", ("input", initialValue), ("amount", "1"))); + var ex = await Assert.ThrowsAsync(() => + func.InvokeAsync(new(), new() + 
{ + ["value"] = initialValue, + ["amount"] = "1", + })); // Assert AssertExtensions.AssertIsArgumentOutOfRange(ex, "value", initialValue); @@ -157,10 +176,15 @@ public async Task SubtractWhenInvalidInitialValueShouldThrowAsync(string initial public async Task SubtractAsyncWhenInvalidAmountShouldThrowAsync(string amount) { // Arrange - var target = new MathPlugin(); + KernelFunction func = KernelPluginFactory.CreateFromType()["Subtract"]; // Act - var ex = await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(target, "Subtract", ("input", "1"), ("amount", amount))); + var ex = await Assert.ThrowsAsync(() => + func.InvokeAsync(new(), new() + { + ["value"] = "1", + ["amount"] = amount, + })); // Assert AssertExtensions.AssertIsArgumentOutOfRange(ex, "amount", amount); diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Core/TextPluginTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Core/TextPluginTests.cs index 80d683fb95f4..e162750e2cf8 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Core/TextPluginTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Core/TextPluginTests.cs @@ -18,11 +18,8 @@ public void ItCanBeInstantiated() [Fact] public void ItCanBeImported() { - // Arrange - var kernel = new KernelBuilder().Build(); - // Act - Assert no exception occurs e.g. due to reflection - kernel.ImportFunctions(new TextPlugin(), "text"); + Assert.NotNull(KernelPluginFactory.CreateFromType("text")); } [Fact] @@ -98,13 +95,13 @@ public void ItCanLowercase() [InlineData("", 0)] [InlineData(" ", 1)] [InlineData(null, 0)] - public void ItCanLength(string textToLength, int expectedLength) + public void ItCanLength(string? textToLength, int expectedLength) { // Arrange var target = new TextPlugin(); // Act - var result = target.Length(textToLength); + var result = target.Length(textToLength ?? string.Empty); // Assert Assert.Equal(expectedLength, result); @@ -118,14 +115,14 @@ public void ItCanLength(string textToLength, int expectedLength) [InlineData("", "")] [InlineData(" ", " ")] [InlineData(null, "")] - public void ItCanConcat(string textToConcat, string text2ToConcat) + public void ItCanConcat(string? textToConcat, string text2ToConcat) { // Arrange var target = new TextPlugin(); var expected = string.Concat(textToConcat, text2ToConcat); // Act - string result = target.Concat(textToConcat, text2ToConcat); + string result = target.Concat(textToConcat ?? string.Empty, text2ToConcat); // Assert Assert.Equal(expected, result); diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Core/TimePluginTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Core/TimePluginTests.cs index f4b85f1b981d..3df8f6d2636e 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Core/TimePluginTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Core/TimePluginTests.cs @@ -24,11 +24,8 @@ public void ItCanBeInstantiated() [Fact] public void ItCanBeImported() { - // Arrange - var kernel = new KernelBuilder().Build(); - // Act - Assert no exception occurs e.g. 
due to reflection - kernel.ImportFunctions(new TimePlugin(), "time"); + Assert.NotNull(KernelPluginFactory.CreateFromType("time")); } [Fact] @@ -57,9 +54,9 @@ public void Day() [Fact] public async Task LastMatchingDayBadInputAsync() { - var plugin = new TimePlugin(); + KernelFunction func = KernelPluginFactory.CreateFromType()["DateMatchingLastDayName"]; - var ex = await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(plugin, "DateMatchingLastDayName", ("input", "not a day name"))); + var ex = await Assert.ThrowsAsync(() => func.InvokeAsync(new(), new() { ["input"] = "not a day name" })); AssertExtensions.AssertIsArgumentOutOfRange(ex, "input", "not a day name"); } diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Core/WaitPluginTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Core/WaitPluginTests.cs index a6d23b1fb427..f1002856b502 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Core/WaitPluginTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Core/WaitPluginTests.cs @@ -23,11 +23,8 @@ public void ItCanBeInstantiated() [Fact] public void ItCanBeImported() { - // Arrange - var kernel = new KernelBuilder().Build(); - // Act - Assert no exception occurs e.g. due to reflection - kernel.ImportFunctions(new WaitPlugin(), "wait"); + Assert.NotNull(KernelPluginFactory.CreateFromType("wait")); } [Theory] @@ -49,7 +46,7 @@ public async Task ItWaitSecondsWhenValidParametersSucceedAsync(string textSecond // Act and Assert long startingTime = timeProvider.GetTimestamp(); - Task wait = FunctionHelpers.CallViaKernelAsync(target, "Seconds", ("input", textSeconds)); + Task wait = KernelPluginFactory.CreateFromObject(target)["Seconds"].InvokeAsync(new(), new() { ["seconds"] = textSeconds }); if (expectedMilliseconds > 0) { @@ -78,10 +75,10 @@ public async Task ItWaitSecondsWhenValidParametersSucceedAsync(string textSecond public async Task ItWaitSecondsWhenInvalidParametersFailsAsync(string textSeconds) { // Arrange - var target = new WaitPlugin(); + KernelFunction func = KernelPluginFactory.CreateFromType()["Seconds"]; // Act - var ex = await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(target, "Seconds", ("input", textSeconds))); + var ex = await Assert.ThrowsAsync(() => func.InvokeAsync(new(), new() { ["seconds"] = textSeconds })); // Assert AssertExtensions.AssertIsArgumentOutOfRange(ex, "seconds", textSeconds); diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Memory/MemoryBuilderTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Memory/MemoryBuilderTests.cs index 92aa2ca5dc07..bf849f66e222 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Memory/MemoryBuilderTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Memory/MemoryBuilderTests.cs @@ -1,15 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. +using System.Net.Http; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Embeddings; using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Plugins.Memory; using Moq; using Xunit; -namespace SemanticKernel.Plugins.UnitTests.Memory; +namespace SemanticKernel.UnitTests.Memory; /// /// Unit tests for class. 
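Aside, not part of the diff: the rewritten unit tests share one invocation pattern. A plugin is built with KernelPluginFactory, a function is picked from it by name, and it is invoked with a Kernel instance plus named KernelArguments, replacing the old FunctionHelpers.CallViaKernelAsync tuple helper. A minimal sketch under that assumption, reusing the MathPlugin "Add" function and the "value"/"amount" argument names from the hunks above; the test class itself is hypothetical.

using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.Core;
using Xunit;

public class MathPluginInvocationExample
{
    [Fact]
    public async Task AddViaKernelArgumentsAsync()
    {
        // Build the plugin from its type and pick a function by name.
        KernelFunction add = KernelPluginFactory.CreateFromType<MathPlugin>()["Add"];

        // Invoke with a Kernel instance and named arguments instead of positional tuples.
        FunctionResult result = await add.InvokeAsync(new Kernel(), new KernelArguments
        {
            ["value"] = 2,
            ["amount"] = 3,
        });

        Assert.Equal(5, result.GetValue<int>());
    }
}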
@@ -23,7 +22,7 @@ public void ItThrowsExceptionWhenMemoryStoreIsNotProvided() var builder = new MemoryBuilder(); // Act - var exception = Assert.Throws(() => builder.Build()); + var exception = Assert.Throws(() => builder.Build()); // Assert Assert.Equal("IMemoryStore dependency was not provided. Use WithMemoryStore method.", exception.Message); @@ -37,10 +36,10 @@ public void ItThrowsExceptionWhenEmbeddingGenerationIsNotProvided() .WithMemoryStore(Mock.Of()); // Act - var exception = Assert.Throws(() => builder.Build()); + var exception = Assert.Throws(() => builder.Build()); // Assert - Assert.Equal("ITextEmbeddingGeneration dependency was not provided. Use WithTextEmbeddingGeneration method.", exception.Message); + Assert.Equal("ITextEmbeddingGenerationService dependency was not provided. Use WithTextEmbeddingGeneration method.", exception.Message); } [Fact] @@ -49,7 +48,7 @@ public void ItInitializesMemoryWhenRequiredDependenciesAreProvided() // Arrange var builder = new MemoryBuilder() .WithMemoryStore(Mock.Of()) - .WithTextEmbeddingGeneration(Mock.Of()); + .WithTextEmbeddingGeneration(Mock.Of()); // Act var memory = builder.Build(); @@ -75,39 +74,39 @@ public void ItUsesProvidedLoggerFactory() return Mock.Of(); }) - .WithTextEmbeddingGeneration((loggerFactory, httpHandlerFactory) => + .WithTextEmbeddingGeneration((loggerFactory, httpClient) => { Assert.Same(loggerFactoryUsed, loggerFactory); Assert.NotSame(loggerFactoryUnused, loggerFactory); - return Mock.Of(); + return Mock.Of(); }) .Build(); } [Fact] - public void ItUsesProvidedHttpHandlerFactory() + public void ItUsesProvidedHttpClientFactory() { // Arrange - var httpHandlerFactoryUsed = Mock.Of(); - var httpHandlerFactoryUnused = Mock.Of(); + using var httpClientUsed = new HttpClient(); + using var httpClientUnused = new HttpClient(); // Act & Assert var builder = new MemoryBuilder() - .WithHttpHandlerFactory(httpHandlerFactoryUsed) - .WithMemoryStore((loggerFactory, httpHandlerFactory) => + .WithHttpClient(httpClientUsed) + .WithMemoryStore((loggerFactory, httpClient) => { - Assert.Same(httpHandlerFactoryUsed, httpHandlerFactory); - Assert.NotSame(httpHandlerFactoryUnused, httpHandlerFactory); + Assert.Same(httpClientUsed, httpClient); + Assert.NotSame(httpClientUnused, httpClient); return Mock.Of(); }) - .WithTextEmbeddingGeneration((loggerFactory, httpHandlerFactory) => + .WithTextEmbeddingGeneration((loggerFactory, httpClient) => { - Assert.Same(httpHandlerFactoryUsed, httpHandlerFactory); - Assert.NotSame(httpHandlerFactoryUnused, httpHandlerFactory); + Assert.Same(httpClientUsed, httpClient); + Assert.NotSame(httpClientUnused, httpClient); - return Mock.Of(); + return Mock.Of(); }) .Build(); } diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Memory/VolatileMemoryStoreTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Memory/VolatileMemoryStoreTests.cs index 4fb078a6db79..0c50a7add840 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Memory/VolatileMemoryStoreTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Memory/VolatileMemoryStoreTests.cs @@ -5,12 +5,11 @@ using System.Collections.Immutable; using System.Linq; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Plugins.Memory; using Xunit; -namespace SemanticKernel.Plugins.UnitTests.Memory; +namespace SemanticKernel.UnitTests.Memory; public class VolatileMemoryStoreTests { @@ -100,7 +99,7 @@ public async Task 
ItCannotInsertIntoNonExistentCollectionAsync() this._collectionNum++; // Assert - await Assert.ThrowsAsync(async () => await this._db.UpsertAsync(collection, testRecord)); + await Assert.ThrowsAsync(async () => await this._db.UpsertAsync(collection, testRecord)); } [Fact] @@ -587,6 +586,6 @@ public async Task ItThrowsWhenDeletingNonExistentCollectionAsync() this._collectionNum++; // Act - await Assert.ThrowsAsync(() => this._db.DeleteCollectionAsync(collection)); + await Assert.ThrowsAsync(() => this._db.DeleteCollectionAsync(collection)); } } diff --git a/dotnet/src/Plugins/Plugins.UnitTests/MsGraph/CalendarPluginTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/MsGraph/CalendarPluginTests.cs index 573e6c6c69a9..05e31967b40d 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/MsGraph/CalendarPluginTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/MsGraph/CalendarPluginTests.cs @@ -1,14 +1,12 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.Globalization; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Plugins.MsGraph; using Microsoft.SemanticKernel.Plugins.MsGraph.Models; using Moq; -using SemanticKernel.UnitTests; using Xunit; namespace SemanticKernel.Plugins.UnitTests.MsGraph; @@ -40,13 +38,15 @@ public async Task AddEventAsyncSucceedsAsync() CalendarPlugin target = new(connectorMock.Object); // Act - var context = await FunctionHelpers.CallViaKernelAsync(target, "AddEvent", - ("input", anySubject), - ("start", anyStartTime.ToString(CultureInfo.InvariantCulture)), - ("end", anyEndTime.ToString(CultureInfo.InvariantCulture)), - ("location", anyLocation), - ("content", anyContent), - ("attendees", string.Join(";", anyAttendees))); + var context = await KernelPluginFactory.CreateFromObject(target)["AddEvent"].InvokeAsync(new(), new() + { + ["input"] = anySubject, + ["start"] = anyStartTime, + ["end"] = anyEndTime, + ["location"] = anyLocation, + ["content"] = anyContent, + ["attendees"] = string.Join(";", anyAttendees) + }); // Assert connectorMock.VerifyAll(); @@ -78,12 +78,14 @@ public async Task AddEventAsyncWithoutLocationSucceedsAsync() CalendarPlugin target = new(connectorMock.Object); // Act - var context = await FunctionHelpers.CallViaKernelAsync(target, "AddEvent", - ("input", anySubject), - ("start", anyStartTime.ToString(CultureInfo.InvariantCulture)), - ("end", anyEndTime.ToString(CultureInfo.InvariantCulture)), - ("content", anyContent), - ("attendees", string.Join(";", anyAttendees))); + var context = await KernelPluginFactory.CreateFromObject(target)["AddEvent"].InvokeAsync(new(), new() + { + ["input"] = anySubject, + ["start"] = anyStartTime, + ["end"] = anyEndTime, + ["content"] = anyContent, + ["attendees"] = string.Join(";", anyAttendees), + }); // Assert connectorMock.VerifyAll(); @@ -115,12 +117,14 @@ public async Task AddEventAsyncWithoutContentSucceedsAsync() CalendarPlugin target = new(connectorMock.Object); // Act - var context = await FunctionHelpers.CallViaKernelAsync(target, "AddEvent", - ("input", anySubject), - ("start", anyStartTime.ToString(CultureInfo.InvariantCulture)), - ("end", anyEndTime.ToString(CultureInfo.InvariantCulture)), - ("location", anyLocation), - ("attendees", string.Join(";", anyAttendees))); + var context = await KernelPluginFactory.CreateFromObject(target)["AddEvent"].InvokeAsync(new(), new() + { + ["input"] = anySubject, + ["start"] = anyStartTime, + ["end"] = anyEndTime, + ["location"] = 
anyLocation, + ["attendees"] = string.Join(";", anyAttendees), + }); // Assert connectorMock.VerifyAll(); @@ -152,12 +156,14 @@ public async Task AddEventAsyncWithoutAttendeesSucceedsAsync() CalendarPlugin target = new(connectorMock.Object); // Act - var context = await FunctionHelpers.CallViaKernelAsync(target, "AddEvent", - ("input", anySubject), - ("start", anyStartTime.ToString(CultureInfo.InvariantCulture)), - ("end", anyEndTime.ToString(CultureInfo.InvariantCulture)), - ("location", anyLocation), - ("content", anyContent)); + var context = await KernelPluginFactory.CreateFromObject(target)["AddEvent"].InvokeAsync(new(), new() + { + ["input"] = anySubject, + ["start"] = anyStartTime, + ["end"] = anyEndTime, + ["location"] = anyLocation, + ["attendees"] = anyContent, + }); // Assert connectorMock.VerifyAll(); @@ -178,13 +184,14 @@ public async Task AddEventAsyncWithoutStartFailsAsync() CalendarPlugin target = new(connectorMock.Object); // Act and Assert - await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(target, "AddEvent", - ("input", anySubject), - ("end", anyEndTime.ToString(CultureInfo.InvariantCulture)), - ("location", anyLocation), - ("content", anyContent), - ("attendees", string.Join(";", anyAttendees))) - ); + await Assert.ThrowsAsync(() => KernelPluginFactory.CreateFromObject(target)["AddEvent"].InvokeAsync(new(), new() + { + ["input"] = anySubject, + ["end"] = anyEndTime, + ["location"] = anyLocation, + ["content"] = anyContent, + ["attendees"] = string.Join(";", anyAttendees), + })); } [Fact] @@ -202,13 +209,14 @@ public async Task AddEventAsyncWithoutEndFailsAsync() CalendarPlugin target = new(connectorMock.Object); // Act - await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(target, "AddEvent", - ("input", anySubject), - ("start", anyStartTime.ToString(CultureInfo.InvariantCulture)), - ("location", anyLocation), - ("content", anyContent), - ("attendees", string.Join(";", anyAttendees))) - ); + await Assert.ThrowsAsync(() => KernelPluginFactory.CreateFromObject(target)["AddEvent"].InvokeAsync(new(), new() + { + ["input"] = anySubject, + ["start"] = anyStartTime, + ["location"] = anyLocation, + ["content"] = anyContent, + ["attendees"] = string.Join(";", anyAttendees), + })); } [Fact] @@ -226,13 +234,14 @@ public async Task AddEventAsyncWithoutSubjectFailsAsync() CalendarPlugin target = new(connectorMock.Object); // Act & Assert - var ex = await Assert.ThrowsAsync(() => FunctionHelpers.CallViaKernelAsync(target, "AddEvent", - ("start", anyStartTime.ToString(CultureInfo.InvariantCulture)), - ("end", anyEndTime.ToString(CultureInfo.InvariantCulture)), - ("location", anyLocation), - ("content", anyContent), - ("attendees", string.Join(";", anyAttendees))) - ); + var ex = await Assert.ThrowsAsync(() => KernelPluginFactory.CreateFromObject(target)["AddEvent"].InvokeAsync(new(), new() + { + ["start"] = anyStartTime, + ["end"] = anyEndTime, + ["location"] = anyLocation, + ["content"] = anyContent, + ["attendees"] = string.Join(";", anyAttendees), + })); Assert.True(ex.InnerException is ArgumentException); Assert.Equal("input", ((ArgumentException)ex.InnerException).ParamName); diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Plugins.UnitTests.csproj b/dotnet/src/Plugins/Plugins.UnitTests/Plugins.UnitTests.csproj index ef6f62b2800a..60d06162de6f 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Plugins.UnitTests.csproj +++ b/dotnet/src/Plugins/Plugins.UnitTests/Plugins.UnitTests.csproj @@ -9,7 +9,7 @@ enable disable false - CA2007,VSTHRD111 + 
CA2007,VSTHRD111,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0050,SKEXP0051,SKEXP0052,SKEXP0053,SKEXP0054 @@ -22,6 +22,10 @@ runtime; build; native; contentfiles; analyzers; buildtransitive all + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + diff --git a/dotnet/src/Plugins/Plugins.UnitTests/Web/SearchUrlSkillTests.cs b/dotnet/src/Plugins/Plugins.UnitTests/Web/SearchUrlSkillTests.cs index 2e74c54d48e5..ac0bf4d48796 100644 --- a/dotnet/src/Plugins/Plugins.UnitTests/Web/SearchUrlSkillTests.cs +++ b/dotnet/src/Plugins/Plugins.UnitTests/Web/SearchUrlSkillTests.cs @@ -22,11 +22,8 @@ public void ItCanBeInstantiated() [Fact] public void ItCanBeImported() { - // Arrange - IKernel kernel = new KernelBuilder().Build(); - // Act - Assert no exception occurs e.g. due to reflection - kernel.ImportFunctions(new SearchUrlPlugin(), "search"); + Assert.NotNull(KernelPluginFactory.CreateFromType("search")); } [Fact] diff --git a/dotnet/src/Plugins/Plugins.Web/AssemblyInfo.cs b/dotnet/src/Plugins/Plugins.Web/AssemblyInfo.cs new file mode 100644 index 000000000000..7435b9a52159 --- /dev/null +++ b/dotnet/src/Plugins/Plugins.Web/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0054")] diff --git a/dotnet/src/Plugins/Plugins.Web/Bing/BingConnector.cs b/dotnet/src/Plugins/Plugins.Web/Bing/BingConnector.cs index fb8fddd4ecdd..7f26c5a17bef 100644 --- a/dotnet/src/Plugins/Plugins.Web/Bing/BingConnector.cs +++ b/dotnet/src/Plugins/Plugins.Web/Bing/BingConnector.cs @@ -11,7 +11,7 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Http; namespace Microsoft.SemanticKernel.Plugins.Web.Bing; @@ -30,7 +30,7 @@ public sealed class BingConnector : IWebSearchEngineConnector /// The API key to authenticate the connector. /// The to use for logging. If null, no logging will be performed. public BingConnector(string apiKey, ILoggerFactory? loggerFactory = null) : - this(apiKey, new HttpClient(NonDisposableHttpClientHandler.Instance, false), loggerFactory) + this(apiKey, HttpClientProvider.GetHttpClient(), loggerFactory) { } @@ -45,9 +45,9 @@ public BingConnector(string apiKey, HttpClient httpClient, ILoggerFactory? logge Verify.NotNull(httpClient); this._apiKey = apiKey; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(BingConnector)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(BingConnector)) ?? 
NullLogger.Instance; this._httpClient = httpClient; - this._httpClient.DefaultRequestHeaders.Add("User-Agent", Telemetry.HttpUserAgent); + this._httpClient.DefaultRequestHeaders.Add("User-Agent", HttpHeaderValues.UserAgent); } /// diff --git a/dotnet/src/Plugins/Plugins.Web/Google/GoogleConnector.cs b/dotnet/src/Plugins/Plugins.Web/Google/GoogleConnector.cs index 2720b3c51838..9e558459f238 100644 --- a/dotnet/src/Plugins/Plugins.Web/Google/GoogleConnector.cs +++ b/dotnet/src/Plugins/Plugins.Web/Google/GoogleConnector.cs @@ -9,7 +9,6 @@ using Google.Apis.Services; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; namespace Microsoft.SemanticKernel.Plugins.Web.Google; @@ -53,7 +52,7 @@ public GoogleConnector( this._search = new CustomSearchAPIService(initializer); this._searchEngineId = searchEngineId; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(GoogleConnector)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(GoogleConnector)) ?? NullLogger.Instance; } /// diff --git a/dotnet/src/Plugins/Plugins.Web/Plugins.Web.csproj b/dotnet/src/Plugins/Plugins.Web/Plugins.Web.csproj index d8154e524997..f450f8fabb14 100644 --- a/dotnet/src/Plugins/Plugins.Web/Plugins.Web.csproj +++ b/dotnet/src/Plugins/Plugins.Web/Plugins.Web.csproj @@ -5,6 +5,7 @@ Microsoft.SemanticKernel.Plugins.Web $(AssemblyName) netstandard2.0 + alpha diff --git a/dotnet/src/Plugins/Plugins.Web/SearchUrlPlugin.cs b/dotnet/src/Plugins/Plugins.Web/SearchUrlPlugin.cs index c7e2abd64e93..e1e5155d7a09 100644 --- a/dotnet/src/Plugins/Plugins.Web/SearchUrlPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Web/SearchUrlPlugin.cs @@ -18,7 +18,7 @@ public sealed class SearchUrlPlugin /// /// Get search URL for Amazon /// - [SKFunction, Description("Return URL for Amazon search query")] + [KernelFunction, Description("Return URL for Amazon search query")] public string AmazonSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -31,7 +31,7 @@ public string AmazonSearchUrl([Description("Text to search for")] string query) /// /// Get search URL for Bing /// - [SKFunction, Description("Return URL for Bing search query.")] + [KernelFunction, Description("Return URL for Bing search query.")] public string BingSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -41,7 +41,7 @@ public string BingSearchUrl([Description("Text to search for")] string query) /// /// Get search URL for Bing Images /// - [SKFunction, Description("Return URL for Bing Images search query.")] + [KernelFunction, Description("Return URL for Bing Images search query.")] public string BingImagesSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -51,7 +51,7 @@ public string BingImagesSearchUrl([Description("Text to search for")] string que /// /// Get search URL for Bing Maps /// - [SKFunction, Description("Return URL for Bing Maps search query.")] + [KernelFunction, Description("Return URL for Bing Maps search query.")] public string BingMapsSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -61,7 +61,7 @@ public string BingMapsSearchUrl([Description("Text to search for")] string query /// /// Get search URL for Bing Shopping /// - [SKFunction, Description("Return URL for Bing Shopping search 
query.")] + [KernelFunction, Description("Return URL for Bing Shopping search query.")] public string BingShoppingSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -71,7 +71,7 @@ public string BingShoppingSearchUrl([Description("Text to search for")] string q /// /// Get search URL for Bing News /// - [SKFunction, Description("Return URL for Bing News search query.")] + [KernelFunction, Description("Return URL for Bing News search query.")] public string BingNewsSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -81,7 +81,7 @@ public string BingNewsSearchUrl([Description("Text to search for")] string query /// /// Get search URL for Bing Travel /// - [SKFunction, Description("Return URL for Bing Travel search query.")] + [KernelFunction, Description("Return URL for Bing Travel search query.")] public string BingTravelSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -94,7 +94,7 @@ public string BingTravelSearchUrl([Description("Text to search for")] string que /// /// Get search URL for Facebook /// - [SKFunction, Description("Return URL for Facebook search query.")] + [KernelFunction, Description("Return URL for Facebook search query.")] public string FacebookSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -107,7 +107,7 @@ public string FacebookSearchUrl([Description("Text to search for")] string query /// /// Get search URL for GitHub /// - [SKFunction, Description("Return URL for GitHub search query.")] + [KernelFunction, Description("Return URL for GitHub search query.")] public string GitHubSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -120,7 +120,7 @@ public string GitHubSearchUrl([Description("Text to search for")] string query) /// /// Get search URL for LinkedIn /// - [SKFunction, Description("Return URL for LinkedIn search query.")] + [KernelFunction, Description("Return URL for LinkedIn search query.")] public string LinkedInSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -133,7 +133,7 @@ public string LinkedInSearchUrl([Description("Text to search for")] string query /// /// Get search URL for Twitter /// - [SKFunction, Description("Return URL for Twitter search query.")] + [KernelFunction, Description("Return URL for Twitter search query.")] public string TwitterSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); @@ -146,7 +146,7 @@ public string TwitterSearchUrl([Description("Text to search for")] string query) /// /// Get search URL for Wikipedia /// - [SKFunction, Description("Return URL for Wikipedia search query.")] + [KernelFunction, Description("Return URL for Wikipedia search query.")] public string WikipediaSearchUrl([Description("Text to search for")] string query) { string encoded = UrlEncoder.Default.Encode(query); diff --git a/dotnet/src/Plugins/Plugins.Web/WebFileDownloadPlugin.cs b/dotnet/src/Plugins/Plugins.Web/WebFileDownloadPlugin.cs index a231b9e3ec6b..a1bc16fa7182 100644 --- a/dotnet/src/Plugins/Plugins.Web/WebFileDownloadPlugin.cs +++ b/dotnet/src/Plugins/Plugins.Web/WebFileDownloadPlugin.cs @@ -9,6 +9,7 @@ using System.Threading.Tasks; using Microsoft.Extensions.Logging; using 
Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Http; namespace Microsoft.SemanticKernel.Plugins.Web; @@ -30,7 +31,7 @@ public sealed class WebFileDownloadPlugin /// /// The to use for logging. If null, no logging will be performed. public WebFileDownloadPlugin(ILoggerFactory? loggerFactory = null) : - this(new HttpClient(NonDisposableHttpClientHandler.Instance, false), loggerFactory) + this(HttpClientProvider.GetHttpClient(), loggerFactory) { } @@ -42,7 +43,7 @@ public WebFileDownloadPlugin(ILoggerFactory? loggerFactory = null) : public WebFileDownloadPlugin(HttpClient httpClient, ILoggerFactory? loggerFactory = null) { this._httpClient = httpClient; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(WebFileDownloadPlugin)) : NullLogger.Instance; + this._logger = loggerFactory?.CreateLogger(typeof(WebFileDownloadPlugin)) ?? NullLogger.Instance; } /// @@ -53,7 +54,7 @@ public WebFileDownloadPlugin(HttpClient httpClient, ILoggerFactory? loggerFactor /// The token to use to request cancellation. /// Task. /// Thrown when the location where to download the file is not provided - [SKFunction, Description("Downloads a file to local storage")] + [KernelFunction, Description("Downloads a file to local storage")] public async Task DownloadToFileAsync( [Description("URL of file to download")] Uri url, [Description("Path where to save file locally")] string filePath, diff --git a/dotnet/src/Plugins/Plugins.Web/WebSearchEnginePlugin.cs b/dotnet/src/Plugins/Plugins.Web/WebSearchEnginePlugin.cs index ce480ed58651..c9abab4b4f86 100644 --- a/dotnet/src/Plugins/Plugins.Web/WebSearchEnginePlugin.cs +++ b/dotnet/src/Plugins/Plugins.Web/WebSearchEnginePlugin.cs @@ -3,6 +3,7 @@ using System; using System.ComponentModel; using System.Linq; +using System.Text.Encodings.Web; using System.Text.Json; using System.Threading; using System.Threading.Tasks; @@ -26,6 +27,14 @@ public sealed class WebSearchEnginePlugin private readonly IWebSearchEngineConnector _connector; + /// + /// The usage of JavaScriptEncoder.UnsafeRelaxedJsonEscaping here is considered safe in this context + /// because the JSON result is not used for any security sensitive operations like HTML injection. + /// + private static readonly JsonSerializerOptions s_jsonOptionsCache = new() + { + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, + }; /// /// Initializes a new instance of the class. /// @@ -43,21 +52,25 @@ public WebSearchEnginePlugin(IWebSearchEngineConnector connector) /// The number of results to skip. Default is 0. /// A cancellation token to observe while waiting for the task to complete. /// A task that represents the asynchronous operation. The value of the TResult parameter contains the search results as a string. - [SKFunction, Description("Perform a web search.")] + /// + /// This method is marked as "unsafe." The usage of JavaScriptEncoder.UnsafeRelaxedJsonEscaping may introduce security risks. + /// Only use this method if you are aware of the potential risks and have validated the input to prevent security vulnerabilities. 
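Aside, not part of the diff: the new s_jsonOptionsCache field reuses a single JsonSerializerOptions instance configured with JavaScriptEncoder.UnsafeRelaxedJsonEscaping. A small standalone sketch of what that encoder changes when serializing search results; the sample strings are invented.

using System;
using System.Text.Encodings.Web;
using System.Text.Json;

internal static class EscapingDemo
{
    // Mirrors the cached options added to WebSearchEnginePlugin.
    private static readonly JsonSerializerOptions s_relaxed = new()
    {
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
    };

    public static void Main()
    {
        string[] results = { "C# & .NET", "a+b" };

        // Default encoder escapes HTML-sensitive characters, e.g. "&" becomes "\u0026".
        Console.WriteLine(JsonSerializer.Serialize(results));

        // Relaxed encoder keeps them literal, which is acceptable here because the
        // JSON is returned as plain text and never interpreted as HTML.
        Console.WriteLine(JsonSerializer.Serialize(results, s_relaxed));
    }
}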
+ /// + [KernelFunction, Description("Perform a web search.")] public async Task SearchAsync( [Description("Search query")] string query, [Description("Number of results")] int count = 10, [Description("Number of results to skip")] int offset = 0, CancellationToken cancellationToken = default) { - var results = await this._connector.SearchAsync(query, count, offset, cancellationToken).ConfigureAwait(false); - if (!results.Any()) + var results = (await this._connector.SearchAsync(query, count, offset, cancellationToken).ConfigureAwait(false)).ToArray(); + if (results.Length == 0) { throw new InvalidOperationException("Failed to get a response from the web search engine."); } return count == 1 - ? results.FirstOrDefault() ?? string.Empty - : JsonSerializer.Serialize(results); + ? results[0] ?? string.Empty + : JsonSerializer.Serialize(results, s_jsonOptionsCache); } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/AIFunctionResultExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/AIFunctionResultExtensions.cs deleted file mode 100644 index a443b25ccd7e..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/AIFunctionResultExtensions.cs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.AI; - -/// -/// Class with extension methods related to AI logic for class. -/// -public static class AIFunctionResultExtensions -{ - /// - /// Function result metadata key for records. - /// - public const string ModelResultsMetadataKey = "ModelResults"; - - /// - /// Returns collection of records from metadata. - /// - /// Instance of class. - public static IReadOnlyCollection? GetModelResults(this FunctionResult result) - { - if (result.TryGetMetadataValue(ModelResultsMetadataKey, out IReadOnlyCollection? modelResults)) - { - return modelResults; - } - - return null; - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/AIRequestSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/AIRequestSettings.cs deleted file mode 100644 index 4856b2adc563..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/AIRequestSettings.cs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.AI.TextCompletion; - -namespace Microsoft.SemanticKernel.AI; - -/// -/// Request settings for an AI request. -/// Implementors of or can extend this -/// if the service they are calling supports additional properties. For an example please reference -/// the Microsoft.SemanticKernel.Connectors.AI.OpenAI.OpenAIRequestSettings implementation. -/// -public class AIRequestSettings -{ - private Dictionary? _extensionData; - - /// - /// Service identifier. - /// This identifies a service and is set when the AI service is registered. - /// - [JsonPropertyName("service_id")] - public string? ServiceId { get; set; } = null; - - /// - /// Model identifier. - /// This identifies the AI model these settings are configured for e.g., gpt-4, gpt-3.5-turbo - /// - [JsonPropertyName("model_id")] - public string? 
ModelId { get; set; } = null; - - /// - /// Extra properties - /// - [JsonExtensionData] - public Dictionary ExtensionData - { - get => this._extensionData ??= new(); - set => this._extensionData = value; - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs index cf29fb2dd63b..7c572509056c 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs @@ -1,10 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. using System; -using System.ComponentModel; -using Microsoft.SemanticKernel.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; -namespace Microsoft.SemanticKernel.AI.ChatCompletion; +namespace Microsoft.SemanticKernel.ChatCompletion; /// /// A description of the intended purpose of a message within a chat completions interaction. @@ -14,22 +14,22 @@ namespace Microsoft.SemanticKernel.AI.ChatCompletion; /// /// The role that instructs or sets the behavior of the assistant. /// - public static readonly AuthorRole System = new("system"); + public static AuthorRole System { get; } = new("system"); /// /// The role that provides responses to system-instructed, user-prompted input. /// - public static readonly AuthorRole Assistant = new("assistant"); + public static AuthorRole Assistant { get; } = new("assistant"); /// /// The role that provides input for chat completions. /// - public static readonly AuthorRole User = new("user"); + public static AuthorRole User { get; } = new("user"); /// /// The role that provides additional information and references for chat completions. /// - public static readonly AuthorRole Tool = new("tool"); + public static AuthorRole Tool { get; } = new("tool"); /// /// Gets the label associated with this AuthorRole. @@ -42,10 +42,11 @@ namespace Microsoft.SemanticKernel.AI.ChatCompletion; /// /// Creates a new AuthorRole instance with the provided label. /// - /// + /// The label to associate with this AuthorRole. + [JsonConstructor] public AuthorRole(string label) { - Verify.NotNull(label, nameof(label)); + Verify.NotNullOrWhiteSpace(label, nameof(label)); this.Label = label!; } @@ -57,9 +58,7 @@ public AuthorRole(string label) /// the second AuthorRole instance to compare /// true if left and right are both null or have equivalent labels; false otherwise public static bool operator ==(AuthorRole left, AuthorRole right) - { - return left.Equals(right); - } + => left.Equals(right); /// /// Returns a value indicating whether two AuthorRole instances are not equivalent, as determined by a @@ -72,19 +71,17 @@ public AuthorRole(string label) => !(left == right); /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override bool Equals(object obj) + public override bool Equals([NotNullWhen(true)] object? obj) => obj is AuthorRole otherRole && this == otherRole; - /// - [EditorBrowsable(EditorBrowsableState.Never)] - public override int GetHashCode() - => this.Label.GetHashCode(); - /// public bool Equals(AuthorRole other) => string.Equals(this.Label, other.Label, StringComparison.OrdinalIgnoreCase); /// - public override string ToString() => this.Label; + public override int GetHashCode() + => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label ?? string.Empty); + + /// + public override string ToString() => this.Label ?? 
string.Empty; } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionExtensions.cs deleted file mode 100644 index 94dcdd9c39d9..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionExtensions.cs +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.AI.ChatCompletion; -/// -/// Provides extension methods for the IChatCompletion interface. -/// -public static class ChatCompletionExtensions -{ - /// - /// Generates a new chat message as an asynchronous stream. - /// - /// The target IChatCompletion interface to extend. - /// The chat history. - /// The AI request settings (optional). - /// The asynchronous cancellation token (optional). - /// This extension does not support multiple prompt results (only the first will be returned). - /// An asynchronous stream of the generated chat message in string format. - public static async IAsyncEnumerable GenerateMessageStreamAsync( - this IChatCompletion chatCompletion, - ChatHistory chat, - AIRequestSettings? requestSettings = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - // Using var below results in Microsoft.CSharp.RuntimeBinder.RuntimeBinderException : Cannot apply indexing with [] to an expression of type 'object' - IAsyncEnumerable chatCompletionResults = chatCompletion.GetStreamingChatCompletionsAsync(chat, requestSettings, cancellationToken); - await foreach (var chatCompletionResult in chatCompletionResults) - { - await foreach (var chatMessageStream in chatCompletionResult.GetStreamingChatMessageAsync(cancellationToken).ConfigureAwait(false)) - { - yield return chatMessageStream.Content; - } - - yield break; - } - } - - /// - /// Generates a new chat message asynchronously. - /// - /// The target IChatCompletion interface to extend. - /// The chat history. - /// The AI request settings (optional). - /// The asynchronous cancellation token (optional). - /// This extension does not support multiple prompt results (only the first will be returned). - /// A task representing the generated chat message in string format. - public static async Task GenerateMessageAsync( - this IChatCompletion chatCompletion, - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - // Using var below results in Microsoft.CSharp.RuntimeBinder.RuntimeBinderException : Cannot apply indexing with [] to an expression of type 'object' - IReadOnlyList chatResults = await chatCompletion.GetChatCompletionsAsync(chat, requestSettings, cancellationToken).ConfigureAwait(false); - var firstChatMessage = await chatResults[0].GetChatMessageAsync(cancellationToken).ConfigureAwait(false); - return firstChatMessage.Content; - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs index 589ca69aecdd..102faca62de8 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs @@ -1,40 +1,113 @@ // Copyright (c) Microsoft. All rights reserved. 
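// Illustrative sketch (not part of the diff): with the AuthorRole changes above, labels compare
// case-insensitively and hash codes follow suit, so a role deserialized from JSON matches the
// built-in values regardless of casing.
using Microsoft.SemanticKernel.ChatCompletion;

var role = new AuthorRole("ASSISTANT");
bool sameRole = role == AuthorRole.Assistant;                              // true
bool sameHash = role.GetHashCode() == AuthorRole.Assistant.GetHashCode(); // true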
-using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Services; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; -// Use base namespace for better discoverability and to avoid conflicts with other extensions. -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 // Namespace does not match folder structure +namespace Microsoft.SemanticKernel.ChatCompletion; /// -/// Provides extension methods for working with chat completion services. +/// Provides extension methods for the interface. /// public static class ChatCompletionServiceExtensions { /// - /// Get the matching the given , or - /// the default if is not provided or not found. + /// Get multiple chat message content choices for the prompt and settings. /// - /// The service provider. - /// Optional identifier of the desired service. - /// The completion service id matching the given id or the default. - /// Thrown when no suitable service is found. - public static IChatCompletion GetChatCompletionService( - this IAIServiceProvider services, - string? serviceId = null) => services.GetService(serviceId) - ?? throw new SKException("Chat completion service not found"); + /// + /// This should be used when the settings request more than one choice. + /// + /// Target chat completion service. + /// The standardized prompt input. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// List of different chat message content choices generated by the remote model + public static Task> GetChatMessageContentsAsync( + this IChatCompletionService chatCompletionService, + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + // Try to parse the text as a chat history + if (ChatPromptParser.TryParse(prompt, out var chatHistoryFromPrompt)) + { + return chatCompletionService.GetChatMessageContentsAsync(chatHistoryFromPrompt, executionSettings, kernel, cancellationToken); + } + + // Otherwise, use the prompt as the chat user message + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage(prompt); + + return chatCompletionService.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + } + + /// + /// Get a single chat message content for the prompt and settings. + /// + /// The target IChatCompletionService interface to extend. + /// The standardized prompt input. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Single chat message content generated by the remote model + public static async Task GetChatMessageContentAsync( + this IChatCompletionService chatCompletionService, + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => (await chatCompletionService.GetChatMessageContentsAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) + .Single(); /// - /// Returns true if a exist with the specified ID.
+ /// Get a single chat message content for the chat history and settings provided. /// - /// The service provider. - /// The service ID to search for. If null, it will look for a default service. - /// True if the service ID is registered, false otherwise. - public static bool HasChatCompletionService( - this IAIServiceProvider services, - string? serviceId = null) - => services.TryGetService(serviceId, out _); + /// The target IChatCompletionService interface to extend. + /// The chat history to complete. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// List of different chat results generated by the remote model + public static async Task GetChatMessageContentAsync( + this IChatCompletionService chatCompletionService, + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => (await chatCompletionService.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) + .Single(); + + /// + /// Get streaming chat message contents for the chat history provided using the specified settings. + /// + /// Throws if the specified type is not the same or fail to cast + /// The target IChatCompletionService interface to extend. + /// The standardized prompt input. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Streaming list of different completion streaming string updates generated by the remote model + public static IAsyncEnumerable GetStreamingChatMessageContentsAsync( + this IChatCompletionService chatCompletionService, + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + // Try to parse the text as a chat history + if (ChatPromptParser.TryParse(prompt, out var chatHistoryFromPrompt)) + { + return chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistoryFromPrompt, executionSettings, kernel, cancellationToken); + } + + // Otherwise, use the prompt as the chat user message + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage(prompt); + + return chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs index 1ec25161e418..e15d46965de7 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatHistory.cs @@ -1,73 +1,212 @@ // Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections; using System.Collections.Generic; +using System.Text; -#pragma warning disable CA1710 +#pragma warning disable CA1033 // Interface methods should be callable by child types +#pragma warning disable CA1710 // Identifiers should have correct suffix -namespace Microsoft.SemanticKernel.AI.ChatCompletion; +namespace Microsoft.SemanticKernel.ChatCompletion; /// -/// Chat message history representation +/// Provides a history of chat messages from a chat conversation. 
/// -public class ChatHistory : List +public class ChatHistory : IList, IReadOnlyList { - private sealed class ChatMessage : ChatMessageBase - { - public ChatMessage(AuthorRole authorRole, string content) : base(authorRole, content) - { - } - } + /// The messages. + private readonly List _messages; + /// Initializes an empty history. /// - /// List of messages in the chat + /// Creates a new instance of the class /// - public List Messages => this; + public ChatHistory() + { + this._messages = new(); + } /// - /// Add a message to the chat history + /// Creates a new instance of the class with a system message /// - /// Role of the message author - /// Message content - public void AddMessage(AuthorRole authorRole, string content) + /// The system message to add to the history. + public ChatHistory(string systemMessage) { - this.Add(new ChatMessage(authorRole, content)); + Verify.NotNullOrWhiteSpace(systemMessage); + + this._messages = new(); + this.AddSystemMessage(systemMessage); } + /// Initializes the history will all of the specified messages. + /// The messages to copy into the history. + /// is null. + public ChatHistory(IEnumerable messages) + { + Verify.NotNull(messages); + this._messages = new(messages); + } + + /// Gets the number of messages in the history. + public int Count => this._messages.Count; + /// - /// Insert a message into the chat history - /// - /// Index of the message to insert /// Role of the message author /// Message content - public void InsertMessage(int index, AuthorRole authorRole, string content) - { - this.Insert(index, new ChatMessage(authorRole, content)); - } + /// Encoding of the message content + /// Dictionary for any additional metadata + /// + public void AddMessage(AuthorRole authorRole, string content, Encoding? encoding = null, IReadOnlyDictionary? metadata = null) => + this.Add(new ChatMessageContent(authorRole, content, null, null, encoding, metadata)); + + /// + /// Role of the message author + /// Instance of with content items + /// Encoding of the message content + /// Dictionary for any additional metadata + /// + public void AddMessage(AuthorRole authorRole, ChatMessageContentItemCollection contentItems, Encoding? encoding = null, IReadOnlyDictionary? metadata = null) => + this.Add(new ChatMessageContent(authorRole, contentItems, null, null, encoding, metadata)); /// /// Add a user message to the chat history /// /// Message content - public void AddUserMessage(string content) - { + public void AddUserMessage(string content) => this.AddMessage(AuthorRole.User, content); - } + + /// + /// Add a user message to the chat history + /// + /// Instance of with content items + public void AddUserMessage(ChatMessageContentItemCollection contentItems) => + this.AddMessage(AuthorRole.User, contentItems); /// /// Add an assistant message to the chat history /// /// Message content - public void AddAssistantMessage(string content) - { + public void AddAssistantMessage(string content) => this.AddMessage(AuthorRole.Assistant, content); - } /// /// Add a system message to the chat history /// /// Message content - public void AddSystemMessage(string content) - { + public void AddSystemMessage(string content) => this.AddMessage(AuthorRole.System, content); + + /// Adds a message to the history. + /// The message to add. + /// is null. + public void Add(ChatMessageContent item) + { + Verify.NotNull(item); + this._messages.Add(item); } + + /// Adds the messages to the history. + /// The collection whose messages should be added to the history. 
+ /// is null. + public void AddRange(IEnumerable items) + { + Verify.NotNull(items); + this._messages.AddRange(items); + } + + /// Inserts a message into the history at the specified index. + /// The index at which the item should be inserted. + /// The message to insert. + /// is null. + public void Insert(int index, ChatMessageContent item) + { + Verify.NotNull(item); + this._messages.Insert(index, item); + } + + /// + /// Copies all of the messages in the history to an array, starting at the specified destination array index. + /// + /// The destination array into which the messages should be copied. + /// The zero-based index into at which copying should begin. + /// is null. + /// The number of messages in the history is greater than the available space from to the end of . + /// is less than 0. + public void CopyTo(ChatMessageContent[] array, int arrayIndex) => this._messages.CopyTo(array, arrayIndex); + + /// Removes all messages from the history. + public void Clear() => this._messages.Clear(); + + /// Gets or sets the message at the specified index in the history. + /// The index of the message to get or set. + /// The message at the specified index. + /// is null. + /// The was not valid for this history. + public ChatMessageContent this[int index] + { + get => this._messages[index]; + set + { + Verify.NotNull(value); + this._messages[index] = value; + } + } + + /// Determines whether a message is in the history. + /// The message to locate. + /// true if the message is found in the history; otherwise, false. + /// is null. + public bool Contains(ChatMessageContent item) + { + Verify.NotNull(item); + return this._messages.Contains(item); + } + + /// Searches for the specified message and returns the index of the first occurrence. + /// The message to locate. + /// The index of the first found occurrence of the specified message; -1 if the message could not be found. + /// is null. + public int IndexOf(ChatMessageContent item) + { + Verify.NotNull(item); + return this._messages.IndexOf(item); + } + + /// Removes the message at the specified index from the history. + /// The index of the message to remove. + /// The was not valid for this history. + public void RemoveAt(int index) => this._messages.RemoveAt(index); + + /// Removes the first occurrence of the specified message from the history. + /// The message to remove from the history. + /// true if the item was successfully removed; false if it wasn't located in the history. + /// is null. + public bool Remove(ChatMessageContent item) + { + Verify.NotNull(item); + return this._messages.Remove(item); + } + + /// + /// Removes a range of messages from the history. + /// + /// The index of the range of elements to remove. + /// The number of elements to remove. + /// is less than 0. + /// is less than 0. + /// and do not denote a valid range of messages. + public void RemoveRange(int index, int count) + { + this._messages.RemoveRange(index, count); + } + + /// + bool ICollection.IsReadOnly => false; + + /// + IEnumerator IEnumerable.GetEnumerator() => this._messages.GetEnumerator(); + + /// + IEnumerator IEnumerable.GetEnumerator() => this._messages.GetEnumerator(); } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatMessageBase.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatMessageBase.cs deleted file mode 100644 index 8fbc5579eee1..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatMessageBase.cs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright (c) Microsoft. 
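// Illustrative sketch (not part of the diff): the reworked ChatHistory above is a plain collection
// of ChatMessageContent, built with role-specific helpers and enumerable like any other IList.
using System;
using Microsoft.SemanticKernel.ChatCompletion;

var history = new ChatHistory("You are a concise assistant.");
history.AddUserMessage("What does this change do?");
history.AddAssistantMessage("It replaces the List-based history with an IList<ChatMessageContent> implementation.");

foreach (var message in history)
{
    Console.WriteLine($"{message.Role}: {message.Content}");
}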
All rights reserved. - -namespace Microsoft.SemanticKernel.AI.ChatCompletion; - -/// -/// Chat message abstraction -/// -public abstract class ChatMessageBase -{ - /// - /// Role of the author of the message - /// - public AuthorRole Role { get; set; } - - /// - /// Content of the message - /// - public string Content { get; set; } - - /// - /// Creates a new instance of the class - /// - /// Role of the author of the message - /// Content of the message - protected ChatMessageBase(AuthorRole role, string content) - { - this.Role = role; - this.Content = content; - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatMessageContentItemCollection.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatMessageContentItemCollection.cs new file mode 100644 index 000000000000..e8f990fc3a57 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatMessageContentItemCollection.cs @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.ChatCompletion; + +#pragma warning disable CA1033 // Interface methods should be callable by child types + +/// +/// Contains collection of chat message content items of type . +/// +public class ChatMessageContentItemCollection : IList, IReadOnlyList +{ + /// + /// Initializes a new instance of the class. + /// + public ChatMessageContentItemCollection() + { + this._items = new(); + } + + /// + /// Gets or sets the content item at the specified index in the collection. + /// + /// The index of the content item to get or set. + /// The content item at the specified index. + /// is null. + /// The was not valid for this collection. + public KernelContent this[int index] + { + get => this._items[index]; + set + { + Verify.NotNull(value); + this._items[index] = value; + } + } + + /// + /// Gets the number of content items in the collection. + /// + public int Count => this._items.Count; + + /// + /// Adds a content item to the collection. + /// + /// The content item to add. + /// is null. + public void Add(KernelContent item) + { + Verify.NotNull(item); + this._items.Add(item); + } + + /// + /// Removes all content items from the collection. + /// + public void Clear() => this._items.Clear(); + + /// + /// Determines whether a content item is in the collection. + /// + /// The content item to locate. + /// True if the content item is found in the collection; otherwise, false. + /// is null. + public bool Contains(KernelContent item) + { + Verify.NotNull(item); + return this._items.Contains(item); + } + + /// + /// Copies all of the content items in the collection to an array, starting at the specified destination array index. + /// + /// The destination array into which the content items should be copied. + /// The zero-based index into at which copying should begin. + /// is null. + /// The number of content items in the collection is greater than the available space from to the end of . + /// is less than 0. + public void CopyTo(KernelContent[] array, int arrayIndex) => this._items.CopyTo(array, arrayIndex); + + /// + /// Searches for the specified content item and returns the index of the first occurrence. + /// + /// The content item to locate. + /// The index of the first found occurrence of the specified content item; -1 if the content item could not be found. + /// is null. 
+ public int IndexOf(KernelContent item) + { + Verify.NotNull(item); + return this._items.IndexOf(item); + } + + /// + /// Inserts a content item into the collection at the specified index. + /// + /// The index at which the content item should be inserted. + /// The content item to insert. + /// is null. + public void Insert(int index, KernelContent item) + { + Verify.NotNull(item); + this._items.Insert(index, item); + } + + /// + /// Removes the first occurrence of the specified content item from the collection. + /// + /// The content item to remove from the collection. + /// True if the item was successfully removed; false if it wasn't located in the collection. + /// is null. + public bool Remove(KernelContent item) + { + Verify.NotNull(item); + return this._items.Remove(item); + } + + /// + /// Removes the content item at the specified index from the collection. + /// + /// The index of the content item to remove. + public void RemoveAt(int index) => this._items.RemoveAt(index); + + bool ICollection.IsReadOnly => false; + + IEnumerator IEnumerable.GetEnumerator() => this._items.GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => this._items.GetEnumerator(); + + #region private + + private readonly List _items; + + #endregion +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs new file mode 100644 index 000000000000..dae1b777d03d --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs @@ -0,0 +1,119 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; + +namespace Microsoft.SemanticKernel.ChatCompletion; + +/// +/// Chat Prompt parser. +/// +internal static class ChatPromptParser +{ + private const string MessageTagName = "message"; + private const string RoleAttributeName = "role"; + private const string ImageTagName = "image"; + private const string TextTagName = "text"; + + /// + /// Parses a prompt for an XML representation of a . + /// + /// The prompt to parse. + /// The parsed , or null if it couldn't be parsed. + /// true if the history could be parsed; otherwise, false. + public static bool TryParse(string prompt, [NotNullWhen(true)] out ChatHistory? chatHistory) + { + // Parse the input string into nodes and then those nodes into a chat history. + // The XML parsing is expensive, so we do a quick up-front check to make sure + // the text contains "= 0 && + XmlPromptParser.TryParse(prompt, out var nodes) && + TryParse(nodes, out chatHistory)) + { + return true; + } + + chatHistory = null; + return false; + } + + /// + /// Parses collection of instances and sets output as . + /// + /// Collection of to parse. + /// Parsing output as . + /// Returns true if parsing was successful, otherwise false. + private static bool TryParse(List nodes, [NotNullWhen(true)] out ChatHistory? chatHistory) + { + chatHistory = null; + + foreach (var node in nodes.Where(IsValidChatMessage)) + { + (chatHistory ??= new()).Add(ParseChatNode(node)); + } + + return chatHistory is not null; + } + + /// + /// Parses a chat node and constructs a object. + /// + /// The prompt node to parse. + /// object. 
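// Illustrative sketch (not part of the diff): a single message can now carry several content items,
// for example text plus an image, via the new ChatMessageContentItemCollection. The image URL below
// is a placeholder.
using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

var history = new ChatHistory();
history.AddUserMessage(new ChatMessageContentItemCollection
{
    new TextContent("Describe what is shown in this image."),
    new ImageContent(new Uri("https://example.com/diagram.png")),
});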
+ private static ChatMessageContent ParseChatNode(PromptNode node) + { + ChatMessageContentItemCollection items = new(); + foreach (var childNode in node.ChildNodes.Where(childNode => childNode.Content is not null)) + { + if (childNode.TagName.Equals(ImageTagName, StringComparison.OrdinalIgnoreCase)) + { + items.Add(new ImageContent(new Uri(childNode.Content!))); + } + else if (childNode.TagName.Equals(TextTagName, StringComparison.OrdinalIgnoreCase)) + { + items.Add(new TextContent(childNode.Content)); + } + } + + if (items.Count == 1 && items[0] is TextContent textContent) + { + node.Content = textContent.Text; + items.Clear(); + } + + var authorRole = new AuthorRole(node.Attributes[RoleAttributeName]); + + return items.Count > 0 + ? new ChatMessageContent(authorRole, items) + : new ChatMessageContent(authorRole, node.Content); + } + + /// + /// Checks if is valid chat message. + /// + /// Instance of . + /// + /// A valid chat message is a node with the following structure:
+ /// TagName = "message"
+ /// Attributes = { "role" : "..." }
+ /// optional one or more child nodes ...
+ /// content not null or single child node ... + ///
+ private static bool IsValidChatMessage(PromptNode node) + { + return + node.TagName.Equals(MessageTagName, StringComparison.OrdinalIgnoreCase) && + node.Attributes.ContainsKey(RoleAttributeName) && + IsValidChildNodes(node); + } + + private static bool IsValidChildNodes(PromptNode node) + { + var textTagsCount = node.ChildNodes.Count(n => n.TagName.Equals(TextTagName, StringComparison.OrdinalIgnoreCase)); + return textTagsCount == 1 || (textTagsCount == 0 && node.Content is not null); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletion.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletion.cs deleted file mode 100644 index 6d713703c93d..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletion.cs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.AI.ChatCompletion; - -/// -/// Interface for chat completion services -/// -public interface IChatCompletion : IAIService -{ - /// - /// Create a new empty chat instance - /// - /// Optional chat instructions for the AI service - /// Chat object - ChatHistory CreateNewChat(string? instructions = null); - - /// - /// Get chat completion results for the prompt and settings. - /// - /// The chat history context. - /// Request settings for the completion API - /// The to monitor for cancellation requests. The default is . - /// List of different chat results generated by the remote model - Task> GetChatCompletionsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default); - - /// - /// Get chat streaming completion results for the prompt and settings. - /// - /// The chat history context. - /// Request settings for the completion API - /// The to monitor for cancellation requests. The default is . - /// AsyncEnumerable list of different streaming chat results generated by the remote model - IAsyncEnumerable GetStreamingChatCompletionsAsync( - ChatHistory chat, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs new file mode 100644 index 000000000000..3d5ed9fa0fe8 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.ChatCompletion; + +/// +/// Interface for chat completion services. +/// +public interface IChatCompletionService : IAIService +{ + /// + /// Get multiple chat content choices for the prompt and settings. + /// + /// + /// This should be used when the settings request more than one choice. + /// + /// The chat history context. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is .
+ /// List of different chat results generated by the remote model + Task> GetChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default); + + /// + /// Get streaming chat contents for the chat history provided using the specified settings. + /// + /// Throws if the specified type is not the same or fail to cast + /// The chat history to complete. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Streaming list of different completion streaming string updates generated by the remote model + IAsyncEnumerable GetStreamingChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatResult.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatResult.cs deleted file mode 100644 index f02027449744..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatResult.cs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.AI.ChatCompletion; - -/// -/// Interface for chat completion results -/// -public interface IChatResult : IResultBase -{ - /// - /// Get the chat message from the result. - /// - /// The to monitor for cancellation requests. The default is . - /// Current chat message content - Task GetChatMessageAsync(CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatStreamingResult.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatStreamingResult.cs deleted file mode 100644 index 5105c64ebb7d..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatStreamingResult.cs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; - -namespace Microsoft.SemanticKernel.AI.ChatCompletion; - -/// -/// Interface for chat completion streaming results -/// -public interface IChatStreamingResult : IResultBase -{ - /// - /// Get the chat message from the streaming result. - /// - /// The to monitor for cancellation requests. The default is . - /// Current chat message streaming content - IAsyncEnumerable GetStreamingChatMessageAsync(CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs index a28d3cb9167f..a674e1f6eb2c 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/EmbeddingGenerationServiceExtensions.cs @@ -1,16 +1,17 @@ // Copyright (c) Microsoft. All rights reserved. 
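// Illustrative sketch (not part of the diff), assuming chatService is an IChatCompletionService
// resolved from the kernel's services: prompts written as <message> XML are parsed into a
// ChatHistory by the new ChatPromptParser, while plain strings fall back to a single user message.
using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

const string Prompt = """
    <message role="system">Answer in one short sentence.</message>
    <message role="user">What is a kernel plugin?</message>
    """;

ChatMessageContent reply = await chatService.GetChatMessageContentAsync(Prompt);
Console.WriteLine(reply.Content);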
using System; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; -namespace Microsoft.SemanticKernel.AI.Embeddings; +namespace Microsoft.SemanticKernel.Embeddings; /// -/// Provides a collection of static methods for operating on objects. +/// Provides a collection of static methods for operating on objects. /// +[Experimental("SKEXP0001")] public static class EmbeddingGenerationExtensions { /// @@ -20,13 +21,18 @@ public static class EmbeddingGenerationExtensions /// The numeric type of the embedding data. /// The embedding generator. /// A value from which an embedding will be generated. + /// The containing services, plugins, and other state for use throughout the operation. /// Cancellation token /// A list of embedding structs representing the input . - public static async Task> GenerateEmbeddingAsync - (this IEmbeddingGeneration generator, TValue value, CancellationToken cancellationToken = default) + [Experimental("SKEXP0001")] + public static async Task> GenerateEmbeddingAsync( + this IEmbeddingGenerationService generator, + TValue value, + Kernel? kernel = null, + CancellationToken cancellationToken = default) where TEmbedding : unmanaged { Verify.NotNull(generator); - return (await generator.GenerateEmbeddingsAsync(new[] { value }, cancellationToken).ConfigureAwait(false)).FirstOrDefault(); + return (await generator.GenerateEmbeddingsAsync(new[] { value }, kernel, cancellationToken).ConfigureAwait(false)).FirstOrDefault(); } } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/IEmbeddingGeneration.cs b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/IEmbeddingGeneration.cs deleted file mode 100644 index a2fc94d18f38..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/IEmbeddingGeneration.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.AI.Embeddings; - -/// -/// Represents a generator of embeddings. -/// -/// The type from which embeddings will be generated. -/// The numeric type of the embedding data. -public interface IEmbeddingGeneration : IAIService - where TEmbedding : unmanaged -{ - /// - /// Generates an embedding from the given . - /// - /// List of strings to generate embeddings for - /// The to monitor for cancellation requests. The default is . - /// List of embeddings - Task>> GenerateEmbeddingsAsync(IList data, CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/IEmbeddingGenerationService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/IEmbeddingGenerationService.cs new file mode 100644 index 000000000000..b87a5f6432af --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/IEmbeddingGenerationService.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.Embeddings; + +/// +/// Represents a generator of embeddings. +/// +/// The type from which embeddings will be generated. +/// The numeric type of the embedding data. 
+[Experimental("SKEXP0001")] +public interface IEmbeddingGenerationService : IAIService + where TEmbedding : unmanaged +{ + /// + /// Generates an embedding from the given . + /// + /// List of strings to generate embeddings for + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// List of embeddings + Task>> GenerateEmbeddingsAsync( + IList data, + Kernel? kernel = null, + CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/ITextEmbeddingGeneration.cs b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/ITextEmbeddingGeneration.cs deleted file mode 100644 index 372a2a2b512a..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/ITextEmbeddingGeneration.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.AI.Embeddings; - -/// -/// Represents a generator of text embeddings of type float. -/// -public interface ITextEmbeddingGeneration : IEmbeddingGeneration, IAIService -{ -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/ITextEmbeddingGenerationService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/ITextEmbeddingGenerationService.cs new file mode 100644 index 000000000000..905b107bfb20 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/ITextEmbeddingGenerationService.cs @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel.Embeddings; + +/// +/// Represents a generator of text embeddings of type float. +/// +[Experimental("SKEXP0001")] +public interface ITextEmbeddingGenerationService : IEmbeddingGenerationService +{ +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/TextEmbeddingServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/TextEmbeddingServiceExtensions.cs deleted file mode 100644 index 2f453ba308d5..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/Embeddings/TextEmbeddingServiceExtensions.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Services; - -// Use base namespace for better discoverability and to avoid conflicts with other extensions. -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 // Namespace does not match folder structure - -/// -/// Provides extension methods for working with text embedding services. -/// -public static class TextEmbeddingServiceExtensions -{ - /// - /// Get the matching the given , or the default - /// if the is not provided or not found. - /// - /// The service provider. - /// Optional identifier of the desired service. - /// The embedding service matching the given id or the default service. - /// Thrown when no suitable service is found. - public static ITextEmbeddingGeneration GetTextEmbeddingService( - this IAIServiceProvider services, - string? serviceId = null) - => services.GetService(serviceId) - ?? throw new SKException("Text embedding service not found"); - - /// - /// Returns true if a exist with the specified ID. - /// - /// The service provider. - /// The service ID to search for. 
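// Illustrative sketch (not part of the diff), assuming embeddingService is a registered
// ITextEmbeddingGenerationService: the GenerateEmbeddingAsync extension above wraps the batch
// GenerateEmbeddingsAsync call for the common single-value case.
using System;
using Microsoft.SemanticKernel.Embeddings;

ReadOnlyMemory<float> vector = await embeddingService.GenerateEmbeddingAsync("sample text to embed");
Console.WriteLine($"Embedding dimensions: {vector.Length}");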
If null, it will look for a default service. - /// True if the service ID is registered, false otherwise. - public static bool HasTextEmbeddingService( - this IAIServiceProvider services, - string? serviceId = null) - => services.TryGetService(serviceId, out _); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/IResultBase.cs b/dotnet/src/SemanticKernel.Abstractions/AI/IResultBase.cs deleted file mode 100644 index 0c631cc3fb6e..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/IResultBase.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.AI; - -/// -/// Interface for model results -/// -public interface IResultBase -{ - /// - /// Gets the model result data. - /// - ModelResult ModelResult { get; } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ImageGeneration/IImageGeneration.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ImageGeneration/IImageGeneration.cs deleted file mode 100644 index 7554234dd6e9..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ImageGeneration/IImageGeneration.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.AI.ImageGeneration; - -/// -/// Interface for image generation services -/// -public interface IImageGeneration : IAIService -{ - /// - /// Generate an image matching the given description - /// - /// Image description - /// Image width in pixels - /// Image height in pixels - /// The to monitor for cancellation requests. The default is . - /// Generated image in base64 format or image URL - public Task GenerateImageAsync( - string description, - int width, - int height, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ImageGeneration/ImageGenerationServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ImageGeneration/ImageGenerationServiceExtensions.cs deleted file mode 100644 index d458ff3fa7f4..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ImageGeneration/ImageGenerationServiceExtensions.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.AI.ImageGeneration; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Services; - -// Use base namespace for better discoverability and to avoid conflicts with other extensions. -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 // Namespace does not match folder structure - -/// -/// Provides extension methods for working with services. -/// -public static class ImageGenerationServiceExtensions -{ - /// - /// Get the matching the given , or the default - /// if the is not provided or not found. - /// - /// The service provider. - /// Optional identifier of the desired service. - /// The id matching the given id or the default. - /// Thrown when no suitable service is found. - public static IImageGeneration GetImageGenerationService( - this IAIServiceProvider services, - string? serviceId = null) => services.GetService(serviceId) - ?? throw new SKException("Image generation service not found"); - - /// - /// Returns true if a exist with the specified ID. - /// - /// The service provider. - /// The service ID to search for. 
If null, it will look for a default service. - /// True if the service ID is registered, false otherwise. - public static bool HasImageGenerationService( - this IAIServiceProvider services, - string? serviceId = null) - => services.TryGetService(serviceId, out _); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs new file mode 100644 index 000000000000..24d0ba1a57ff --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides execution settings for an AI request. +/// +/// +/// Implementors of or can extend this +/// if the service they are calling supports additional properties. For an example, please reference +/// the Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIPromptExecutionSettings implementation. +/// +public class PromptExecutionSettings +{ + /// + /// Gets the default service identifier. + /// + /// + /// In a dictionary of , this is the key that should be used settings considered the default. + /// + public static string DefaultServiceId => "default"; + + /// + /// Model identifier. + /// This identifies the AI model these settings are configured for e.g., gpt-4, gpt-3.5-turbo + /// + [JsonPropertyName("model_id")] + public string? ModelId { get; set; } + + /// + /// Extra properties that may be included in the serialized execution settings. + /// + /// + /// Avoid using this property if possible. Instead, use one of the classes that extends . + /// + [JsonExtensionData] + public Dictionary? ExtensionData { get; set; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptNode.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptNode.cs new file mode 100644 index 000000000000..af3565abf3ca --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptNode.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel; + +/// +/// Class that contains information about node in prompt. +/// +internal sealed class PromptNode +{ + private Dictionary? _attributes; + private List? _childNodes; + + /// + /// Node tag name. + /// + public string TagName { get; set; } + + /// + /// Node content. + /// + public string? Content { get; set; } + + /// + /// Collection of node attributes. + /// + public Dictionary Attributes + { + get => this._attributes ??= new(); + set => this._attributes = value; + } + + /// + /// Collection of child nodes. + /// + public List ChildNodes + { + get => this._childNodes ??= new(); + set => this._childNodes = value; + } + + /// + /// Initializes a new instance of the class. + /// + /// Node tag name. + public PromptNode(string tagName) + { + this.TagName = tagName; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletion.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletion.cs deleted file mode 100644 index 42d8f295ef65..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextCompletion.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
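// Illustrative sketch (not part of the diff): settings a connector does not model explicitly can be
// carried in ExtensionData (assumed here to be a Dictionary<string, object>), although
// connector-specific classes such as OpenAIPromptExecutionSettings are preferred when available.
using System.Collections.Generic;
using Microsoft.SemanticKernel;

var settings = new PromptExecutionSettings
{
    ModelId = "gpt-4",
    ExtensionData = new Dictionary<string, object> { ["temperature"] = 0.2 },
};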
- -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.AI.TextCompletion; - -/// -/// Interface for text completion services -/// -public interface ITextCompletion : IAIService -{ - /// - /// Get completion results for the prompt and settings. - /// - /// The prompt to complete. - /// Request settings for the completion API - /// The to monitor for cancellation requests. The default is . - /// List of different completions results generated by the remote model - Task> GetCompletionsAsync( - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default); - - /// - /// Get streaming completion results for the prompt and settings. - /// - /// The prompt to complete. - /// Request settings for the completion API - /// The to monitor for cancellation requests. The default is . - /// List of different completion streaming results generated by the remote model - IAsyncEnumerable GetStreamingCompletionsAsync( - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextResult.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextResult.cs deleted file mode 100644 index f108f3d00104..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextResult.cs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.AI.TextCompletion; - -/// -/// Interface for text completion results. -/// -public interface ITextResult : IResultBase -{ - /// - /// Asynchronously retrieves the text completion result. - /// - /// An optional to observe while waiting for the task to complete. - /// A representing the asynchronous operation, with the result being the completed text. - Task GetCompletionAsync(CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextStreamingResult.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextStreamingResult.cs deleted file mode 100644 index b4c4f51d6fcf..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/ITextStreamingResult.cs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; - -namespace Microsoft.SemanticKernel.AI.TextCompletion; - -/// -/// Interface for text completion streaming results. -/// Provides an asynchronous enumerable of text completion results. -/// -public interface ITextStreamingResult : IResultBase -{ - /// - /// Gets an asynchronous enumerable of text completion results. - /// - /// An optional to observe while waiting for the task to complete. - /// An of representing the text completion results. - IAsyncEnumerable GetCompletionStreamingAsync(CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionExtensions.cs deleted file mode 100644 index 4350f709d287..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionExtensions.cs +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Collections.Generic; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.AI.TextCompletion; - -/// -/// Class sponsor that holds extension methods for ITextCompletion interface. -/// -public static class TextCompletionExtensions -{ - /// - /// Creates a completion for the prompt and settings. - /// - /// Target interface to extend - /// The prompt to complete. - /// Request settings for the completion API - /// The to monitor for cancellation requests. The default is . - /// This extension does not support multiple prompt results (Only the first will be returned) - /// Text generated by the remote model - public static async Task CompleteAsync(this ITextCompletion textCompletion, - string text, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - var completions = await textCompletion.GetCompletionsAsync(text, requestSettings, cancellationToken).ConfigureAwait(false); - var firstResult = completions[0]; - - return await firstResult.GetCompletionAsync(cancellationToken).ConfigureAwait(false); - } - - /// - /// Creates a completion for the prompt and settings. - /// - /// Target interface to extend - /// The prompt to complete. - /// Request settings for the completion API - /// The to monitor for cancellation requests. The default is . - /// This extension does not support multiple prompt results (Only the first will be returned) - /// Streaming content of the text generated by the remote model - public static async IAsyncEnumerable CompleteStreamAsync(this ITextCompletion textCompletion, - string text, - AIRequestSettings? requestSettings = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - IAsyncEnumerable completionResults = textCompletion.GetStreamingCompletionsAsync(text, requestSettings, cancellationToken); - - await foreach (var completionResult in completionResults) - { - await foreach (var word in completionResult.GetCompletionStreamingAsync(cancellationToken).ConfigureAwait(false)) - { - yield return word; - } - - yield break; - } - } - - /// - /// Creates a completion for the prompt and settings. - /// - /// Target interface to extend. - /// The prompt to complete. - /// Request settings for the completion API. - /// The to monitor for cancellation requests. The default is . - /// Streaming content of the text generated by the remote model. - public static async IAsyncEnumerable CompleteStreamsAsync(this ITextCompletion textCompletion, - string text, - AIRequestSettings? requestSettings = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - IAsyncEnumerable completionResults = textCompletion.GetStreamingCompletionsAsync(text, requestSettings, cancellationToken); - - await foreach (var completionResult in completionResults) - { - await foreach (var word in completionResult.GetCompletionStreamingAsync(cancellationToken).ConfigureAwait(false)) - { - yield return word; - } - } - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionServiceExtensions.cs deleted file mode 100644 index dc69b42f4e99..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextCompletion/TextCompletionServiceExtensions.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Services; - -// Use base namespace for better discoverability and to avoid conflicts with other extensions. -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 // Namespace does not match folder structure - -/// -/// Provides extension methods for working with services. -/// -public static class TextCompletionServiceExtensions -{ - /// - /// Get the matching the given , or the default - /// if the is not provided or not found. - /// - /// The service provider. - /// Optional identifier of the desired service. - /// The text completion service id matching the given ID or the default. - /// Thrown when no suitable service is found. - public static ITextCompletion GetTextCompletionServiceOrDefault( - this IAIServiceProvider services, - string? serviceId = null) => services.GetService(serviceId) - ?? throw new SKException("Text completion service not found"); - - /// - /// Returns true if a exist with the specified ID. - /// - /// The service provider. - /// The service ID to search for. If null, it will look for a default service. - /// True if the service ID is registered, false otherwise. - public static bool HasTextCompletionService( - this IAIServiceProvider services, - string? serviceId = null) - => services.TryGetService(serviceId, out _); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/ITextGenerationService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/ITextGenerationService.cs new file mode 100644 index 000000000000..9fea275eaa03 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/ITextGenerationService.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.TextGeneration; + +/// +/// Interface for text generation services +/// +public interface ITextGenerationService : IAIService +{ + /// + /// Get completion results for the prompt and settings. + /// + /// The raw prompt input. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// List of different completions results generated by the remote model + Task> GetTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default); + + /// + /// Get streaming results for the prompt using the specified execution settings. + /// Each modality may support for different types of streaming contents. + /// + /// + /// Usage of this method with value types may be more efficient if the connector supports it. + /// + /// Throws if the specified type is not the same or fail to cast + /// The prompt to complete. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Streaming list of different completion streaming string updates generated by the remote model + IAsyncEnumerable GetStreamingTextContentsAsync( + string prompt, + PromptExecutionSettings? 
executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/TextGenerationExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/TextGenerationExtensions.cs new file mode 100644 index 000000000000..7213ea929bcc --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/TextGenerationExtensions.cs @@ -0,0 +1,101 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.TextGeneration; + +/// +/// Class sponsor that holds extension methods for interface. +/// +public static class TextGenerationExtensions +{ + /// + /// Get a single text generation result for the prompt and settings. + /// + /// Text generation service + /// The standardized prompt input. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// List of different text results generated by the remote model + public static async Task GetTextContentAsync( + this ITextGenerationService textGenerationService, + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => (await textGenerationService.GetTextContentsAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) + .Single(); + + /// + /// Get a single text generation result for the standardized prompt and settings. + /// + /// Text generation service + /// The standardized prompt input. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// List of different text results generated by the remote model + internal static async Task GetTextContentWithDefaultParserAsync( + this ITextGenerationService textGenerationService, + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + if (textGenerationService is IChatCompletionService chatCompletion + && ChatPromptParser.TryParse(prompt, out var chatHistory)) + { + var chatMessage = await chatCompletion.GetChatMessageContentAsync(chatHistory, executionSettings, kernel, cancellationToken).ConfigureAwait(false); + return new TextContent(chatMessage.Content, chatMessage.ModelId, chatMessage.InnerContent, chatMessage.Encoding, chatMessage.Metadata); + } + + // When using against text generations, the prompt will be used as is. + return await textGenerationService.GetTextContentAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false); + } + + /// + /// Get streaming results for the standardized prompt using the specified settings. + /// Each modality may support for different types of streaming contents. + /// + /// + /// Usage of this method with value types may be more efficient if the connector supports it. + /// + /// Throws if the specified type is not the same or fail to cast + /// Text generation service + /// The standardized prompt to complete. + /// The AI execution settings (optional). 
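For context on the interface above: `GetTextContentsAsync` can return more than one candidate completion per request, and the `TextGenerationExtensions.GetTextContentAsync` extension collapses that list to a single item via `.Single()`. A consumption sketch, assuming the declared return type is a read-only list of `TextContent` (the generic arguments are elided in the rendering above) and that `service` is an existing connector:

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.TextGeneration;

// Sketch: enumerate all candidate completions rather than only the first.
static async Task ListCandidatesAsync(ITextGenerationService service)
{
    // Connector-specific settings (temperature, max tokens, ...) would be configured here.
    PromptExecutionSettings? settings = null;

    IReadOnlyList<TextContent> candidates =
        await service.GetTextContentsAsync("Give me three taglines for a coffee shop.", settings);

    foreach (TextContent candidate in candidates)
    {
        Console.WriteLine(candidate.Text);
    }
}
```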
+ /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Streaming list of different generation streaming string updates generated by the remote model + internal static async IAsyncEnumerable GetStreamingTextContentsWithDefaultParserAsync( + this ITextGenerationService textGenerationService, + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + if (textGenerationService is IChatCompletionService chatCompletion + && ChatPromptParser.TryParse(prompt, out var chatHistory)) + { + await foreach (var chatMessage in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken)) + { + yield return new StreamingTextContent(chatMessage.Content, chatMessage.ChoiceIndex, chatMessage.ModelId, chatMessage, chatMessage.Encoding, chatMessage.Metadata); + } + + yield break; + } + + // When using against text generations, the prompt will be used as is. + await foreach (var textChunk in textGenerationService.GetStreamingTextContentsAsync(prompt, executionSettings, kernel, cancellationToken)) + { + yield return textChunk; + } + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs new file mode 100644 index 000000000000..fb499215d7f0 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel.TextToImage; + +/// +/// Interface for text to image services +/// +[Experimental("SKEXP0002")] +public interface ITextToImageService : IAIService +{ + /// + /// Generate an image matching the given description + /// + /// Image description + /// Image width in pixels + /// Image height in pixels + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Generated image in base64 format or image URL + [Experimental("SKEXP0002")] + public Task GenerateImageAsync( + string description, + int width, + int height, + Kernel? kernel = null, + CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/XmlPromptParser.cs b/dotnet/src/SemanticKernel.Abstractions/AI/XmlPromptParser.cs new file mode 100644 index 000000000000..4ee204b8a39d --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/XmlPromptParser.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Xml; + +namespace Microsoft.SemanticKernel; + +/// +/// Class to parse text prompt from XML format. +/// +internal static class XmlPromptParser +{ + /// + /// Parses text prompt and sets output as collection of instances. + /// + /// Text prompt to parse. + /// Parsing output as collection of instances. + /// Returns true if parsing was successful, otherwise false. + public static bool TryParse(string prompt, [NotNullWhen(true)] out List? 
result) + { + result = null; + + // The below parsing is _very_ expensive, especially when the content is not valid XML and an + // exception is thrown. Try to avoid it in the common case where the prompt is obviously not XML. + // To be valid XML, at a minimum: + // - the string would need to be non-null + // - it would need to contain the start of a tag + // - it would need to contain a closing tag, which could include either + int startPos; + if (prompt is null || + (startPos = prompt.IndexOf('<')) < 0 || + (prompt.IndexOf("", startPos + 1, StringComparison.Ordinal) < 0)) + { + return false; + } + + var xmlDocument = new XmlDocument(); + try + { + xmlDocument.LoadXml($"{prompt}"); + } + catch (XmlException) + { + return false; + } + + foreach (XmlNode node in xmlDocument.DocumentElement!.ChildNodes) + { + if (GetPromptNode(node) is { } childPromptNode) + { + (result ??= new()).Add(childPromptNode); + } + } + + return result is not null; + } + + /// + /// Gets an instance of from and child nodes recursively. + /// + /// Instance of class. + private static PromptNode? GetPromptNode(XmlNode node) + { + if (node.NodeType != XmlNodeType.Element) + { + return null; + } + + var nodeContent = node.InnerText.Trim(); + + var promptNode = new PromptNode(node.Name) + { + Content = !string.IsNullOrEmpty(nodeContent) ? nodeContent : null + }; + + if (node.Attributes is not null) + { + foreach (XmlAttribute item in node.Attributes) + { + promptNode.Attributes.Add(item.Name, item.Value); + } + } + + foreach (XmlNode childNode in node.ChildNodes) + { + var childPromptNode = GetPromptNode(childNode); + + if (childPromptNode != null) + { + promptNode.ChildNodes.Add(childPromptNode); + } + } + + return promptNode; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs new file mode 100644 index 000000000000..53acccaac759 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents chat message content return from a service. +/// +public class ChatMessageContent : KernelContent +{ + /// + /// Role of the author of the message + /// + public AuthorRole Role { get; set; } + + /// + /// Content of the message + /// + public string? Content { get; set; } + + /// + /// Chat message content items + /// + public ChatMessageContentItemCollection? Items { get; set; } + + /// + /// The encoding of the text content. + /// + [JsonIgnore] + public Encoding Encoding { get; set; } + + /// + /// Creates a new instance of the class + /// + /// Role of the author of the message + /// Content of the message + /// The model ID used to generate the content + /// Inner content object reference + /// Encoding of the text + /// Dictionary for any additional metadata + [JsonConstructor] + public ChatMessageContent( + AuthorRole role, + string? content, + string? modelId = null, + object? innerContent = null, + Encoding? encoding = null, + IReadOnlyDictionary? metadata = null) + : base(innerContent, modelId, metadata) + { + this.Role = role; + this.Content = content; + this.Encoding = encoding ?? 
Encoding.UTF8; + } + + /// + /// Creates a new instance of the class + /// + /// Role of the author of the message + /// Instance of with content items + /// The model ID used to generate the content + /// Inner content object reference + /// Encoding of the text + /// Dictionary for any additional metadata + public ChatMessageContent( + AuthorRole role, + ChatMessageContentItemCollection items, + string? modelId = null, + object? innerContent = null, + Encoding? encoding = null, + IReadOnlyDictionary? metadata = null) + : base(innerContent, modelId, metadata) + { + this.Role = role; + this.Encoding = encoding ?? Encoding.UTF8; + this.Items = items; + } + + /// + public override string ToString() + { + return this.Content ?? string.Empty; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/ImageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/ImageContent.cs new file mode 100644 index 000000000000..d53d38c214be --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/ImageContent.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents image content. +/// +public sealed class ImageContent : KernelContent +{ + /// + /// The URI of image. + /// + public Uri? Uri { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// The URI of image. + /// The model ID used to generate the content + /// Inner content + /// Encoding of the text + /// Additional metadata + public ImageContent( + Uri uri, + string? modelId = null, + object? innerContent = null, + Encoding? encoding = null, + IReadOnlyDictionary? metadata = null) + : base(innerContent, modelId, metadata) + { + this.Uri = uri; + } + + /// + public override string ToString() + { + return this.Uri?.ToString() ?? string.Empty; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs new file mode 100644 index 000000000000..2114912fa552 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Base class for all AI non-streaming results +/// +public abstract class KernelContent +{ + /// + /// The inner content representation. Use this to bypass the current abstraction. + /// + /// + /// The usage of this property is considered "unsafe". Use it only if strictly necessary. + /// + [JsonIgnore] + public object? InnerContent { get; } + + /// + /// The model ID used to generate the content. + /// + public string? ModelId { get; } + + /// + /// The metadata associated with the content. + /// + public IReadOnlyDictionary? Metadata { get; } + + /// + /// Initializes a new instance of the class. + /// + /// The inner content representation + /// The model ID used to generate the content + /// Metadata associated with the content + protected KernelContent(object? innerContent, string? modelId = null, IReadOnlyDictionary? 
metadata = null) + { + this.ModelId = modelId; + this.InnerContent = innerContent; + this.Metadata = metadata; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs new file mode 100644 index 000000000000..25411b15c577 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel; + +/// +/// Abstraction of chat message content chunks when using streaming from interface. +/// +/// +/// Represents a chat message content chunk that was streamed from the remote model. +/// +public class StreamingChatMessageContent : StreamingKernelContent +{ + /// + /// Text associated to the message payload + /// + public string? Content { get; set; } + + /// + /// Role of the author of the message + /// + public AuthorRole? Role { get; set; } + + /// + /// The encoding of the text content. + /// + [JsonIgnore] + public Encoding Encoding { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// Role of the author of the message + /// Content of the message + /// Inner content object reference + /// Choice index + /// The model ID used to generate the content + /// Encoding of the chat + /// Additional metadata + [JsonConstructor] + public StreamingChatMessageContent(AuthorRole? role, string? content, object? innerContent = null, int choiceIndex = 0, string? modelId = null, Encoding? encoding = null, IReadOnlyDictionary? metadata = null) : base(innerContent, choiceIndex, modelId, metadata) + { + this.Role = role; + this.Content = content; + this.Encoding = encoding ?? Encoding.UTF8; + } + + /// + public override string ToString() => this.Content ?? string.Empty; + + /// + public override byte[] ToByteArray() => this.Encoding.GetBytes(this.ToString()); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingKernelContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingKernelContent.cs new file mode 100644 index 000000000000..0285eafe92c1 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingKernelContent.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents a single update to a streaming content. +/// +public abstract class StreamingKernelContent +{ + /// + /// In a scenario of multiple choices per request, this represents zero-based index of the choice in the streaming sequence + /// + public int ChoiceIndex { get; } + + /// + /// The inner content representation. Use this to bypass the current abstraction. + /// + /// + /// The usage of this property is considered "unsafe". Use it only if strictly necessary. + /// + [JsonIgnore] + public object? InnerContent { get; } + + /// + /// The model ID used to generate the content. + /// + public string? ModelId { get; } + + /// + /// The metadata associated with the content. + /// + public IReadOnlyDictionary? Metadata { get; } + + /// + /// Abstract string representation of the chunk in a way it could compose/append with previous chunks. 
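To illustrate the content model introduced above: `ChatMessageContent` can be built either from a plain string or from an item collection, and `ToString()` falls back to `Content`. A short sketch; it assumes `ChatMessageContentItemCollection` accepts `KernelContent` items such as `TextContent` and `ImageContent` via a collection initializer, which is not spelled out in this excerpt:

```csharp
using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

// Sketch: the two ChatMessageContent constructors added in this PR.
var plain = new ChatMessageContent(AuthorRole.Assistant, "The answer is 42.");
Console.WriteLine(plain);                  // ToString() falls back to Content
Console.WriteLine(plain.Encoding.WebName); // "utf-8" when no encoding is supplied

// Assumption: the item collection takes KernelContent-derived items.
var items = new ChatMessageContentItemCollection
{
    new TextContent("Here is the diagram you asked for:"),
    new ImageContent(new Uri("https://example.com/diagram.png")),
};
var multiModal = new ChatMessageContent(AuthorRole.Assistant, items);
```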
+ /// + /// + /// Depending on the nature of the underlying type, this method may be more efficient than . + /// + /// String representation of the chunk + public abstract override string ToString(); + + /// + /// Abstract byte[] representation of the chunk in a way it could be composed/appended with previous chunks. + /// + /// + /// Depending on the nature of the underlying type, this method may be more efficient than . + /// + /// Byte array representation of the chunk + public abstract byte[] ToByteArray(); + + /// + /// Initializes a new instance of the class. + /// + /// Inner content object reference + /// Choice index + /// The model ID used to generate the content. + /// Additional metadata associated with the content. + protected StreamingKernelContent(object? innerContent, int choiceIndex = 0, string? modelId = null, IReadOnlyDictionary? metadata = null) + { + this.ModelId = modelId; + this.InnerContent = innerContent; + this.ChoiceIndex = choiceIndex; + this.Metadata = metadata; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingTextContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingTextContent.cs new file mode 100644 index 000000000000..f13d03820c60 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingTextContent.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel; + +/// +/// Abstraction of text content chunks when using streaming from interface. +/// +public class StreamingTextContent : StreamingKernelContent +{ + /// + /// Text associated to the update + /// + public string? Text { get; } + + /// + /// The encoding of the text content. + /// + [JsonIgnore] + public Encoding Encoding { get; set; } + + /// + /// Create a new instance of the class. + /// + /// Text update + /// Index of the choice + /// The model ID used to generate the content + /// Inner chunk object + /// Encoding of the text + /// Metadata information + [JsonConstructor] + public StreamingTextContent(string? text, int choiceIndex = 0, string? modelId = null, object? innerContent = null, Encoding? encoding = null, IReadOnlyDictionary? metadata = null) : base(innerContent, choiceIndex, modelId, metadata) + { + this.Text = text; + this.Encoding = encoding ?? Encoding.UTF8; + } + + /// + public override string ToString() + { + return this.Text ?? string.Empty; + } + + /// + public override byte[] ToByteArray() + { + return this.Encoding.GetBytes(this.ToString()); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/TextContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/TextContent.cs new file mode 100644 index 000000000000..64e04c37ee6c --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/TextContent.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents text content return from a service. +/// +public sealed class TextContent : KernelContent +{ + /// + /// The text content. + /// + public string? Text { get; set; } + + /// + /// The encoding of the text content. + /// + [JsonIgnore] + public Encoding Encoding { get; set; } + + /// + /// Initializes a new instance of the class. 
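The `ToString()`/`ToByteArray()` contract on `StreamingKernelContent` is what makes chunks composable. A sketch of accumulating streamed text, assuming `service` is an existing `ITextGenerationService` connector and `output` is any writable stream:

```csharp
using System;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.TextGeneration;

// Sketch: accumulate streamed chunks into a single string, or stream raw bytes,
// using the ToString()/ToByteArray() contract defined by StreamingKernelContent.
static async Task AccumulateAsync(ITextGenerationService service, Stream output)
{
    var builder = new StringBuilder();

    await foreach (StreamingTextContent chunk in service.GetStreamingTextContentsAsync("Write a limerick."))
    {
        builder.Append(chunk);              // ToString() returns the chunk text
        byte[] bytes = chunk.ToByteArray(); // encoded with chunk.Encoding (UTF-8 by default)
        await output.WriteAsync(bytes, 0, bytes.Length);
    }

    Console.WriteLine(builder.ToString());
}
```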
+ /// + /// Text content + /// The model ID used to generate the content + /// Inner content + /// Encoding of the text + /// Additional metadata + public TextContent(string? text, string? modelId = null, object? innerContent = null, Encoding? encoding = null, IReadOnlyDictionary? metadata = null) : base(innerContent, modelId, metadata) + { + this.Text = text; + this.Encoding = encoding ?? Encoding.UTF8; + } + + /// + public override string ToString() + { + return this.Text ?? string.Empty; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Diagnostics/SKException.cs b/dotnet/src/SemanticKernel.Abstractions/Diagnostics/SKException.cs deleted file mode 100644 index c8c83ddfd33f..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Diagnostics/SKException.cs +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Diagnostics; - -/// -/// Represents the base exception from which all Semantic Kernel exceptions derive. -/// -public class SKException : Exception -{ - /// - /// Initializes a new instance of the class. - /// - public SKException() - { - } - - /// - /// Initializes a new instance of the class with a specified error message. - /// - /// The error message that explains the reason for the exception. - public SKException(string? message) : base(message) - { - } - - /// - /// Initializes a new instance of the class with a specified error message and a reference to the inner exception that is the cause of this exception. - /// - /// The error message that explains the reason for the exception. - /// The exception that is the cause of the current exception, or a null reference if no inner exception is specified. - public SKException(string? message, Exception? innerException) : base(message, innerException) - { - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Diagnostics/Telemetry.cs b/dotnet/src/SemanticKernel.Abstractions/Diagnostics/Telemetry.cs deleted file mode 100644 index 27bfa2006400..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Diagnostics/Telemetry.cs +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Diagnostics; -/// -/// Provides functionality to manage telemetry settings. -/// -public static class Telemetry -{ - /// - /// Environment variable used in Azure to enable/disable telemetry. - /// See: https://learn.microsoft.com/en-us/dotnet/api/azure.core.diagnosticsoptions.istelemetryenabled?view=azure-dotnet - /// - private const string TelemetryDisabledEnvVar = "AZURE_TELEMETRY_DISABLED"; - - /// - /// HTTP User Agent. - /// Note: Azure max length 24 chars. - /// - public const string HttpUserAgent = "Semantic-Kernel"; - - /// - /// Gets a value indicating whether telemetry is enabled or not. - /// Source: https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/core/Azure.Core/src/DiagnosticsOptions.cs - /// Azure customers setting AZURE_TELEMETRY_DISABLED=1 expect telemetry to be disabled. - /// - public static bool IsTelemetryEnabled => !EnvExtensions.GetBoolEnvVar(TelemetryDisabledEnvVar) ?? true; -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Events/CancelKernelEventArgs.cs b/dotnet/src/SemanticKernel.Abstractions/Events/CancelKernelEventArgs.cs new file mode 100644 index 000000000000..ed07decf7f27 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Events/CancelKernelEventArgs.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides an for cancelable operations related +/// to -based operations. +/// +[Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] +public abstract class CancelKernelEventArgs : KernelEventArgs +{ + /// + /// Initializes a new instance of the class. + /// + /// The with which this event is associated. + /// The arguments associated with the operation. + /// A dictionary of metadata associated with the operation. + internal CancelKernelEventArgs(KernelFunction function, KernelArguments arguments, IReadOnlyDictionary? metadata = null) : + base(function, arguments, metadata) + { + } + + /// + /// Gets or sets a value indicating whether the operation associated with + /// the event should be canceled. + /// + /// + /// A cancelable event is raised by the system when it is about to perform an action + /// that can be canceled, such as invoking a . The event + /// handler may set to true to indicate that the operation should + /// be canceled. If there are multiple event handlers registered, subsequent handlers + /// may see and change a value set by a previous handler. The final result is what will + /// be considered by the component raising the event. + /// + public bool Cancel { get; set; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Events/FunctionInvokedEventArgs.cs b/dotnet/src/SemanticKernel.Abstractions/Events/FunctionInvokedEventArgs.cs index b36674e28a15..0317cb5cf860 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Events/FunctionInvokedEventArgs.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Events/FunctionInvokedEventArgs.cs @@ -1,44 +1,38 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Collections.Generic; -using Microsoft.SemanticKernel.Orchestration; +using System; -namespace Microsoft.SemanticKernel.Events; +namespace Microsoft.SemanticKernel; /// -/// Event arguments available to the Kernel.FunctionInvoked event. +/// Provides a used in events just after a function is invoked. /// -public class FunctionInvokedEventArgs : SKCancelEventArgs +[Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] +public sealed class FunctionInvokedEventArgs : CancelKernelEventArgs { - private Dictionary? _metadata; - - /// - /// Indicates if the function execution should repeat. - /// - public bool IsRepeatRequested => this._repeatRequested; - - /// - /// Metadata for storing additional information about function execution result. - /// - public Dictionary Metadata => this._metadata ??= new(); - /// /// Initializes a new instance of the class. /// - /// Function view details - /// Function result - public FunctionInvokedEventArgs(FunctionView functionView, FunctionResult result) : base(functionView, result.Context) + /// The with which this event is associated. + /// The arguments associated with the operation. + /// The result of the function's invocation. + public FunctionInvokedEventArgs(KernelFunction function, KernelArguments arguments, FunctionResult result) : + base(function, arguments, (result ?? throw new ArgumentNullException(nameof(result))).Metadata) { - this._metadata = result._metadata; + this.Result = result; + this.ResultValue = result.Value; } - /// - /// Repeat the current function invocation. 
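For reviewers tracking the event-to-filter transition: the reshaped `FunctionInvokingEventArgs`/`FunctionInvokedEventArgs` below still flow through the kernel's (now obsolete) events. A sketch of the old-style hooks; it assumes the `Kernel` type exposes `FunctionInvoking`/`FunctionInvoked` events using these argument types (referenced by the removed docs but not shown in this excerpt), and the obsolete warning is suppressed deliberately:

```csharp
using System;
using Microsoft.SemanticKernel;

#pragma warning disable CS0618 // Events are deprecated in favor of filters
// Sketch: cancel a function before it runs, or override its result afterwards.
static void WireUpEvents(Kernel kernel)
{
    kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) =>
    {
        if (e.Function.Name == "DeleteAllFiles")
        {
            e.Cancel = true; // stop the invocation before it runs
        }
    };

    kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) =>
    {
        Console.WriteLine($"{e.Function.Name} => {e.Result}");
        e.SetResultValue("[redacted]"); // replace the raw result seen by the caller
    };
}
#pragma warning restore CS0618
```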
- /// - public void Repeat() + /// Gets the result of the function's invocation. + public FunctionResult Result { get; } + + /// Gets the raw result of the function's invocation. + internal object? ResultValue { get; private set; } + + /// Sets an object to use as the overridden new result for the function's invocation. + /// The value to use as the new result of the function's invocation. + public void SetResultValue(object? value) { - this._repeatRequested = true; + this.ResultValue = value; } - - private bool _repeatRequested; } diff --git a/dotnet/src/SemanticKernel.Abstractions/Events/FunctionInvokingEventArgs.cs b/dotnet/src/SemanticKernel.Abstractions/Events/FunctionInvokingEventArgs.cs index bf8e707b44ab..99396a137bfe 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Events/FunctionInvokingEventArgs.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Events/FunctionInvokingEventArgs.cs @@ -1,35 +1,22 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel.Orchestration; +using System; -namespace Microsoft.SemanticKernel.Events; +namespace Microsoft.SemanticKernel; /// -/// Event arguments available to the Kernel.FunctionInvoking event. +/// Provides a used in events just before a function is invoked. /// -public class FunctionInvokingEventArgs : SKCancelEventArgs +[Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] +public sealed class FunctionInvokingEventArgs : CancelKernelEventArgs { - /// - /// Indicates if the function execution should be skipped. - /// - public bool IsSkipRequested => this._skipRequested; - /// /// Initializes a new instance of the class. /// - /// Function view details - /// Context related to the event - public FunctionInvokingEventArgs(FunctionView functionView, SKContext context) : base(functionView, context) + /// The with which this event is associated. + /// The arguments associated with the operation. + public FunctionInvokingEventArgs(KernelFunction function, KernelArguments arguments) : + base(function, arguments, metadata: null) { } - - /// - /// Skip the current function invoking attempt. - /// - public void Skip() - { - this._skipRequested = true; - } - - private bool _skipRequested; } diff --git a/dotnet/src/SemanticKernel.Abstractions/Events/KernelEventArgs.cs b/dotnet/src/SemanticKernel.Abstractions/Events/KernelEventArgs.cs new file mode 100644 index 000000000000..6c659dc53f33 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Events/KernelEventArgs.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel; + +/// Provides an for operations related to -based operations. +[Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] +public abstract class KernelEventArgs : EventArgs +{ + /// + /// Initializes a new instance of the class. + /// + /// The with which this event is associated. + /// The arguments associated with the operation. + /// A dictionary of metadata associated with the operation. + internal KernelEventArgs(KernelFunction function, KernelArguments arguments, IReadOnlyDictionary? 
metadata) + { + Verify.NotNull(function); + Verify.NotNull(arguments); + + this.Function = function; + this.Arguments = arguments; + this.Metadata = metadata; + } + + /// + /// Gets the with which this event is associated. + /// + public KernelFunction Function { get; } + + /// + /// Gets the arguments associated with the operation. + /// + public KernelArguments Arguments { get; } + + /// + /// Gets a dictionary of metadata related to the event. + /// + public IReadOnlyDictionary? Metadata { get; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Events/PromptRenderedEventArgs.cs b/dotnet/src/SemanticKernel.Abstractions/Events/PromptRenderedEventArgs.cs new file mode 100644 index 000000000000..83f14a76aafd --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Events/PromptRenderedEventArgs.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides a used in events raised just after a prompt has been rendered. +/// +[Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] +public sealed class PromptRenderedEventArgs : CancelKernelEventArgs +{ + private string _renderedPrompt; + + /// + /// Initializes a new instance of the class. + /// + /// The with which this event is associated. + /// The arguments associated with the operation. + /// The prompt that was rendered by the associated operation. + public PromptRenderedEventArgs(KernelFunction function, KernelArguments arguments, string renderedPrompt) : + base(function, arguments, metadata: null) + { + this.RenderedPrompt = renderedPrompt; + } + + /// Gets or sets the rendered prompt. + /// + /// An event handler may view the rendered prompt and change it, if desired. + /// If there are multiple event handlers registered, subsequent handlers may + /// overwrite a value set by a previous handler. The final result is what will + /// be the prompt used by the system. + /// + public string RenderedPrompt + { + get => this._renderedPrompt; + [MemberNotNull(nameof(_renderedPrompt))] + set + { + Verify.NotNullOrWhiteSpace(value); + this._renderedPrompt = value; + } + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Events/PromptRenderingEventArgs.cs b/dotnet/src/SemanticKernel.Abstractions/Events/PromptRenderingEventArgs.cs new file mode 100644 index 000000000000..b808a6e8c293 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Events/PromptRenderingEventArgs.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides a used in events raised just before a prompt is rendered. +/// +[Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] +public sealed class PromptRenderingEventArgs : KernelEventArgs +{ + /// + /// Initializes a new instance of the class. + /// + /// The with which this event is associated. + /// The arguments associated with the operation. 
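The prompt-side counterpart works the same way: a handler may inspect and rewrite `RenderedPrompt` after rendering. A sketch, assuming the `Kernel` exposes a `PromptRendered` event with the argument type added above (the event itself is outside this excerpt):

```csharp
using System;
using Microsoft.SemanticKernel;

#pragma warning disable CS0618 // Events are deprecated in favor of filters
// Sketch: append an instruction to every rendered prompt.
static void WirePromptEvents(Kernel kernel)
{
    kernel.PromptRendered += (object? sender, PromptRenderedEventArgs e) =>
    {
        // The setter rejects null/whitespace, so always assign a complete prompt.
        e.RenderedPrompt = e.RenderedPrompt + "\n\nAnswer concisely.";
    };
}
#pragma warning restore CS0618
```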
+ public PromptRenderingEventArgs(KernelFunction function, KernelArguments arguments) : + base(function, arguments, metadata: null) + { + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Events/SKCancelEventArgs.cs b/dotnet/src/SemanticKernel.Abstractions/Events/SKCancelEventArgs.cs deleted file mode 100644 index a3cca0bd5f16..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Events/SKCancelEventArgs.cs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Events; -#pragma warning disable CA1001 // Types that own disposable fields should be disposable - -/// -/// Base arguments for cancellable events. -/// -public abstract class SKCancelEventArgs : SKEventArgs -{ - private readonly CancellationTokenSource _cancelTokenSource = new(); - - internal SKCancelEventArgs(FunctionView functionView, SKContext context) : base(functionView, context) - { - } - - /// - /// Cancellation token to be used to cancel further execution. - /// - public CancellationToken CancelToken => this._cancelTokenSource.Token; - - /// - /// Cancel all further execution. - /// - public void Cancel() - { - this._cancelTokenSource.Cancel(); - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Events/SKEventArgs.cs b/dotnet/src/SemanticKernel.Abstractions/Events/SKEventArgs.cs deleted file mode 100644 index e04f0d7bbc03..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Events/SKEventArgs.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Events; - -/// -/// Base arguments for events. -/// -public abstract class SKEventArgs : EventArgs -{ - /// - /// Initializes a new instance of the class. - /// - /// Function view details - /// Context related to the event - internal SKEventArgs(FunctionView functionView, SKContext context) - { - Verify.NotNull(context); - Verify.NotNull(functionView); - - this.FunctionView = functionView; - this.SKContext = context; - } - - /// - /// Function view details. - /// - public FunctionView FunctionView { get; } - - /// - /// Context related to the event. - /// - public SKContext SKContext { get; } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionFilterContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionFilterContext.cs new file mode 100644 index 000000000000..e75093b7a678 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionFilterContext.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Base class with data related to function invocation. +/// +[Experimental("SKEXP0004")] +public abstract class FunctionFilterContext +{ + /// + /// Initializes a new instance of the class. + /// + /// The with which this filter is associated. + /// The arguments associated with the operation. + /// A dictionary of metadata associated with the operation. + internal FunctionFilterContext(KernelFunction function, KernelArguments arguments, IReadOnlyDictionary? 
metadata) + { + Verify.NotNull(function); + Verify.NotNull(arguments); + + this.Function = function; + this.Arguments = arguments; + this.Metadata = metadata; + } + + /// + /// Gets the with which this filter is associated. + /// + public KernelFunction Function { get; } + + /// + /// Gets the arguments associated with the operation. + /// + public KernelArguments Arguments { get; } + + /// + /// Gets a dictionary of metadata associated with the operation. + /// + public IReadOnlyDictionary? Metadata { get; } + + /// + /// Gets or sets a value indicating whether the operation associated with + /// the filter should be canceled. + /// + /// + /// The filter may set to true to indicate that the operation should + /// be canceled. If there are multiple filters registered, subsequent filters + /// may see and change a value set by a previous filter. The final result is what will + /// be considered by the component that triggers filter. + /// + public bool Cancel { get; set; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvokedContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvokedContext.cs new file mode 100644 index 000000000000..beefaab2b9b1 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvokedContext.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Class with data related to function after invocation. +/// +[Experimental("SKEXP0004")] +public sealed class FunctionInvokedContext : FunctionFilterContext +{ + /// + /// Initializes a new instance of the class. + /// + /// The arguments associated with the operation. + /// The result of the function's invocation. + public FunctionInvokedContext(KernelArguments arguments, FunctionResult result) + : base(result.Function, arguments, (result ?? throw new ArgumentNullException(nameof(result))).Metadata) + { + this.Result = result; + this.ResultValue = result.Value; + } + + /// + /// Gets the result of the function's invocation. + /// + public FunctionResult Result { get; } + + /// + /// Gets the raw result of the function's invocation. + /// + internal object? ResultValue { get; private set; } + + /// + /// Sets an object to use as the overridden new result for the function's invocation. + /// + /// The value to use as the new result of the function's invocation. + public void SetResultValue(object? value) + { + this.ResultValue = value; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvokingContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvokingContext.cs new file mode 100644 index 000000000000..47067ded0389 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvokingContext.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Class with data related to function before invocation. +/// +[Experimental("SKEXP0004")] +public sealed class FunctionInvokingContext : FunctionFilterContext +{ + /// + /// Initializes a new instance of the class. + /// + /// The with which this filter is associated. + /// The arguments associated with the operation. 
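These contexts pair with the `IFunctionFilter` interface added immediately below. A sketch of a filter that uses `Cancel` and `SetResultValue`; how filters are registered on the kernel is outside this excerpt, so only the contract shown here is assumed:

```csharp
using System;
using Microsoft.SemanticKernel;

// Sketch: audit and gate function invocations via the new filter model.
public sealed class AuditFunctionFilter : IFunctionFilter
{
    public void OnFunctionInvoking(FunctionInvokingContext context)
    {
        Console.WriteLine($"Invoking {context.Function.Name}");

        if (context.Function.Name == "DeleteAllFiles")
        {
            context.Cancel = true; // abort the invocation
        }
    }

    public void OnFunctionInvoked(FunctionInvokedContext context)
    {
        // Replace the raw result before it is returned to the caller.
        context.SetResultValue($"[audited] {context.Result}");
    }
}
```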
+ public FunctionInvokingContext(KernelFunction function, KernelArguments arguments) + : base(function, arguments, metadata: null) + { + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Function/IFunctionFilter.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/IFunctionFilter.cs new file mode 100644 index 000000000000..8914f10ca675 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/IFunctionFilter.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Interface for filtering actions during function invocation. +/// +[Experimental("SKEXP0004")] +public interface IFunctionFilter +{ + /// + /// Method which is executed before function invocation. + /// + /// Data related to function before invocation. + void OnFunctionInvoking(FunctionInvokingContext context); + + /// + /// Method which is executed after function invocation. + /// + /// Data related to function after invocation. + void OnFunctionInvoked(FunctionInvokedContext context); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/IPromptFilter.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/IPromptFilter.cs new file mode 100644 index 000000000000..824fc18dd817 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/IPromptFilter.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Interface for filtering actions during prompt rendering. +/// +[Experimental("SKEXP0004")] +public interface IPromptFilter +{ + /// + /// Method which is executed before prompt rendering. + /// + /// Data related to prompt before rendering. + void OnPromptRendering(PromptRenderingContext context); + + /// + /// Method which is executed after prompt rendering. + /// + /// Data related to prompt after rendering. + void OnPromptRendered(PromptRenderedContext context); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptFilterContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptFilterContext.cs new file mode 100644 index 000000000000..7b4090404afe --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptFilterContext.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Base class with data related to prompt rendering. +/// +[Experimental("SKEXP0004")] +public abstract class PromptFilterContext +{ + /// + /// Initializes a new instance of the class. + /// + /// The with which this filter is associated. + /// The arguments associated with the operation. + /// A dictionary of metadata associated with the operation. + internal PromptFilterContext(KernelFunction function, KernelArguments arguments, IReadOnlyDictionary? metadata) + { + Verify.NotNull(function); + Verify.NotNull(arguments); + + this.Function = function; + this.Arguments = arguments; + this.Metadata = metadata; + } + + /// + /// Gets the with which this filter is associated. + /// + public KernelFunction Function { get; } + + /// + /// Gets the arguments associated with the operation. + /// + public KernelArguments Arguments { get; } + + /// + /// Gets a dictionary of metadata associated with the operation. + /// + public IReadOnlyDictionary? 
Metadata { get; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderedContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderedContext.cs new file mode 100644 index 000000000000..e14e685c9181 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderedContext.cs @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Class with data related to prompt after rendering. +/// +[Experimental("SKEXP0004")] +public sealed class PromptRenderedContext : PromptFilterContext +{ + private string _renderedPrompt; + + /// + /// Initializes a new instance of the class. + /// + /// The with which this filter is associated. + /// The arguments associated with the operation. + /// The prompt that was rendered by the associated operation. + public PromptRenderedContext(KernelFunction function, KernelArguments arguments, string renderedPrompt) + : base(function, arguments, metadata: null) + { + this.RenderedPrompt = renderedPrompt; + } + + /// + /// Gets or sets a value indicating whether the operation associated with + /// the filter should be canceled. + /// + /// + /// The filter may set to true to indicate that the operation should + /// be canceled. If there are multiple filters registered, subsequent filters + /// may see and change a value set by a previous filter. The final result is what will + /// be considered by the component that triggers filter. + /// + public bool Cancel { get; set; } + + /// + /// Gets or sets the rendered prompt. + /// + /// + /// The filter may view the rendered prompt and change it, if desired. + /// If there are multiple filters registered, subsequent filters may + /// overwrite a value set by a previous filter. The final result is what will + /// be the prompt used by the system. + /// + public string RenderedPrompt + { + get => this._renderedPrompt; + [MemberNotNull(nameof(_renderedPrompt))] + set + { + Verify.NotNullOrWhiteSpace(value); + this._renderedPrompt = value; + } + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderingContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderingContext.cs new file mode 100644 index 000000000000..93ac57b32151 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderingContext.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Class with data related to prompt before rendering. +/// +[Experimental("SKEXP0004")] +public sealed class PromptRenderingContext : PromptFilterContext +{ + /// + /// Initializes a new instance of the class. + /// + /// The with which this filter is associated. + /// The arguments associated with the operation. + public PromptRenderingContext(KernelFunction function, KernelArguments arguments) + : base(function, arguments, metadata: null) + { + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/FromKernelServicesAttribute.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/FromKernelServicesAttribute.cs new file mode 100644 index 000000000000..ad9c5ba33581 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/FromKernelServicesAttribute.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. 
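And the prompt-filter counterpart, combining `IPromptFilter` with the `PromptRenderingContext`/`PromptRenderedContext` types above. A sketch only; registration is again assumed to happen elsewhere:

```csharp
using System;
using Microsoft.SemanticKernel;

// Sketch: block or rewrite rendered prompts before they reach the model.
public sealed class SafetyPromptFilter : IPromptFilter
{
    public void OnPromptRendering(PromptRenderingContext context)
    {
        Console.WriteLine($"Rendering prompt for {context.Function.Name}");
    }

    public void OnPromptRendered(PromptRenderedContext context)
    {
        if (context.RenderedPrompt.Contains("ssn:", StringComparison.OrdinalIgnoreCase))
        {
            context.Cancel = true; // do not send prompts containing sensitive data
            return;
        }

        context.RenderedPrompt += "\n\nRespond in English.";
    }
}
```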
+ +using System; + +namespace Microsoft.SemanticKernel; + +/// +/// Specifies that an argument to a should be supplied from the associated +/// 's rather than from . +/// +[AttributeUsage(AttributeTargets.Parameter, AllowMultiple = false)] +public sealed class FromKernelServicesAttribute : Attribute +{ + /// Initializes the attribute. + public FromKernelServicesAttribute() { } + + /// Initializes the attribute with the specified service key. + /// The optional service key to use when resolving a service. + public FromKernelServicesAttribute(object? serviceKey) => this.ServiceKey = serviceKey; + + /// Gets the key to use when searching . + public object? ServiceKey { get; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/FunctionResult.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/FunctionResult.cs new file mode 100644 index 000000000000..b852ef9e32d6 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/FunctionResult.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Globalization; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents the result of a invocation. +/// +public sealed class FunctionResult +{ + /// + /// Initializes a new instance of the class. + /// + /// The whose result is represented by this instance. + /// The resulting object of the function's invocation. + /// The culture configured on the that executed the function. + /// Metadata associated with the function's execution + public FunctionResult(KernelFunction function, object? value = null, CultureInfo? culture = null, IReadOnlyDictionary? metadata = null) + { + Verify.NotNull(function); + + this.Function = function; + this.Value = value; + this.Culture = culture ?? CultureInfo.InvariantCulture; + this.Metadata = metadata; + } + + /// + /// Gets the whose result is represented by this instance. + /// + public KernelFunction Function { get; } + + /// + /// Gets any metadata associated with the function's execution. + /// + public IReadOnlyDictionary? Metadata { get; } + + /// + /// Gets the of the function's result. + /// + /// + /// This or a base type is the type expected to be passed as the generic + /// argument to . + /// + public Type? ValueType => this.Value?.GetType(); + + /// + /// Returns function result value. + /// + /// Target type for result value casting. + /// Thrown when it's not possible to cast result value to . + public T? GetValue() + { + if (this.Value is null) + { + return default; + } + + if (this.Value is T typedResult) + { + return typedResult; + } + + if (this.Value is KernelContent content) + { + if (typeof(T) == typeof(string)) + { + return (T?)(object?)content.ToString(); + } + + if (content.InnerContent is T innerContent) + { + return innerContent; + } + } + + throw new InvalidCastException($"Cannot cast {this.Value.GetType()} to {typeof(T)}"); + } + + /// + public override string ToString() => + InternalTypeConverter.ConvertToString(this.Value, this.Culture) ?? string.Empty; + + /// + /// Function result object. + /// + internal object? Value { get; } + + /// + /// The culture configured on the Kernel that executed the function. 
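A quick illustration of the `FunctionResult.GetValue<T>()` conversion rules implemented above. The sketch assumes `Kernel.InvokeAsync(function, arguments)` returns this `FunctionResult` type; that plumbing is standard but not part of this excerpt:

```csharp
using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

// Sketch: read a function's result using the new FunctionResult type.
static async Task InspectResultAsync(Kernel kernel, KernelFunction function, KernelArguments arguments)
{
    FunctionResult result = await kernel.InvokeAsync(function, arguments);

    // GetValue<T>() resolution order (from the implementation above):
    //   1. null value                                   -> default(T)
    //   2. value is T                                   -> the value itself
    //   3. T == string and the value is a KernelContent -> content.ToString()
    //   4. KernelContent whose InnerContent is T        -> that inner content
    //   5. otherwise                                    -> InvalidCastException
    string? text = result.GetValue<string>();

    Console.WriteLine($"{result.ValueType}: {text}");
    Console.WriteLine(result); // ToString() honors the kernel-configured culture
}
```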
+ /// + internal CultureInfo Culture { get; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/FunctionView.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/FunctionView.cs deleted file mode 100644 index a6d0818c9f1c..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/FunctionView.cs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// A function view is a read-only representation of a function. -/// -/// Name of the function. The name is used by the function collection and in prompt templates e.g. {{pluginName.functionName}} -/// Name of the plugin containing the function. The name is used by the function collection and in prompt templates e.g. {{pluginName.functionName}} -/// Function description. The description is used in combination with embeddings when searching relevant functions. -/// Optional list of function parameters -public sealed record FunctionView( - string Name, - string PluginName, - string Description = "", - IReadOnlyList? Parameters = null) -{ - /// - /// List of function parameters - /// - public IReadOnlyList Parameters { get; init; } = Parameters ?? Array.Empty(); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/IFunctionCollection.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/IFunctionCollection.cs deleted file mode 100644 index dc6fad98e3cf..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/IFunctionCollection.cs +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Function collection interface. -/// -[SuppressMessage("Naming", "CA1711:Identifiers should not have incorrect suffix")] -public interface IFunctionCollection : IReadOnlyFunctionCollection -{ - /// - /// Add a function to the collection - /// - /// Function delegate - /// Self instance - IFunctionCollection AddFunction(ISKFunction functionInstance); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/IReadOnlyFunctionCollection.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/IReadOnlyFunctionCollection.cs deleted file mode 100644 index 67360a0cd9c0..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/IReadOnlyFunctionCollection.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel.Diagnostics; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Read-only function collection interface. -/// -[SuppressMessage("Naming", "CA1711:Identifiers should not have incorrect suffix")] -public interface IReadOnlyFunctionCollection -{ - /// - /// Gets the function stored in the collection. - /// - /// The name of the function to retrieve. - /// The function retrieved from the collection. - /// The specified function could not be found in the collection. 
- ISKFunction GetFunction(string functionName); - - /// - /// Gets the function stored in the collection. - /// - /// The name of the plugin with which the function is associated. - /// The name of the function to retrieve. - /// The function retrieved from the collection. - /// The specified function could not be found in the collection. - ISKFunction GetFunction(string pluginName, string functionName); - - /// - /// Check if a function is available in the current context, and return it. - /// - /// The name of the function to retrieve. - /// When this method returns, the function that was retrieved if one with the specified name was found; otherwise, . - /// if the function was found; otherwise, . - bool TryGetFunction(string functionName, [NotNullWhen(true)] out ISKFunction? availableFunction); - - /// - /// Check if a function is available in the current context, and return it. - /// - /// The name of the plugin with which the function is associated. - /// The name of the function to retrieve. - /// When this method returns, the function that was retrieved if one with the specified name was found; otherwise, . - /// if the function was found; otherwise, . - bool TryGetFunction(string pluginName, string functionName, [NotNullWhen(true)] out ISKFunction? availableFunction); - - /// - /// Get a snapshot all registered functions details, minus the delegates - /// - /// An object containing all the functions details - IReadOnlyList GetFunctionViews(); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/IReadOnlyKernelPluginCollection.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/IReadOnlyKernelPluginCollection.cs new file mode 100644 index 000000000000..51392e5038d8 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/IReadOnlyKernelPluginCollection.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// Provides a read-only collection of s. +public interface IReadOnlyKernelPluginCollection : IReadOnlyCollection +{ + /// Gets a plugin from the collection by name. + /// The name of the plugin. + /// The plugin. + KernelPlugin this[string name] { get; } + + /// Gets a plugin from the collection by name. + /// The name of the plugin. + /// The plugin if found in the collection. + /// true if the collection contains the plugin; otherwise, false. + bool TryGetPlugin(string name, [NotNullWhen(true)] out KernelPlugin? plugin); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/ISKFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/ISKFunction.cs deleted file mode 100644 index 02565b767265..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/ISKFunction.cs +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Semantic Kernel callable function interface -/// -public interface ISKFunction -{ - /// - /// Name of the function. The name is used by the function collection and in prompt templates e.g. 
{{pluginName.functionName}} - /// - string Name { get; } - - /// - /// Name of the plugin containing the function. The name is used by the function collection and in prompt templates e.g. {{pluginName.functionName}} - /// - string PluginName { get; } - - /// - /// Function description. The description is used in combination with embeddings when searching relevant functions. - /// - string Description { get; } - - /// - /// Returns a description of the function, including parameters. - /// - /// An instance of describing the function - FunctionView Describe(); - - /// - /// Invoke the . - /// - /// SK context - /// LLM completion settings (for semantic functions only) - /// The updated context, potentially a new one if context switching is implemented. - /// The to monitor for cancellation requests. The default is . - Task InvokeAsync( - SKContext context, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default); - - #region Obsolete - - /// - /// AI service settings - /// - [Obsolete("Use PromptTemplateConfig.ModelSettings instead. This will be removed in a future release.")] - AIRequestSettings? RequestSettings { get; } - - /// - /// Set the AI service used by the semantic function, passing a factory method. - /// The factory allows to lazily instantiate the client and to properly handle its disposal. - /// - /// AI service factory - /// Self instance - [Obsolete("Use implementation of IAIServiceConfigurationProvider instead. This will be removed in a future release.")] - ISKFunction SetAIService(Func serviceFactory); - - /// - /// Set the AI completion settings used with LLM requests - /// - /// LLM completion settings - /// Self instance - [Obsolete("Use implementation of IAIServiceConfigurationProvider instead. This will be removed in a future release.")] - ISKFunction SetAIConfiguration(AIRequestSettings? requestSettings); - - /// - /// Name of the plugin containing the function. The name is used by the function collection and in prompt templates e.g. {{skillName.functionName}} - /// - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use ISKFunction.SkillName instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - string SkillName { get; } - - /// - /// Set the default function collection to use when the function is invoked - /// without a context or with a context that doesn't have a collection. - /// - /// Kernel's function collection - /// Self instance - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - ISKFunction SetDefaultSkillCollection(IReadOnlyFunctionCollection skills); - - /// - /// Set the default function collection to use when the function is invoked - /// without a context or with a context that doesn't have a collection. - /// - /// Kernel's function collection - /// Self instance - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - ISKFunction SetDefaultFunctionCollection(IReadOnlyFunctionCollection functions); - - /// - /// Whether the function is defined using a prompt template. - /// IMPORTANT: native functions might use semantic functions internally, - /// so when this property is False, executing the function might still involve AI calls. - /// - [Obsolete("Kernel no longer differentiates between Semantic and Native functions. 
This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - bool IsSemantic { get; } - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs new file mode 100644 index 000000000000..4f77ab473909 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; + +#pragma warning disable CA1710 // Identifiers should have correct suffix + +namespace Microsoft.SemanticKernel; + +/// +/// Provides a collection of arguments for operations such as 's InvokeAsync +/// and 's RenderAsync. +/// +/// +/// A is a dictionary of argument names and values. It also carries a +/// , accessible via the property. +/// +public sealed class KernelArguments : IDictionary, IReadOnlyDictionary +{ + /// Dictionary of name/values for all the arguments in the instance. + private readonly Dictionary _arguments; + + /// + /// Initializes a new instance of the class with the specified AI execution settings. + /// + /// The prompt execution settings. + public KernelArguments(PromptExecutionSettings? executionSettings = null) + { + this._arguments = new(StringComparer.OrdinalIgnoreCase); + + if (executionSettings is not null) + { + this.ExecutionSettings = new Dictionary() { { PromptExecutionSettings.DefaultServiceId, executionSettings } }; + } + } + + /// + /// Initializes a new instance of the class that contains elements copied from the specified . + /// + /// The whose elements are copied the new . + /// The prompt execution settings. + /// + /// If is non-null, it is used as the for this new instance. + /// Otherwise, if the source is a , its are used. + /// + public KernelArguments(IDictionary source, Dictionary? executionSettings = null) + { + Verify.NotNull(source); + + this._arguments = new(source, StringComparer.OrdinalIgnoreCase); + this.ExecutionSettings = executionSettings ?? (source as KernelArguments)?.ExecutionSettings; + } + + /// + /// Gets or sets the prompt execution settings. + /// + public IReadOnlyDictionary? ExecutionSettings { get; set; } + + /// + /// Gets the number of arguments contained in the . + /// + public int Count => this._arguments.Count; + + /// Adds the specified argument name and value to the . + /// The name of the argument to add. + /// The value of the argument to add. + /// is null. + /// An argument with the same name already exists in the . + public void Add(string name, object? value) + { + Verify.NotNull(name); + this._arguments.Add(name, value); + } + + /// Removes the argument value with the specified name from the . + /// The name of the argument value to remove. + /// is null. + public bool Remove(string name) + { + Verify.NotNull(name); + return this._arguments.Remove(name); + } + + /// Removes all arguments names and values from the . + /// + /// This does not affect the property. To clear it as well, set it to null. + /// + public void Clear() => this._arguments.Clear(); + + /// Determines whether the contains an argument with the specified name. + /// The name of the argument to locate. + /// true if the arguments contains an argument with the specified named; otherwise, false. + /// is null. 
+ public bool ContainsName(string name) + { + Verify.NotNull(name); + return this._arguments.ContainsKey(name); + } + + /// Gets the value associated with the specified argument name. + /// The name of the argument value to get. + /// + /// When this method returns, contains the value associated with the specified name, + /// if the name is found; otherwise, null. + /// + /// true if the arguments contains an argument with the specified name; otherwise, false. + /// is null. + public bool TryGetValue(string name, out object? value) + { + Verify.NotNull(name); + return this._arguments.TryGetValue(name, out value); + } + + /// Gets or sets the value associated with the specified argument name. + /// The name of the argument value to get or set. + /// is null. + public object? this[string name] + { + get + { + Verify.NotNull(name); + return this._arguments[name]; + } + set + { + Verify.NotNull(name); + this._arguments[name] = value; + } + } + + /// Gets an of all of the arguments' names. + public ICollection Names => this._arguments.Keys; + + /// Gets an of all of the arguments' values. + public ICollection Values => this._arguments.Values; + + #region Interface implementations + /// + ICollection IDictionary.Keys => this._arguments.Keys; + + /// + IEnumerable IReadOnlyDictionary.Keys => this._arguments.Keys; + + /// + IEnumerable IReadOnlyDictionary.Values => this._arguments.Values; + + /// + bool ICollection>.IsReadOnly => false; + + /// + object? IReadOnlyDictionary.this[string key] => this._arguments[key]; + + /// + object? IDictionary.this[string key] + { + get => this._arguments[key]; + set => this._arguments[key] = value; + } + + /// + void IDictionary.Add(string key, object? value) => this._arguments.Add(key, value); + + /// + bool IDictionary.ContainsKey(string key) => this._arguments.ContainsKey(key); + + /// + bool IDictionary.Remove(string key) => this._arguments.Remove(key); + + /// + bool IDictionary.TryGetValue(string key, out object? value) => this._arguments.TryGetValue(key, out value); + + /// + void ICollection>.Add(KeyValuePair item) => this._arguments.Add(item.Key, item.Value); + + /// + bool ICollection>.Contains(KeyValuePair item) => ((ICollection>)this._arguments).Contains(item); + + /// + void ICollection>.CopyTo(KeyValuePair[] array, int arrayIndex) => ((ICollection>)this._arguments).CopyTo(array, arrayIndex); + + /// + bool ICollection>.Remove(KeyValuePair item) => this._arguments.Remove(item.Key); + + /// + IEnumerator> IEnumerable>.GetEnumerator() => this._arguments.GetEnumerator(); + + /// + IEnumerator IEnumerable.GetEnumerator() => this._arguments.GetEnumerator(); + + /// + bool IReadOnlyDictionary.ContainsKey(string key) => this._arguments.ContainsKey(key); + + /// + bool IReadOnlyDictionary.TryGetValue(string key, out object? value) => this._arguments.TryGetValue(key, out value); + #endregion +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs new file mode 100644 index 000000000000..30cace107c82 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs @@ -0,0 +1,394 @@ +// Copyright (c) Microsoft. All rights reserved. 
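For illustration only, a minimal sketch of how the KernelArguments type above is meant to be consumed; the Kernel, KernelFunction, and PromptExecutionSettings instances are assumed to be created elsewhere and are not part of this file:

using System.Threading.Tasks;
using Microsoft.SemanticKernel;

internal static class KernelArgumentsSketch
{
    // Builds arguments (ordinal case-insensitive names) that carry optional execution
    // settings and passes them to a function invocation.
    public static async Task<string?> RunAsync(Kernel kernel, KernelFunction function, PromptExecutionSettings settings)
    {
        KernelArguments arguments = new(settings)
        {
            ["city"] = "Paris",
            ["style"] = "formal",
        };

        FunctionResult result = await function.InvokeAsync(kernel, arguments);
        return result.GetValue<string>();
    }
}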
+ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents a function that can be invoked as part of a Semantic Kernel workload. +/// +public abstract class KernelFunction +{ + /// The measurement tag name for the function name. + private protected const string MeasurementFunctionTagName = "semantic_kernel.function.name"; + + /// The measurement tag name for the function error type. + private protected const string MeasurementErrorTagName = "error.type"; + + /// for function-related activities. + private static readonly ActivitySource s_activitySource = new("Microsoft.SemanticKernel"); + + /// for function-related metrics. + private protected static readonly Meter s_meter = new("Microsoft.SemanticKernel"); + + /// to record function invocation duration. + private static readonly Histogram s_invocationDuration = s_meter.CreateHistogram( + name: "semantic_kernel.function.invocation.duration", + unit: "s", + description: "Measures the duration of a function’s execution"); + + /// to record function streaming duration. + /// + /// As this metric spans the full async iterator's lifecycle, it is inclusive of any time + /// spent in the consuming code between MoveNextAsync calls on the enumerator. + /// + private static readonly Histogram s_streamingDuration = s_meter.CreateHistogram( + name: "semantic_kernel.function.streaming.duration", + unit: "s", + description: "Measures the duration of a function’s streaming execution"); + + /// + /// Gets the name of the function. + /// + /// + /// The name is used anywhere the function needs to be identified, such as in plans describing what functions + /// should be invoked when, or as part of lookups in a plugin's function collection. Function names are generally + /// handled in an ordinal case-insensitive manner. + /// + public string Name => this.Metadata.Name; + + /// + /// Gets a description of the function. + /// + /// + /// The description may be supplied to a model in order to elaborate on the function's purpose, + /// in case it may be beneficial for the model to recommend invoking the function. + /// + public string Description => this.Metadata.Description; + + /// + /// Gets the metadata describing the function. + /// + /// An instance of describing the function + public KernelFunctionMetadata Metadata { get; init; } + + /// + /// Gets the prompt execution settings. + /// + internal IReadOnlyDictionary? ExecutionSettings { get; } + + /// + /// Initializes a new instance of the class. + /// + /// A name of the function to use as its . + /// The description of the function to use as its . + /// The metadata describing the parameters to the function. + /// The metadata describing the return parameter of the function. + /// + /// The to use with the function. These will apply unless they've been + /// overridden by settings passed into the invocation of the function. + /// + internal KernelFunction(string name, string description, IReadOnlyList parameters, KernelReturnParameterMetadata? returnParameter = null, Dictionary? 
executionSettings = null) + { + Verify.NotNull(name); + Verify.ParametersUniqueness(parameters); + + this.Metadata = new KernelFunctionMetadata(name) + { + Description = description, + Parameters = parameters, + ReturnParameter = returnParameter ?? KernelReturnParameterMetadata.Empty, + }; + this.ExecutionSettings = executionSettings; + } + + /// + /// Invokes the . + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// The result of the function's execution. + /// is null. + /// The 's invocation was canceled. + public async Task InvokeAsync( + Kernel kernel, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + + using var activity = s_activitySource.StartActivity(this.Name); + ILogger logger = kernel.LoggerFactory.CreateLogger(this.Name) ?? NullLogger.Instance; + + // Ensure arguments are initialized. + arguments ??= new KernelArguments(); + logger.LogFunctionInvoking(this.Name); + logger.LogFunctionArguments(arguments); + + TagList tags = new() { { MeasurementFunctionTagName, this.Name } }; + long startingTimestamp = Stopwatch.GetTimestamp(); + FunctionResult? functionResult = null; + try + { + // Quick check for cancellation after logging about function start but before doing any real work. + cancellationToken.ThrowIfCancellationRequested(); + + // Invoke pre-invocation event handler. If it requests cancellation, throw. +#pragma warning disable CS0618 // Events are deprecated + var invokingEventArgs = kernel.OnFunctionInvoking(this, arguments); +#pragma warning restore CS0618 // Events are deprecated + + // Invoke pre-invocation filter. If it requests cancellation, throw. + var invokingContext = kernel.OnFunctionInvokingFilter(this, arguments); + + if (invokingEventArgs?.Cancel is true) + { + throw new OperationCanceledException($"A {nameof(Kernel)}.{nameof(Kernel.FunctionInvoking)} event handler requested cancellation before function invocation."); + } + + if (invokingContext?.Cancel is true) + { + throw new OperationCanceledException("A function filter requested cancellation before function invocation."); + } + + // Invoke the function. + functionResult = await this.InvokeCoreAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + // Invoke the post-invocation event handler. If it requests cancellation, throw. +#pragma warning disable CS0618 // Events are deprecated + var invokedEventArgs = kernel.OnFunctionInvoked(this, arguments, functionResult); +#pragma warning restore CS0618 // Events are deprecated + + // Invoke the post-invocation filter. If it requests cancellation, throw. + var invokedContext = kernel.OnFunctionInvokedFilter(arguments, functionResult); + + if (invokedEventArgs is not null) + { + // Apply any changes from the event handlers to final result. + functionResult = new FunctionResult(this, invokedEventArgs.ResultValue, functionResult.Culture, invokedEventArgs.Metadata ?? functionResult.Metadata); + } + + if (invokedContext is not null) + { + // Apply any changes from the function filters to final result. + functionResult = new FunctionResult(this, invokedContext.ResultValue, functionResult.Culture, invokedContext.Metadata ?? 
functionResult.Metadata); + } + + if (invokedEventArgs?.Cancel is true) + { + throw new OperationCanceledException($"A {nameof(Kernel)}.{nameof(Kernel.FunctionInvoked)} event handler requested cancellation after function invocation."); + } + + if (invokedContext?.Cancel is true) + { + throw new OperationCanceledException("A function filter requested cancellation after function invocation."); + } + + logger.LogFunctionInvokedSuccess(this.Name); + logger.LogFunctionResultValue(functionResult.Value); + + return functionResult; + } + catch (Exception ex) + { + HandleException(ex, logger, activity, this, kernel, arguments, functionResult, ref tags); + throw; + } + finally + { + // Record the invocation duration metric and log the completion. + TimeSpan duration = new((long)((Stopwatch.GetTimestamp() - startingTimestamp) * (10_000_000.0 / Stopwatch.Frequency))); + s_invocationDuration.Record(duration.TotalSeconds, in tags); + logger.LogFunctionComplete(duration.TotalSeconds); + } + } + + /// + /// Invokes the . + /// + /// Specifies the type of the result value of the function. + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// The result of the function's execution, cast to . + /// is null. + /// The 's invocation was canceled. + /// The function's result could not be cast to . + public async Task InvokeAsync( + Kernel kernel, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + FunctionResult result = await this.InvokeAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + return result.GetValue(); + } + + /// + /// Invokes the and streams its results. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// An for streaming the results of the function's invocation. + /// + /// The function will not be invoked until an enumerator is retrieved from the returned + /// and its iteration initiated via an initial call to . + /// + public IAsyncEnumerable InvokeStreamingAsync( + Kernel kernel, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) => + this.InvokeStreamingAsync(kernel, arguments, cancellationToken); + + /// + /// Invokes the and streams its results. + /// + /// Specifies the type of the result values of the function. + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// An for streaming the results of the function's invocation. + /// is null. + /// + /// The function will not be invoked until an enumerator is retrieved from the returned + /// and its iteration initiated via an initial call to . + /// + public async IAsyncEnumerable InvokeStreamingAsync( + Kernel kernel, + KernelArguments? arguments = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + + using var activity = s_activitySource.StartActivity(this.Name); + ILogger logger = kernel.LoggerFactory.CreateLogger(this.Name) ?? 
NullLogger.Instance; + + arguments ??= new KernelArguments(); + logger.LogFunctionStreamingInvoking(this.Name); + logger.LogFunctionArguments(arguments); + + TagList tags = new() { { MeasurementFunctionTagName, this.Name } }; + long startingTimestamp = Stopwatch.GetTimestamp(); + try + { + IAsyncEnumerator enumerator; + try + { + // Quick check for cancellation after logging about function start but before doing any real work. + cancellationToken.ThrowIfCancellationRequested(); + + // Invoke pre-invocation event handler. If it requests cancellation, throw. +#pragma warning disable CS0618 // Events are deprecated + var invokingEventArgs = kernel.OnFunctionInvoking(this, arguments); +#pragma warning restore CS0618 // Events are deprecated + + // Invoke pre-invocation filter. If it requests cancellation, throw. + var invokingContext = kernel.OnFunctionInvokingFilter(this, arguments); + + if (invokingEventArgs?.Cancel is true) + { + throw new OperationCanceledException($"A {nameof(Kernel)}.{nameof(Kernel.FunctionInvoking)} event handler requested cancellation before function invocation."); + } + + if (invokingContext?.Cancel is true) + { + throw new OperationCanceledException("A function filter requested cancellation before function invocation."); + } + + // Invoke the function and get its streaming enumerator. + enumerator = this.InvokeStreamingCoreAsync(kernel, arguments, cancellationToken).GetAsyncEnumerator(cancellationToken); + + // yielding within a try/catch isn't currently supported, so we break out of the try block + // in order to then wrap the actual MoveNextAsync in its own try/catch and allow the yielding + // to be lifted to be outside of the try/catch. + } + catch (Exception ex) + { + HandleException(ex, logger, activity, this, kernel, arguments, result: null, ref tags); + throw; + } + + // Ensure we clean up after the enumerator. + await using (enumerator.ConfigureAwait(false)) + { + while (true) + { + try + { + // Move to the next streaming result. + if (!await enumerator.MoveNextAsync().ConfigureAwait(false)) + { + break; + } + } + catch (Exception ex) + { + HandleException(ex, logger, activity, this, kernel, arguments, result: null, ref tags); + throw; + } + + // Yield the next streaming result. + yield return enumerator.Current; + } + } + + // The FunctionInvoked hook and filter are not used when streaming. + } + finally + { + // Record the streaming duration metric and log the completion. + TimeSpan duration = new((long)((Stopwatch.GetTimestamp() - startingTimestamp) * (10_000_000.0 / Stopwatch.Frequency))); + s_streamingDuration.Record(duration.TotalSeconds, in tags); + logger.LogFunctionStreamingComplete(duration.TotalSeconds); + } + } + + /// + /// Invokes the . + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments to pass to the function's invocation, including any . + /// The updated context, potentially a new one if context switching is implemented. + /// The to monitor for cancellation requests. The default is . + protected abstract ValueTask InvokeCoreAsync( + Kernel kernel, + KernelArguments arguments, + CancellationToken cancellationToken); + + /// + /// Invokes the and streams its results. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments to pass to the function's invocation, including any . + /// The updated context, potentially a new one if context switching is implemented. + /// The to monitor for cancellation requests. 
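A hedged sketch of the two invocation paths defined above; the kernel and function are assumed to exist, and the string chunk type assumes the underlying function streams text:

using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

internal static class KernelFunctionInvocationSketch
{
    // Exercises both invocation paths exposed above; the result types are only examples.
    public static async Task RunAsync(Kernel kernel, KernelFunction function)
    {
        KernelArguments arguments = new() { ["input"] = "Hello" };

        // Non-streaming: a FunctionResult whose value is retrieved via GetValue<T>.
        FunctionResult result = await function.InvokeAsync(kernel, arguments);
        Console.WriteLine(result.GetValue<string>());

        // Streaming: the function does not run until iteration begins on the enumerator.
        await foreach (string chunk in function.InvokeStreamingAsync<string>(kernel, arguments))
        {
            Console.Write(chunk);
        }
    }
}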
The default is . + protected abstract IAsyncEnumerable InvokeStreamingCoreAsync(Kernel kernel, + KernelArguments arguments, + CancellationToken cancellationToken); + + /// Handles special-cases for exception handling when invoking a function. + private static void HandleException( + Exception ex, + ILogger logger, + Activity? activity, + KernelFunction kernelFunction, + Kernel kernel, + KernelArguments arguments, + FunctionResult? result, + ref TagList tags) + { + // Log the exception and add its type to the tags that'll be included with recording the invocation duration. + tags.Add(MeasurementErrorTagName, ex.GetType().FullName); + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + logger.LogFunctionError(ex, ex.Message); + + // If the exception is an OperationCanceledException, wrap it in a KernelFunctionCanceledException + // in order to convey additional details about what function was canceled. This is particularly + // important for cancellation that occurs in response to the FunctionInvoked event, in which case + // there may be a result from a successful function invocation, and we want that result to be + // visible to a consumer if that's needed. + if (ex is OperationCanceledException cancelEx) + { + throw new KernelFunctionCanceledException(kernel, kernelFunction, arguments, result, cancelEx); + } + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionAttribute.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionAttribute.cs new file mode 100644 index 000000000000..927c68b70840 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionAttribute.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ComponentModel; +using System.Globalization; +using System.Reflection; +using System.Threading; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel; + +/// +/// Specifies that a method on a class imported as a plugin should be included as a in the resulting . +/// +/// +/// +/// When the system imports functions from an object, it searches for all public methods tagged with this attribute. +/// If a method is not tagged with this attribute, it may still be imported directly via a +/// or referencing the method directly. +/// +/// +/// A description of the method should be supplied using the . +/// That description will be used both with LLM prompts and embedding comparisons; the quality of +/// the description affects the planner's ability to reason about complex tasks. A +/// should also be provided on each parameter to provide a description of the parameter suitable for consumption +/// by an LLM or embedding. +/// +/// +/// Functions may have any number of parameters. In general, arguments to parameters are supplied via the +/// used to invoke the function, with the arguments matched by name to the parameters of the method. If no argument of the given name +/// is present, but a default value was specified in the method's definition, that default value will be used. If the argument value in +/// is not of the same type as the parameter, the system will attempt to convert the value to the parameter's +/// type using a . +/// +/// +/// However, parameters of the following types are treated specially and are supplied from a source other than from the arguments dictionary: +/// +/// +/// +/// The supplied when invoking the function. +/// +/// +/// +/// The supplied when invoking the function. 
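As a sketch of the usage this attribute enables (the plugin import mechanism itself is defined elsewhere), a hypothetical plugin class whose method and parameter carry the [KernelFunction] and [Description] annotations referred to above:

using System;
using System.ComponentModel;
using Microsoft.SemanticKernel;

// Hypothetical plugin type, for illustration of the attribute and parameter binding described above.
public sealed class TimeInformation
{
    [KernelFunction("get_current_time")]
    [Description("Returns the current time in the requested time zone.")]
    public string GetCurrentTime(
        [Description("A time zone identifier, e.g. 'Pacific Standard Time'.")] string timeZoneId)
    {
        TimeZoneInfo zone = TimeZoneInfo.FindSystemTimeZoneById(timeZoneId);
        return TimeZoneInfo.ConvertTime(DateTimeOffset.UtcNow, zone).ToString("O");
    }
}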
+/// +/// +/// +/// The that represents this function being invoked. +/// +/// +/// +/// The supplied when invoking the function. +/// +/// +/// or +/// The result of from the used when invoking the function. +/// +/// +/// or +/// The result of from the (or an created from it) used when invoking the function. +/// +/// +/// +/// The result of from the used when invoking the function. +/// +/// +/// +/// +/// Arguments may also be fulfilled from the associated 's service provider. If a parameter is attributed +/// with , the system will attempt to resolve the parameter by querying the service provider for a service of the +/// parameter's type. If the service provider does not contain a service of the parameter's type and the parameter is not optional, the invocation will fail. +/// +/// +/// If no value can be derived from any of these means for all parameters, the invocation will fail. +/// +/// +[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)] +public sealed class KernelFunctionAttribute : Attribute +{ + /// Initializes the attribute. + public KernelFunctionAttribute() { } + + /// Initializes the attribute. + /// The name to use for the function. + public KernelFunctionAttribute(string? name) => this.Name = name; + + /// Gets the function's name. + /// If null, a name will based on the name of the attributed method will be used. + public string? Name { get; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionCanceledException.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionCanceledException.cs new file mode 100644 index 000000000000..be3c5b0f7659 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionCanceledException.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +#pragma warning disable RCS1194 // Implement exception constructors. + +namespace Microsoft.SemanticKernel; + +/// +/// Provides an -derived exception type +/// that's thrown from a invocation when a +/// event handler (e.g. ) requests cancellation. +/// +public sealed class KernelFunctionCanceledException : OperationCanceledException +{ + /// Initializes the exception instance. + /// The passed to the invocation of . + /// The whose invocation was canceled. + /// The arguments collection supplied to the invocation of . + /// + /// The result of the invocation, potentially modified by the event handler, + /// if cancellation was requested after the function's successful completion. + /// + /// The exception that is the cause of the current exception. + public KernelFunctionCanceledException( + Kernel kernel, KernelFunction function, KernelArguments arguments, + FunctionResult? functionResult, Exception? innerException = null) : + base($"The invocation of function '{function.Name}' was canceled.", innerException, (innerException as OperationCanceledException)?.CancellationToken ?? default) + { + this.Kernel = kernel; + this.Function = function; + this.Arguments = arguments; + this.FunctionResult = functionResult; + } + + /// Gets the that was passed to the invocation of . + public Kernel Kernel { get; } + + /// Gets the whose invocation was canceled. + public KernelFunction Function { get; } + + /// Gets the arguments collection that was supplied to the invocation of . + public KernelArguments Arguments { get; } + + /// Gets the result of the if it had completed execution before cancellation was requested. + public FunctionResult? 
FunctionResult { get; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionLogMessages.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionLogMessages.cs new file mode 100644 index 000000000000..e45d81112b03 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionLogMessages.cs @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft. All rights reserved. + +#pragma warning disable SYSLIB1006 // Multiple logging methods cannot use the same event id within a class + +using System; +using System.Text.Json; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel; + +/// +/// Extensions for logging invocations. +/// This extension uses the to +/// generate logging code at compile time to achieve optimized code. +/// +internal static partial class KernelFunctionLogMessages +{ + /// + /// Logs invocation of a . + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Function {FunctionName} invoking.")] + public static partial void LogFunctionInvoking( + this ILogger logger, + string functionName); + + /// + /// Logs arguments to a . + /// The action provides the benefit of caching the template parsing result for better performance. + /// And the public method is a helper to serialize the arguments. + /// + private static readonly Action s_logFunctionArguments = + LoggerMessage.Define( + logLevel: LogLevel.Trace, // Sensitive data, logging as trace, disabled by default + eventId: 0, + "Function arguments: {Arguments}"); + public static void LogFunctionArguments(this ILogger logger, KernelArguments arguments) + { + if (logger.IsEnabled(LogLevel.Trace)) + { + try + { + var jsonString = JsonSerializer.Serialize(arguments); + s_logFunctionArguments(logger, jsonString, null); + } + catch (NotSupportedException ex) + { + s_logFunctionArguments(logger, "Failed to serialize arguments to Json", ex); + } + } + } + + /// + /// Logs successful invocation of a . + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Function {FunctionName} succeeded.")] + public static partial void LogFunctionInvokedSuccess(this ILogger logger, string functionName); + + /// + /// Logs result of a . + /// The action provides the benefit of caching the template parsing result for better performance. + /// And the public method is a helper to serialize the result. + /// + private static readonly Action s_logFunctionResultValue = + LoggerMessage.Define( + logLevel: LogLevel.Trace, // Sensitive data, logging as trace, disabled by default + eventId: 0, + "Function result: {ResultValue}"); + public static void LogFunctionResultValue(this ILogger logger, object? resultValue) + { + if (logger.IsEnabled(LogLevel.Trace)) + { + try + { + var jsonString = resultValue?.GetType() == typeof(string) + ? resultValue.ToString() + : JsonSerializer.Serialize(resultValue); + s_logFunctionResultValue(logger, jsonString ?? string.Empty, null); + } + catch (NotSupportedException ex) + { + s_logFunctionResultValue(logger, "Failed to serialize result value to Json", ex); + } + } + } + + /// + /// Logs error. + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Error, + Message = "Function failed. Error: {Message}")] + public static partial void LogFunctionError( + this ILogger logger, + Exception exception, + string message); + + /// + /// Logs complete. + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Function completed. 
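A small, illustrative consumption sketch for the exception type above; the kernel and function instances are assumed to come from elsewhere:

using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

internal static class CancellationHandlingSketch
{
    // Illustrative: observes cancellation requested by an invocation handler or filter.
    public static async Task RunAsync(Kernel kernel, KernelFunction function)
    {
        try
        {
            await function.InvokeAsync(kernel);
        }
        catch (KernelFunctionCanceledException ex)
        {
            // The exception carries the function, its arguments, and, if cancellation happened
            // after the function completed, the FunctionResult that was produced.
            Console.WriteLine($"'{ex.Function.Name}' was canceled; result available: {ex.FunctionResult is not null}");
        }
    }
}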
Duration: {Duration}s")] + public static partial void LogFunctionComplete( + this ILogger logger, + double duration); + + /// + /// Logs streaming invocation of a . + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Function {FunctionName} streaming.")] + public static partial void LogFunctionStreamingInvoking( + this ILogger logger, + string functionName); + + /// + /// Logs streaming complete. + /// + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Function streaming completed. Duration: {Duration}s.")] + public static partial void LogFunctionStreamingComplete( + this ILogger logger, + double duration); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs new file mode 100644 index 000000000000..7f6d3796217d --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunctionMetadata.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides read-only metadata for a . +/// +public sealed class KernelFunctionMetadata +{ + /// The name of the function. + private string _name = string.Empty; + /// The description of the function. + private string _description = string.Empty; + /// The function's parameters. + private IReadOnlyList _parameters = Array.Empty(); + /// The function's return parameter. + private KernelReturnParameterMetadata? _returnParameter; + + /// Initializes the for a function with the specified name. + /// The name of the function. + /// The was null. + /// An invalid name was supplied. + public KernelFunctionMetadata(string name) + { + this.Name = name; + } + + /// Initializes a as a copy of another . + /// The was null. + /// + /// This creates a shallow clone of . The new instance's and + /// properties will return the same objects as in the original instance. + /// + public KernelFunctionMetadata(KernelFunctionMetadata metadata) + { + Verify.NotNull(metadata); + this.Name = metadata.Name; + this.PluginName = metadata.PluginName; + this.Description = metadata.Description; + this.Parameters = metadata.Parameters; + this.ReturnParameter = metadata.ReturnParameter; + } + + /// Gets the name of the function. + public string Name + { + get => this._name; + init + { + Verify.NotNull(value); + Verify.ValidFunctionName(value); + this._name = value; + } + } + + /// Gets the name of the plugin containing the function. + public string? PluginName { get; init; } + + /// Gets a description of the function, suitable for use in describing the purpose to a model. + [AllowNull] + public string Description + { + get => this._description; + init => this._description = value ?? string.Empty; + } + + /// Gets the metadata for the parameters to the function. + /// If the function has no parameters, the returned list will be empty. + public IReadOnlyList Parameters + { + get => this._parameters; + init + { + Verify.NotNull(value); + this._parameters = value; + } + } + + /// Gets parameter metadata for the return parameter. + /// If the function has no return parameter, the returned value will be a default instance of a . 
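For illustration, reading back the metadata defined above from an existing function; the function instance is assumed:

using System;
using Microsoft.SemanticKernel;

internal static class MetadataInspectionSketch
{
    // Illustrative: dumps the read-only metadata surface of an existing function.
    public static void Describe(KernelFunction function)
    {
        KernelFunctionMetadata metadata = function.Metadata;
        Console.WriteLine($"{metadata.PluginName}.{metadata.Name}: {metadata.Description}");

        foreach (KernelParameterMetadata parameter in metadata.Parameters)
        {
            Console.WriteLine($"  {parameter.Name} ({parameter.ParameterType?.Name}), required: {parameter.IsRequired}");
        }
    }
}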
+ public KernelReturnParameterMetadata ReturnParameter + { + get => this._returnParameter ??= KernelReturnParameterMetadata.Empty; + init + { + Verify.NotNull(value); + this._returnParameter = value; + } + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelJsonSchema.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelJsonSchema.cs new file mode 100644 index 000000000000..c7e74f2ac935 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelJsonSchema.cs @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json; +using System.Text.Json.Serialization; +using Json.Schema; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel; + +/// Represents JSON Schema for describing types used in s. +[JsonConverter(typeof(KernelJsonSchema.JsonConverter))] +public sealed class KernelJsonSchema +{ + /// Converter for serializing/deserializing JsonSchema instances. + private static readonly SchemaJsonConverter s_jsonSchemaConverter = new(); + /// Serialization settings for + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { MaxDepth = 128 }; + /// The schema stored as a string. + private string? _schemaAsString; + + /// Parses a JSON Schema for a parameter type. + /// The JSON Schema as a string. + /// A parsed , or null if is null or empty. + internal static KernelJsonSchema? ParseOrNull(string? jsonSchema) => + !string.IsNullOrEmpty(jsonSchema) ? new(JsonSerializer.Deserialize(jsonSchema!, s_jsonSerializerOptions)) : + null; + + /// Parses a JSON Schema for a parameter type. + /// The JSON Schema as a string. + /// A parsed . + /// is null. + /// The JSON is invalid. + public static KernelJsonSchema Parse(string jsonSchema) => + new(JsonSerializer.SerializeToElement(JsonSchema.FromText(jsonSchema, s_jsonSerializerOptions), s_jsonSerializerOptions)); + + /// Parses a JSON Schema for a parameter type. + /// The JSON Schema as a sequence of UTF16 chars. + /// A parsed . + /// The JSON is invalid. + public static KernelJsonSchema Parse(ReadOnlySpan jsonSchema) => + new(JsonSerializer.SerializeToElement(JsonSerializer.Deserialize(jsonSchema, s_jsonSerializerOptions), s_jsonSerializerOptions)); + + /// Parses a JSON Schema for a parameter type. + /// The JSON Schema as a sequence of UTF8 bytes. + /// A parsed . + /// The JSON is invalid. + public static KernelJsonSchema Parse(ReadOnlySpan utf8JsonSchema) => + new(JsonSerializer.SerializeToElement(JsonSerializer.Deserialize(utf8JsonSchema, s_jsonSerializerOptions), s_jsonSerializerOptions)); + + /// Initializes a new instance from the specified . + /// The schema to be stored. + /// + /// The is not validated, which is why this constructor is internal. + /// All callers must ensure JSON Schema validity. + /// + internal KernelJsonSchema(JsonElement jsonSchema) => + this.RootElement = jsonSchema; + + /// Gets a representing the root element of the schema. + public JsonElement RootElement { get; } + + /// Gets the JSON Schema as a string. + public override string ToString() => this._schemaAsString ??= JsonSerializer.Serialize(this.RootElement, JsonOptionsCache.WriteIndented); + + /// Converter for reading/writing the schema. + public sealed class JsonConverter : JsonConverter + { + /// + public override KernelJsonSchema? 
Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) => + new(JsonSerializer.SerializeToElement(s_jsonSchemaConverter.Read(ref reader, typeToConvert, options))); + + /// + public override void Write(Utf8JsonWriter writer, KernelJsonSchema value, JsonSerializerOptions options) => + value.RootElement.WriteTo(writer); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelParameterMetadata.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelParameterMetadata.cs new file mode 100644 index 000000000000..8bd41fa6e660 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelParameterMetadata.cs @@ -0,0 +1,174 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using Json.Schema; +using Json.Schema.Generation; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides read-only metadata for a parameter. +/// +public sealed class KernelParameterMetadata +{ + /// The name of the parameter. + private string _name = string.Empty; + /// The description of the parameter. + private string _description = string.Empty; + /// The default value of the parameter. + private object? _defaultValue; + /// The .NET type of the parameter. + private Type? _parameterType; + /// The schema of the parameter, potentially lazily-initialized. + private InitializedSchema? _schema; + + /// Initializes the for a parameter with the specified name. + /// The name of the parameter. + /// The was null. + /// The was empty or composed entirely of whitespace. + public KernelParameterMetadata(string name) => this.Name = name; + + /// Initializes a as a copy of another . + /// The was null. + /// This creates a shallow clone of . + public KernelParameterMetadata(KernelParameterMetadata metadata) + { + Verify.NotNull(metadata); + this._name = metadata._name; + this._description = metadata._description; + this._defaultValue = metadata._defaultValue; + this.IsRequired = metadata.IsRequired; + this._parameterType = metadata._parameterType; + this._schema = metadata._schema; + } + + /// Gets the name of the function. + public string Name + { + get => this._name; + init + { + Verify.NotNullOrWhiteSpace(value); + this._name = value; + } + } + + /// Gets a description of the function, suitable for use in describing the purpose to a model. + [AllowNull] + public string Description + { + get => this._description; + init + { + string newDescription = value ?? string.Empty; + if (value != this._description && this._schema?.Inferred is true) + { + this._schema = null; + } + this._description = newDescription; + } + } + + /// Gets the default value of the parameter. + public object? DefaultValue + { + get => this._defaultValue; + init + { + if (value != this._defaultValue && this._schema?.Inferred is true) + { + this._schema = null; + } + this._defaultValue = value; + } + } + + /// Gets whether the parameter is required. + public bool IsRequired { get; init; } + + /// Gets the .NET type of the parameter. + public Type? ParameterType + { + get => this._parameterType; + init + { + if (value != this._parameterType && this._schema?.Inferred is true) + { + this._schema = null; + } + this._parameterType = value; + } + } + + /// Gets a JSON Schema describing the parameter's type. + public KernelJsonSchema? Schema + { + get => (this._schema ??= InferSchema(this.ParameterType, this.DefaultValue, this.Description)).Schema; + init => this._schema = value is null ? 
null : new() { Inferred = false, Schema = value }; + } + + /// Infers a JSON schema from a and description. + /// The parameter type. If null, no schema can be inferred. + /// The parameter's default value, if any. + /// The parameter description. If null, it won't be included in the schema. + internal static InitializedSchema InferSchema(Type? parameterType, object? defaultValue, string? description) + { + KernelJsonSchema? schema = null; + + // If no schema was provided but a type was provided, try to generate a schema from the type. + if (parameterType is not null) + { + // Type must be usable as a generic argument to be used with JsonSchemaBuilder. + bool invalidAsGeneric = + // from RuntimeType.ThrowIfTypeNeverValidGenericArgument +#if NET_8_OR_GREATER + parameterType.IsFunctionPointer || +#endif + parameterType.IsPointer || + parameterType.IsByRef || + parameterType == typeof(void); + + if (!invalidAsGeneric) + { + try + { + if (InternalTypeConverter.ConvertToString(defaultValue) is string stringDefault && !string.IsNullOrWhiteSpace(stringDefault)) + { + bool needsSpace = !string.IsNullOrWhiteSpace(description); + description += $"{(needsSpace ? " " : "")}(default value: {stringDefault})"; + } + + var builder = new JsonSchemaBuilder().FromType(parameterType); + if (!string.IsNullOrWhiteSpace(description)) + { + builder = builder.Description(description!); + } + schema = new KernelJsonSchema(JsonSerializer.SerializeToElement(builder.Build())); + } + catch (ArgumentException) + { + // Invalid type; ignore, and leave schema as null. + // This should be exceedingly rare, as we checked for all known category of + // problematic types above. If it becomes more common that schema creation + // could fail expensively, we'll want to track whether inference was already + // attempted and avoid doing so on subsequent accesses if it was. + } + } + } + + // Always return an instance so that subsequent reads of the Schema don't try to regenerate + // it again. If inference failed, we just leave the Schema null in the instance. + return new InitializedSchema { Inferred = true, Schema = schema }; + } + + /// A wrapper for a and whether it was inferred or set explicitly by the user. + internal sealed class InitializedSchema + { + /// true if the was inferred; false if it was set explicitly by the user. + public bool Inferred { get; set; } + /// The schema, if one exists. + public KernelJsonSchema? Schema { get; set; } + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs new file mode 100644 index 000000000000..30ecd85a9309 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; + +#pragma warning disable CA1716 // Identifiers should not match keywords + +namespace Microsoft.SemanticKernel; + +/// Represents a plugin that may be registered with a . +/// +/// A plugin is a named read-only collection of functions. There is a many-to-many relationship between +/// plugins and functions: a plugin may contain any number of functions, and a function may +/// exist in any number of plugins. 
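As a sketch of the schema behavior implemented above (inference from the .NET type versus an explicitly parsed KernelJsonSchema); the parameter names and values are illustrative only:

using System;
using Microsoft.SemanticKernel;

internal static class ParameterSchemaSketch
{
    // Illustrative: schema inference versus an explicitly supplied schema.
    public static void Run()
    {
        // Inferred: Schema is generated lazily from ParameterType/DefaultValue/Description on first read.
        KernelParameterMetadata inferred = new("count")
        {
            Description = "Number of results to return.",
            ParameterType = typeof(int),
            DefaultValue = 5,
            IsRequired = false,
        };
        Console.WriteLine(inferred.Schema);

        // Explicit: a schema parsed from JSON text suppresses inference.
        KernelParameterMetadata explicitSchema = new("city")
        {
            Description = "City name.",
            Schema = KernelJsonSchema.Parse("{\"type\":\"string\",\"description\":\"City name.\"}"),
        };
        Console.WriteLine(explicitSchema.Schema);
    }
}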
+/// +[DebuggerDisplay("Name = {Name}, Functions = {FunctionCount}")] +[DebuggerTypeProxy(typeof(KernelPlugin.TypeProxy))] +public abstract class KernelPlugin : IEnumerable +{ + /// Initializes the new plugin from the provided name, description, and function collection. + /// The name for the plugin. + /// A description of the plugin. + /// is null. + /// is an invalid plugin name. + protected KernelPlugin(string name, string? description = null) + { + Verify.ValidPluginName(name); + + this.Name = name; + this.Description = !string.IsNullOrWhiteSpace(description) ? description! : ""; + } + + /// Gets the name of the plugin. + public string Name { get; } + + /// Gets a description of the plugin. + public string Description { get; } + + /// Gets the function in the plugin with the specified name. + /// The name of the function. + /// The function. + /// The plugin does not contain a function with the specified name. + public KernelFunction this[string functionName] => + this.TryGetFunction(functionName, out KernelFunction? function) ? function : throw new KeyNotFoundException($"The plugin does not contain a function with the specified name. Plugin name - '{this.Name}', function name - '{functionName}'."); + + /// Gets whether the plugin contains a function with the specified name. + /// The name of the function. + /// true if the plugin contains the specified function; otherwise, false. + public bool Contains(string functionName) + { + Verify.NotNull(functionName); + + return this.TryGetFunction(functionName, out _); + } + + /// Gets whether the plugin contains a function. + /// The function. + /// true if the plugin contains the specified function; otherwise, false. + public bool Contains(KernelFunction function) + { + Verify.NotNull(function); + + return this.Contains(function.Name); + } + + /// Gets the number of functions in this plugin. + public abstract int FunctionCount { get; } + + /// Finds a function in the plugin by name. + /// The name of the function to find. + /// If the plugin contains the requested function, the found function instance; otherwise, null. + /// true if the function was found in the plugin; otherwise, false. + public abstract bool TryGetFunction(string name, [NotNullWhen(true)] out KernelFunction? function); + + /// Gets a collection of instances, one for every function in this plugin. + /// A list of metadata over every function in this plugin. + public IList GetFunctionsMetadata() + { + List metadata = new(this.FunctionCount); + foreach (KernelFunction function in this) + { + metadata.Add(new KernelFunctionMetadata(function.Metadata) { PluginName = this.Name }); + } + + return metadata; + } + + /// + public abstract IEnumerator GetEnumerator(); + + /// + IEnumerator IEnumerable.GetEnumerator() => this.GetEnumerator(); + + /// Debugger type proxy for the kernel plugin. 
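An illustrative sketch of consuming a KernelPlugin as defined above; how the plugin instance is created (for example via a plugin factory) is outside this file, and the function name is hypothetical:

using System;
using Microsoft.SemanticKernel;

internal static class PluginInspectionSketch
{
    // Illustrative: consumes the read-only plugin surface; plugin construction is out of scope here.
    public static void Inspect(KernelPlugin plugin)
    {
        Console.WriteLine($"{plugin.Name}: {plugin.Description} ({plugin.FunctionCount} functions)");

        // TryGetFunction avoids the KeyNotFoundException thrown by the indexer for missing names.
        if (plugin.TryGetFunction("get_current_time", out KernelFunction? function))
        {
            Console.WriteLine($"Found '{function.Name}'.");
        }

        foreach (KernelFunctionMetadata metadata in plugin.GetFunctionsMetadata())
        {
            Console.WriteLine($"  {metadata.PluginName}.{metadata.Name}");
        }
    }
}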
+ private sealed class TypeProxy + { + private readonly KernelPlugin _plugin; + + public TypeProxy(KernelPlugin plugin) => this._plugin = plugin; + + public string Name => this._plugin.Name; + + public string Description => this._plugin.Description; + + public KernelFunction[] Functions => this._plugin.OrderBy(f => f.Name, StringComparer.OrdinalIgnoreCase).ToArray(); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPluginCollection.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPluginCollection.cs new file mode 100644 index 000000000000..a1671a99cbd8 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPluginCollection.cs @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; + +#pragma warning disable RCS1168 // Parameter name differs from base name. +#pragma warning disable CA1725 // Parameter names should match base declaration + +namespace Microsoft.SemanticKernel; + +/// Provides a collection of s. +/// +/// All plugins stored in the collection must have a unique, ordinal case-insensitive name. +/// All name lookups are performed using ordinal case-insensitive comparisons. +/// +[DebuggerDisplay("Count = {Count}")] +[DebuggerTypeProxy(typeof(KernelPluginCollection.TypeProxy))] +public sealed class KernelPluginCollection : ICollection, IReadOnlyKernelPluginCollection +{ + /// The underlying dictionary of plugins. + private readonly Dictionary _plugins; + + /// Initializes a collection of plugins. + public KernelPluginCollection() => this._plugins = new(StringComparer.OrdinalIgnoreCase); + + /// Initializes a collection of plugins that contains all of the plugins from the provided collection. + /// The initial collection of plugins to populate this collection. + /// is null. + /// contains multiple plugins with the same name. + public KernelPluginCollection(IEnumerable plugins) + { + Verify.NotNull(plugins); + + if (plugins is KernelPluginCollection existing) + { + this._plugins = new(existing._plugins, StringComparer.OrdinalIgnoreCase); + } + else + { + this._plugins = new(plugins is ICollection c ? c.Count : 0, StringComparer.OrdinalIgnoreCase); + this.AddRange(plugins); + } + } + + /// Gets the number of plugins in the collection. + public int Count => this._plugins.Count; + + /// Adds the plugin to the plugin collection. + /// The plugin to add. + /// is null. + /// . is null. + /// A plugin with the same name already exists in the collection. + public void Add(KernelPlugin plugin) + { + Verify.NotNull(plugin); + + string name = plugin.Name; + Verify.NotNull(name, "plugin.Name"); + + this._plugins.Add(name, plugin); + } + + /// Adds a collection of plugins to this plugin collection. + /// The plugins to add. + /// is null. + /// contains a null plugin. + /// A plugin in has a null . + /// A plugin with the same name as a plugin in already exists in the collection. + public void AddRange(IEnumerable plugins) + { + Verify.NotNull(plugins); + + foreach (KernelPlugin plugin in plugins) + { + this.Add(plugin); + } + } + + /// Removes the specified plugin from the collection. + /// The plugin to remove. + /// true if was in the collection and could be removed; otherwise, false. + public bool Remove(KernelPlugin plugin) + { + Verify.NotNull(plugin); + + if (this._plugins.TryGetValue(plugin.Name, out KernelPlugin? 
existing) && existing == plugin) + { + return this._plugins.Remove(plugin.Name); + } + + return false; + } + + /// Removes all plugins from the collection. + public void Clear() => this._plugins.Clear(); + + /// Gets an enumerable of all plugins stored in this collection. + public IEnumerator GetEnumerator() => this._plugins.Values.GetEnumerator(); + + /// Gets an enumerable of all plugins stored in this collection. + IEnumerator IEnumerable.GetEnumerator() => this.GetEnumerator(); + + /// Gets whether the collection contains the specified plugin. + /// The plugin. + /// true if the collection contains the plugin; otherwise, false. + public bool Contains(KernelPlugin plugin) + { + Verify.NotNull(plugin); + + return this._plugins.TryGetValue(plugin.Name, out KernelPlugin? existing) && plugin == existing; + } + + /// + public KernelPlugin this[string name] + { + get + { + if (!this.TryGetPlugin(name, out KernelPlugin? plugin)) + { + throw new KeyNotFoundException($"Plugin {name} not found."); + } + + return plugin; + } + } + + /// Gets a plugin from the collection by name. + /// The name of the plugin. + /// The plugin if found in the collection. + /// true if the collection contains the plugin; otherwise, false. + public bool TryGetPlugin(string name, [NotNullWhen(true)] out KernelPlugin? plugin) => + this._plugins.TryGetValue(name, out plugin); + + void ICollection.CopyTo(KernelPlugin[] array, int arrayIndex) => + ((IDictionary)this._plugins).Values.CopyTo(array, arrayIndex); + + bool ICollection.IsReadOnly => false; + + /// Debugger type proxy for nicer interaction with the collection in a debugger. + private sealed class TypeProxy + { + private readonly KernelPluginCollection _collection; + + public TypeProxy(KernelPluginCollection collection) => this._collection = collection; + + [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)] + public KernelPlugin[] Plugins => this._collection._plugins.Values.ToArray(); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPluginExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPluginExtensions.cs new file mode 100644 index 000000000000..e334e4d00fe7 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPluginExtensions.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// Provides extension methods for working with s and collections of them. +public static class KernelPluginExtensions +{ + /// Gets whether the plugins collection contains a plugin with the specified name. + /// The plugins collections. + /// The name of the plugin. + /// true if the plugins contains a plugin with the specified name; otherwise, false. + public static bool Contains(this IReadOnlyKernelPluginCollection plugins, string pluginName) + { + Verify.NotNull(plugins); + Verify.NotNull(pluginName); + + return plugins.TryGetPlugin(pluginName, out _); + } + + /// Gets a function from the collection by plugin and function names. + /// The collection. + /// The name of the plugin storing the function. + /// The name of the function. + /// The function from the collection. + public static KernelFunction GetFunction(this IReadOnlyKernelPluginCollection plugins, string? pluginName, string functionName) + { + Verify.NotNull(plugins); + Verify.NotNull(functionName); + + if (!TryGetFunction(plugins, pluginName, functionName, out KernelFunction? 
function)) + { + throw new KeyNotFoundException($"The plugin collection does not contain a plugin and/or function with the specified names. Plugin name - '{pluginName}', function name - '{functionName}'."); + } + + return function; + } + + /// Gets a function from the collection by plugin and function names. + /// The collection. + /// The name of the plugin storing the function. + /// The name of the function. + /// The function, if found. + /// true if the specified plugin was found and the specified function was found in that plugin; otherwise, false. + /// + /// If is null or entirely whitespace, all plugins are searched for a function with the specified name, + /// and the first one found is returned. + /// + public static bool TryGetFunction(this IReadOnlyKernelPluginCollection plugins, string? pluginName, string functionName, [NotNullWhen(true)] out KernelFunction? func) + { + Verify.NotNull(plugins); + Verify.NotNull(functionName); + + if (string.IsNullOrWhiteSpace(pluginName)) + { + foreach (KernelPlugin p in plugins) + { + if (p.TryGetFunction(functionName, out func)) + { + return true; + } + } + } + else + { + if (plugins.TryGetPlugin(pluginName!, out KernelPlugin? plugin) && + plugin.TryGetFunction(functionName, out func)) + { + return true; + } + } + + func = null; + return false; + } + + /// Gets a collection of instances, one for every function in every plugin in the plugins collection. + /// The plugins collection. + /// A list of metadata over every function in the plugins collection + public static IList GetFunctionsMetadata(this IEnumerable plugins) + { + Verify.NotNull(plugins); + + List metadata = new(); + foreach (KernelPlugin plugin in plugins) + { + metadata.AddRange(plugin.GetFunctionsMetadata()); + } + + return metadata; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelReturnParameterMetadata.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelReturnParameterMetadata.cs new file mode 100644 index 000000000000..b7d086117d6f --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelReturnParameterMetadata.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using static Microsoft.SemanticKernel.KernelParameterMetadata; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides read-only metadata for a 's return parameter. +/// +public sealed class KernelReturnParameterMetadata +{ + internal static readonly KernelReturnParameterMetadata Empty = new(); + + /// The description of the return parameter. + private string _description = string.Empty; + /// The .NET type of the return parameter. + private Type? _parameterType; + /// The schema of the return parameter, potentially lazily-initialized. + private KernelParameterMetadata.InitializedSchema? _schema; + + /// Initializes the . + public KernelReturnParameterMetadata() { } + + /// Initializes a as a copy of another . + public KernelReturnParameterMetadata(KernelReturnParameterMetadata metadata) + { + this._description = metadata._description; + this._parameterType = metadata._parameterType; + this._schema = metadata._schema; + } + + /// Gets a description of the return parameter, suitable for use in describing the purpose to a model. + [AllowNull] + public string Description + { + get => this._description; + init + { + string newDescription = value ?? 
string.Empty; + if (value != this._description && this._schema?.Inferred is true) + { + this._schema = null; + } + this._description = newDescription; + } + } + + /// Gets the .NET type of the return parameter. + public Type? ParameterType + { + get => this._parameterType; + init + { + if (value != this._parameterType && this._schema?.Inferred is true) + { + this._schema = null; + } + this._parameterType = value; + } + } + + /// Gets a JSON Schema describing the type of the return parameter. + public KernelJsonSchema? Schema + { + get => (this._schema ??= InferSchema(this.ParameterType, defaultValue: null, this.Description)).Schema; + init => this._schema = value is null ? null : new() { Inferred = false, Schema = value }; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/NullReadOnlyFunctionCollection.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/NullReadOnlyFunctionCollection.cs deleted file mode 100644 index f89db9e471fa..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/NullReadOnlyFunctionCollection.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel.Diagnostics; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -[DebuggerDisplay("Count = 0")] -internal sealed class NullReadOnlyFunctionCollection : IReadOnlyFunctionCollection -{ - public static readonly NullReadOnlyFunctionCollection Instance = new(); - - /// - public ISKFunction GetFunction(string functionName) - { - throw new SKException($"Function not available: {functionName}"); - } - - /// - public ISKFunction GetFunction(string pluginName, string functionName) - { - throw new SKException($"Function not available: {pluginName}.{functionName}"); - } - - /// - public bool TryGetFunction(string functionName, [NotNullWhen(true)] out ISKFunction? availableFunction) - { - availableFunction = null; - return false; - } - - /// - public bool TryGetFunction(string pluginName, string functionName, [NotNullWhen(true)] out ISKFunction? availableFunction) - { - availableFunction = null; - return false; - } - - /// - public IReadOnlyList GetFunctionViews() - { - return Array.Empty(); - } - - private NullReadOnlyFunctionCollection() - { - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/ParameterView.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/ParameterView.cs deleted file mode 100644 index 909f3321b99f..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/ParameterView.cs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Class used to copy and export data about parameters for planner and related scenarios. -/// -/// Parameter name. The name must be alphanumeric (underscore is the only special char allowed). -/// Parameter description -/// Default parameter value, if not provided -/// Parameter type. -/// Whether the parameter is required. -public sealed record ParameterView( - string Name, - string? Description = null, - string? DefaultValue = null, - ParameterViewType? Type = null, - bool? 
IsRequired = null); diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/ParameterViewType.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/ParameterViewType.cs deleted file mode 100644 index ff89b8d69060..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/ParameterViewType.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -#pragma warning disable CA1720 // Identifier contains type name - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Represents the type for the parameter view. -/// -public readonly record struct ParameterViewType(string Name) -{ - /// - /// Represents the "string" parameter view type. - /// - public static readonly ParameterViewType String = new("string"); - - /// - /// Represents the "number" parameter view type. - /// - public static readonly ParameterViewType Number = new("number"); - - /// - /// Represents the "object" parameter view type. - /// - public static readonly ParameterViewType Object = new("object"); - - /// - /// Represents the "array" parameter view type. - /// - public static readonly ParameterViewType Array = new("array"); - - /// - /// Represents the "boolean" parameter view type. - /// - public static readonly ParameterViewType Boolean = new("boolean"); - - /// - /// Gets the name of the parameter view type. - /// - public string Name { get; init; } = !string.IsNullOrEmpty(Name) ? Name : throw new ArgumentNullException(nameof(Name)); - - /// - /// Returns a string representation of the parameter view type. - /// - /// A string representing the parameter view type. - public override string ToString() => this.Name; - - /// - /// Returns the hash code for this instance. - /// - /// A hash code for the current instance. - public override int GetHashCode() - { - return this.Name.GetHashCode(); - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponse.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponse.cs new file mode 100644 index 000000000000..d4e4b5790f4b --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponse.cs @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ComponentModel; + +namespace Microsoft.SemanticKernel; + +/// +/// The REST API operation response. +/// +[TypeConverterAttribute(typeof(RestApiOperationResponseConverter))] +public sealed class RestApiOperationResponse +{ + /// + /// Gets the content of the response. + /// + public object Content { get; } + + /// + /// Gets the content type of the response. + /// + public string ContentType { get; } + + /// + /// The expected schema of the response as advertised in the OpenAPI operation. + /// + public KernelJsonSchema? ExpectedSchema { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// The content of the response. + /// The content type of the response. + /// The schema against which the response body should be validated. + public RestApiOperationResponse(object content, string contentType, KernelJsonSchema? expectedSchema = null) + { + this.Content = content; + this.ContentType = contentType; + this.ExpectedSchema = expectedSchema; + } + + /// + public override string ToString() => this.Content?.ToString() ?? 
string.Empty; +} diff --git a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationResponseConverter.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponseConverter.cs similarity index 82% rename from dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationResponseConverter.cs rename to dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponseConverter.cs index 23ef33812bc0..b600019c71d7 100644 --- a/dotnet/src/Functions/Functions.OpenAPI/Model/RestApiOperationResponseConverter.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponseConverter.cs @@ -4,7 +4,7 @@ using System.ComponentModel; using System.Globalization; -namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; +namespace Microsoft.SemanticKernel; /// /// Converts a object of type to string type. @@ -12,13 +12,13 @@ namespace Microsoft.SemanticKernel.Functions.OpenAPI.Model; public class RestApiOperationResponseConverter : TypeConverter { /// - public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) + public override bool CanConvertTo(ITypeDescriptorContext? context, Type? destinationType) { return destinationType == typeof(string) || base.CanConvertTo(context, destinationType); } /// - public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType) + public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? value, Type destinationType) { // Convert object content to a string based on the type of the `Content` property. // More granular conversion logic can be built based on the value of the `ContentType` property, if needed. diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/SKFunctionAttribute.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/SKFunctionAttribute.cs deleted file mode 100644 index adad4b5d357a..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/SKFunctionAttribute.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Specifies that a method is a native function available to Semantic Kernel. -/// -/// -/// -/// When the kernel imports native functions, it searches all public methods tagged with this attribute. -/// If a method is not tagged with this attribute, it may still be imported directly via a -/// or referencing the method directly. -/// -/// -/// A description of the method should be supplied using the . -/// That description will be used both with LLM prompts and embedding comparisons; the quality of -/// the description affects the planner's ability to reason about complex tasks. A -/// should also be provided on each parameter to provide a description of the parameter suitable for consumption -/// by an LLM or embedding. -/// -/// -/// Functions may have any number of parameters. Parameters of type and -/// are filled in from the corresponding members of the ; -/// itself may also be a parameter. A given native function may declare at -/// most one parameter of each of these types. 
All other parameters must be of a primitive .NET type or -/// a type attributed with . Functions may return a , -/// , any primitive .NET type or a type attributed with , -/// or a or of such a type. -/// -/// -/// Parameters are populated based on a context variable of the same name, unless an is -/// used to override which context variable is targeted. If no context variable of the given name is present, but -/// a default value was specified via either a or an optional value in the siguatre, -/// that default value is used instead. If no default value was specified and it's the first parameter, the "input" -/// context variable will be used. If no value is available, the invocation will fail. -/// -/// -/// For non-string parameters, the context variable value is automatically converted to the appropriate type to be passed -/// in based on the for the specified type. Similarly, return values are automatically converted -/// back to strings via the associated . -/// -/// -[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)] -public sealed class SKFunctionAttribute : Attribute -{ - /// Initializes the attribute. - public SKFunctionAttribute() - { - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/SKNameAttribute.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/SKNameAttribute.cs deleted file mode 100644 index 99f8ef585683..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/SKNameAttribute.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// Overrides the default name used by a Semantic Kernel native function name or parameter. -/// -/// By default, the method or parameter's name is used. If the method returns a task and ends with -/// "Async", by default the suffix is removed. This attribute can be used to override such heuristics. -/// -[AttributeUsage(AttributeTargets.Method | AttributeTargets.Parameter, AllowMultiple = false)] -public sealed class SKNameAttribute : Attribute -{ - /// - /// Initializes the attribute with the name to use. - /// - /// The name. - public SKNameAttribute(string name) => this.Name = name; - - /// Gets the specified name. - public string Name { get; } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Http/HttpHandlerFactory{THandler}.cs b/dotnet/src/SemanticKernel.Abstractions/Http/HttpHandlerFactory{THandler}.cs deleted file mode 100644 index c8c9836e6022..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Http/HttpHandlerFactory{THandler}.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using Microsoft.Extensions.Logging; - -namespace Microsoft.SemanticKernel.Http; - -/// -/// A factory for creating instances of . -/// -/// -public abstract class HttpHandlerFactory : IDelegatingHandlerFactory where THandler : DelegatingHandler -{ - /// - /// Creates a new instance of . - /// - /// - /// - public virtual DelegatingHandler Create(ILoggerFactory? 
loggerFactory = null) - { - return (DelegatingHandler)Activator.CreateInstance(typeof(THandler), loggerFactory); - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Diagnostics/HttpOperationException.cs b/dotnet/src/SemanticKernel.Abstractions/Http/HttpOperationException.cs similarity index 96% rename from dotnet/src/SemanticKernel.Abstractions/Diagnostics/HttpOperationException.cs rename to dotnet/src/SemanticKernel.Abstractions/Http/HttpOperationException.cs index 58581fedf3cb..d09215267987 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Diagnostics/HttpOperationException.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Http/HttpOperationException.cs @@ -3,7 +3,7 @@ using System; using System.Net; -namespace Microsoft.SemanticKernel.Diagnostics; +namespace Microsoft.SemanticKernel; /// /// Represents an exception specific to HTTP operations. @@ -13,7 +13,7 @@ public class HttpOperationException : Exception /// /// Initializes a new instance of the class. /// - public HttpOperationException() : base() + public HttpOperationException() { } diff --git a/dotnet/src/SemanticKernel.Abstractions/Http/IDelegatingHandlerFactory.cs b/dotnet/src/SemanticKernel.Abstractions/Http/IDelegatingHandlerFactory.cs deleted file mode 100644 index fbb19a834015..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Http/IDelegatingHandlerFactory.cs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using Microsoft.Extensions.Logging; - -namespace Microsoft.SemanticKernel.Http; - -/// -/// Factory for creating instances. -/// -public interface IDelegatingHandlerFactory -{ - /// - /// Creates a new instance with the specified logger. - /// - /// The to use for logging. If null, no logging will be performed. - /// A new instance. - DelegatingHandler Create(ILoggerFactory? loggerFactory); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Http/NullHttpHandler.cs b/dotnet/src/SemanticKernel.Abstractions/Http/NullHttpHandler.cs deleted file mode 100644 index 3ed5113f26ae..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Http/NullHttpHandler.cs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; - -namespace Microsoft.SemanticKernel.Http; - -/// -/// A http retry handler that does nothing. -/// -public sealed class NullHttpHandler : DelegatingHandler -{ -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Http/NullHttpHandlerFactory.cs b/dotnet/src/SemanticKernel.Abstractions/Http/NullHttpHandlerFactory.cs deleted file mode 100644 index 07c5d5ccd73a..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Http/NullHttpHandlerFactory.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using Microsoft.Extensions.Logging; - -namespace Microsoft.SemanticKernel.Http; - -/// -/// Implementation of that creates instances. -/// -public sealed class NullHttpHandlerFactory : IDelegatingHandlerFactory -{ - /// - /// Gets the singleton instance of . - /// - public static NullHttpHandlerFactory Instance => new(); - - /// - /// Creates a new instance. - /// - /// The logger factory to use. - /// A new instance. - public DelegatingHandler Create(ILoggerFactory? 
loggerFactory) - { - return new NullHttpHandler(); - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/IKernel.cs b/dotnet/src/SemanticKernel.Abstractions/IKernel.cs deleted file mode 100644 index 29b10e9e1f65..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/IKernel.cs +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Globalization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Events; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TemplateEngine; - -namespace Microsoft.SemanticKernel; - -/// -/// Interface for the semantic kernel. -/// -public interface IKernel -{ - /// - /// The ILoggerFactory used to create a logger for logging. - /// - ILoggerFactory LoggerFactory { get; } - - /// - /// Reference to the engine rendering prompt templates - /// - IPromptTemplateEngine PromptTemplateEngine { get; } - - /// - /// Reference to the read-only function collection containing all the imported functions - /// - IReadOnlyFunctionCollection Functions { get; } - - /// - /// Reference to Http handler factory - /// - IDelegatingHandlerFactory HttpHandlerFactory { get; } - - /// - /// Registers a custom function in the internal function collection. - /// - /// The custom function to register. - /// A C# function wrapping the function execution logic. - ISKFunction RegisterCustomFunction(ISKFunction customFunction); - - /// - /// Run a pipeline composed of synchronous and asynchronous functions. - /// - /// Input to process - /// The to monitor for cancellation requests. The default is . - /// List of functions - /// Result of the function composition - Task RunAsync( - ContextVariables variables, - CancellationToken cancellationToken, - params ISKFunction[] pipeline); - - /// - /// Create a new instance of a context, linked to the kernel internal state. - /// - /// Initializes the context with the provided variables - /// Provide specific scoped functions. Defaults to all existing in the kernel - /// Logged factory used within the context - /// Optional culture info related to the context - /// SK context - SKContext CreateNewContext( - ContextVariables? variables = null, - IReadOnlyFunctionCollection? functions = null, - ILoggerFactory? loggerFactory = null, - CultureInfo? culture = null); - - /// - /// Get one of the configured services. Currently limited to AI services. - /// - /// Optional name. If the name is not provided, returns the default T available - /// Service type - /// Instance of T - T GetService(string? name = null) where T : IAIService; - - /// - /// Used for registering a function invoking event handler. - /// Triggers before each function invocation. - /// - event EventHandler? FunctionInvoking; - - /// - /// Used for registering a function invoked event handler. - /// Triggers after each function invocation. - /// - event EventHandler? FunctionInvoked; - - #region Obsolete - - /// - /// Semantic memory instance - /// - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. 
See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - ISemanticTextMemory Memory { get; } - - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use Kernel.Functions instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - IReadOnlyFunctionCollection Skills { get; } -#pragma warning restore CS1591 - - /// - /// Access registered functions by plugin name and function name. Not case sensitive. - /// The function might be native or semantic, it's up to the caller handling it. - /// - /// Plugin name - /// Function name - /// Delegate to execute the function - [Obsolete("Func shorthand no longer no longer supported. Use Kernel.Plugins collection instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - ISKFunction Func(string pluginName, string functionName); - - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use Kernel.ImportFunctions instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - IDictionary ImportSkill(object functionsInstance, string? pluginName = null); -#pragma warning restore CS1591 - - /// - /// Set the semantic memory to use - /// - /// Semantic memory instance - /// - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - void RegisterMemory(ISemanticTextMemory memory); - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Abstractions/IKernelBuilder.cs b/dotnet/src/SemanticKernel.Abstractions/IKernelBuilder.cs new file mode 100644 index 000000000000..4e9fffd1aab6 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/IKernelBuilder.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.DependencyInjection; + +namespace Microsoft.SemanticKernel; + +/// Provides a builder for constructing instances of . +public interface IKernelBuilder +{ + /// Gets the collection of services to be built into the . + IServiceCollection Services { get; } + + /// Gets a builder for adding collections as singletons to . + IKernelBuilderPlugins Plugins { get; } +} + +/// Provides a builder for adding plugins as singletons to a service collection. +public interface IKernelBuilderPlugins +{ + /// Gets the collection of services to which plugins should be added. + IServiceCollection Services { get; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Kernel.cs b/dotnet/src/SemanticKernel.Abstractions/Kernel.cs new file mode 100644 index 000000000000..46a60aa4a11a --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Kernel.cs @@ -0,0 +1,643 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Globalization; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides state for use throughout a Semantic Kernel workload. +/// +/// +/// An instance of is passed through to every function invocation and service call +/// throughout the system, providing to each the ability to access shared state and services. +/// +public sealed class Kernel +{ + /// Key used by KernelBuilder to store type information into the service provider. + internal const string KernelServiceTypeToKeyMappings = nameof(KernelServiceTypeToKeyMappings); + + /// Dictionary containing ambient data stored in the kernel, lazily-initialized on first access. + private Dictionary? _data; + /// to be used by any operations that need access to the culture, a format provider, etc. + private CultureInfo _culture = CultureInfo.InvariantCulture; + /// The collection of plugins, initialized via the constructor or lazily-initialized on first access via . + private KernelPluginCollection? _plugins; + /// The collection of function filters, initialized via the constructor or lazily-initialized on first access via . + private NonNullCollection? _functionFilters; + /// The collection of prompt filters, initialized via the constructor or lazily-initialized on first access via . + private NonNullCollection? _promptFilters; + + /// + /// Initializes a new instance of . + /// + /// The used to query for services available through the kernel. + /// + /// The collection of plugins available through the kernel. If null, an empty collection will be used. + /// If non-null, the supplied collection instance is used, not a copy; if it's desired for the + /// to have a copy, the caller is responsible for supplying it. + /// + /// + /// The KernelBuilder class provides a fluent API for constructing a instance. + /// + public Kernel( + IServiceProvider? services = null, + KernelPluginCollection? plugins = null) + { + // Store the provided services, or an empty singleton if there aren't any. + this.Services = services ?? EmptyServiceProvider.Instance; + + // Store the provided plugins. If there weren't any, look in DI to see if there's a plugin collection. + this._plugins = plugins ?? this.Services.GetService(); + + if (this._plugins is null) + { + // Otherwise, enumerate any plugins that may have been registered directly. + IEnumerable e = this.Services.GetServices(); + + // It'll be common not to have any plugins directly registered as a service. + // If we can efficiently tell there aren't any, avoid proactively allocating + // the plugins collection. + if (e is not ICollection c || c.Count != 0) + { + this._plugins = new(e); + } + } + + // Enumerate any function filters that may have been registered. + IEnumerable functionFilters = this.Services.GetServices(); + + if (functionFilters is not ICollection functionFilterCollection || functionFilterCollection.Count != 0) + { + this._functionFilters = new(functionFilters); + } + + // Enumerate any prompt filters that may have been registered. 
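+ // As with the plugins and function filters above, only allocate the backing collection when the
+ // enumerable is non-empty (or when its count can't be checked cheaply via ICollection), so the
+ // common case of no registered prompt filters avoids an allocation.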
+ IEnumerable promptFilters = this.Services.GetServices(); + + if (promptFilters is not ICollection promptFilterCollection || promptFilterCollection.Count != 0) + { + this._promptFilters = new(promptFilters); + } + } + + /// Creates a builder for constructing instances. + /// A new instance. + public static IKernelBuilder CreateBuilder() => new KernelBuilder(); + + /// + /// Clone the object to create a new instance that may be mutated without affecting the current instance. + /// + /// + /// The current instance is unmodified by this operation. The new will be initialized with: + /// + /// + /// The same reference as is returned by the current instance's . + /// + /// + /// A new instance initialized with the same instances as are stored by the current instance's collection. + /// Changes to the new instance's plugin collection will not affect the current instance's plugin collection, and vice versa. + /// + /// + /// All of the delegates registered with each event. Delegates are immutable (every time an additional delegate is added or removed, a new one is created), + /// so changes to the new instance's event delegates will not affect the current instance's event delegates, and vice versa. + /// + /// + /// A new containing all of the key/value pairs from the current instance's dictionary. + /// Any changes made to the new instance's dictionary will not affect the current instance's dictionary, and vice versa. + /// + /// The same reference as is returned by the current instance's . + /// + /// + public Kernel Clone() => + new(this.Services, this._plugins is { Count: > 0 } ? new KernelPluginCollection(this._plugins) : null) + { + FunctionInvoking = this.FunctionInvoking, + FunctionInvoked = this.FunctionInvoked, + PromptRendering = this.PromptRendering, + PromptRendered = this.PromptRendered, + _data = this._data is { Count: > 0 } ? new Dictionary(this._data) : null, + _culture = this._culture, + }; + + /// + /// Gets the collection of plugins available through the kernel. + /// + public KernelPluginCollection Plugins => + this._plugins ?? + Interlocked.CompareExchange(ref this._plugins, new KernelPluginCollection(), null) ?? + this._plugins; + + /// + /// Gets the collection of function filters available through the kernel. + /// + [Experimental("SKEXP0004")] + public IList FunctionFilters => + this._functionFilters ?? + Interlocked.CompareExchange(ref this._functionFilters, new NonNullCollection(), null) ?? + this._functionFilters; + + /// + /// Gets the collection of function filters available through the kernel. + /// + [Experimental("SKEXP0004")] + public IList PromptFilters => + this._promptFilters ?? + Interlocked.CompareExchange(ref this._promptFilters, new NonNullCollection(), null) ?? + this._promptFilters; + + /// + /// Gets the service provider used to query for services available through the kernel. + /// + public IServiceProvider Services { get; } + + /// + /// Gets the culture currently associated with this . + /// + /// + /// The culture defaults to if not explicitly set. + /// It may be set to another culture, such as , + /// and any functions invoked within the context can consult this property for use in + /// operations like formatting and parsing. + /// + [AllowNull] + public CultureInfo Culture + { + get => this._culture; + set => this._culture = value ?? CultureInfo.InvariantCulture; + } + + /// + /// Gets the associated with this . + /// + /// + /// This returns any in . If there is + /// none, it returns an that won't perform any logging. 
+ /// + public ILoggerFactory LoggerFactory => + this.Services.GetService() ?? + NullLoggerFactory.Instance; + + /// + /// Gets the associated with this . + /// + public IAIServiceSelector ServiceSelector => + this.Services.GetService() ?? + OrderedAIServiceSelector.Instance; + + /// + /// Gets a dictionary for ambient data associated with the kernel. + /// + /// + /// This may be used to flow arbitrary data in and out of operations performed with this kernel instance. + /// + public IDictionary Data => + this._data ?? + Interlocked.CompareExchange(ref this._data, new Dictionary(), null) ?? + this._data; + + #region GetServices + /// Gets a required service from the provider. + /// Specifies the type of the service to get. + /// An object that specifies the key of the service to get. + /// The found service instance. + /// A service of the specified type and name could not be found. + public T GetRequiredService(object? serviceKey = null) where T : class + { + T? service = null; + + if (serviceKey is not null) + { + if (this.Services is IKeyedServiceProvider) + { + // We were given a service ID, so we need to use the keyed service lookup. + service = this.Services.GetKeyedService(serviceKey); + } + } + else + { + // No ID was given. We first want to use non-keyed lookup, in order to match against + // a service registered without an ID. If we can't find one, then we try to match with + // a service registered with an ID. In both cases, if there were multiple, this will match + // with whichever was registered last. + service = this.Services.GetService(); + if (service is null && this.Services is IKeyedServiceProvider) + { + service = this.GetAllServices().LastOrDefault(); + } + } + + // If we couldn't find the service, throw an exception. + if (service is null) + { + string message = + serviceKey is null ? $"Service of type '{typeof(T)}' not registered." : + this.Services is not IKeyedServiceProvider ? $"Key '{serviceKey}' specified but service provider '{this.Services}' is not a {nameof(IKeyedServiceProvider)}." : + $"Service of type '{typeof(T)}' and key '{serviceKey}' not registered."; + + throw new KernelException(message); + } + + // Return the found service. + return service; + } + + /// Gets all services of the specified type. + /// Specifies the type of the services to retrieve. + /// An enumerable of all instances of the specified service that are registered. + /// There is no guaranteed ordering on the results. + public IEnumerable GetAllServices() where T : class + { + if (this.Services is IKeyedServiceProvider) + { + // M.E.DI doesn't support querying for a service without a key, and it also doesn't + // support AnyKey currently: https://github.com/dotnet/runtime/issues/91466 + // As a workaround, KernelBuilder injects a service containing the type-to-all-keys + // mapping. We can query for that service and and then use it to try to get a service. + if (this.Services.GetKeyedService>>(KernelServiceTypeToKeyMappings) is { } typeToKeyMappings) + { + if (typeToKeyMappings.TryGetValue(typeof(T), out HashSet? keys)) + { + return keys.SelectMany(key => this.Services.GetKeyedServices(key)); + } + + return Enumerable.Empty(); + } + } + + return this.Services.GetServices(); + } + + #endregion + + #region Internal Filtering + + [Experimental("SKEXP0004")] + internal FunctionInvokingContext? OnFunctionInvokingFilter(KernelFunction function, KernelArguments arguments) + { + FunctionInvokingContext? 
context = null; + + if (this._functionFilters is { Count: > 0 }) + { + context = new(function, arguments); + + for (int i = 0; i < this._functionFilters.Count; i++) + { + this._functionFilters[i].OnFunctionInvoking(context); + } + } + + return context; + } + + [Experimental("SKEXP0004")] + internal FunctionInvokedContext? OnFunctionInvokedFilter(KernelArguments arguments, FunctionResult result) + { + FunctionInvokedContext? context = null; + + if (this._functionFilters is { Count: > 0 }) + { + context = new(arguments, result); + + for (int i = 0; i < this._functionFilters.Count; i++) + { + this._functionFilters[i].OnFunctionInvoked(context); + } + } + + return context; + } + + [Experimental("SKEXP0004")] + internal PromptRenderingContext? OnPromptRenderingFilter(KernelFunction function, KernelArguments arguments) + { + PromptRenderingContext? context = null; + + if (this._promptFilters is { Count: > 0 }) + { + context = new(function, arguments); + + for (int i = 0; i < this._promptFilters.Count; i++) + { + this._promptFilters[i].OnPromptRendering(context); + } + } + + return context; + } + + [Experimental("SKEXP0004")] + internal PromptRenderedContext? OnPromptRenderedFilter(KernelFunction function, KernelArguments arguments, string renderedPrompt) + { + PromptRenderedContext? context = null; + + if (this._promptFilters is { Count: > 0 }) + { + context = new(function, arguments, renderedPrompt); + + for (int i = 0; i < this._promptFilters.Count; i++) + { + this._promptFilters[i].OnPromptRendered(context); + } + } + + return context; + } + + #endregion + + #region InvokeAsync + + /// + /// Invokes the . + /// + /// The to invoke. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// The result of the function's execution. + /// is null. + /// The 's invocation was canceled. + /// + /// This behaves identically to invoking the specified with this as its argument. + /// + public Task InvokeAsync( + KernelFunction function, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(function); + + return function.InvokeAsync(this, arguments, cancellationToken); + } + + /// + /// Invokes a function from using the specified arguments. + /// + /// The name of the plugin containing the function to invoke. If null, all plugins will be searched for the first function of the specified name. + /// The name of the function to invoke. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// The result of the function's execution. + /// is null. + /// is composed entirely of whitespace. + /// The 's invocation was canceled. + /// + /// This behaves identically to using to find the desired and then + /// invoking it with this as its argument. + /// + public Task InvokeAsync( + string? pluginName, + string functionName, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + Verify.NotNullOrWhiteSpace(functionName); + + var function = this.Plugins.GetFunction(pluginName, functionName); + + return function.InvokeAsync(this, arguments, cancellationToken); + } + + /// + /// Invokes the . + /// + /// Specifies the type of the result value of the function. + /// The to invoke. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . 
+ /// The result of the function's execution, cast to . + /// is null. + /// The 's invocation was canceled. + /// The function's result could not be cast to . + /// + /// This behaves identically to invoking the specified with this as its argument. + /// + public async Task InvokeAsync( + KernelFunction function, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + FunctionResult result = await this.InvokeAsync(function, arguments, cancellationToken).ConfigureAwait(false); + return result.GetValue(); + } + + /// + /// Invokes a function from using the specified arguments. + /// + /// Specifies the type of the result value of the function. + /// The name of the plugin containing the function to invoke. If null, all plugins will be searched for the first function of the specified name. + /// The name of the function to invoke. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// The result of the function's execution, cast to . + /// is null. + /// is composed entirely of whitespace. + /// The 's invocation was canceled. + /// The function's result could not be cast to . + /// + /// This behaves identically to using to find the desired and then + /// invoking it with this as its argument. + /// + public async Task InvokeAsync( + string? pluginName, + string functionName, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + FunctionResult result = await this.InvokeAsync(pluginName, functionName, arguments, cancellationToken).ConfigureAwait(false); + return result.GetValue(); + } + + #endregion + + #region InvokeStreamingAsync + /// + /// Invokes the and streams its results. + /// + /// The to invoke. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// An for streaming the results of the function's invocation. + /// is null. + /// + /// The function will not be invoked until an enumerator is retrieved from the returned + /// and its iteration initiated via an initial call to . + /// + public IAsyncEnumerable InvokeStreamingAsync( + KernelFunction function, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(function); + + return function.InvokeStreamingAsync(this, arguments, cancellationToken); + } + + /// + /// Invokes the and streams its results. + /// + /// The name of the plugin containing the function to invoke. If null, all plugins will be searched for the first function of the specified name. + /// The name of the function to invoke. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// An for streaming the results of the function's invocation. + /// is null. + /// is composed entirely of whitespace. + /// The 's invocation was canceled. + /// + /// The function will not be invoked until an enumerator is retrieved from the returned + /// and its iteration initiated via an initial call to . + /// + public IAsyncEnumerable InvokeStreamingAsync( + string? pluginName, + string functionName, + KernelArguments? 
arguments = null, + CancellationToken cancellationToken = default) + { + Verify.NotNullOrWhiteSpace(functionName); + + var function = this.Plugins.GetFunction(pluginName, functionName); + + return function.InvokeStreamingAsync(this, arguments, cancellationToken); + } + + /// + /// Invokes the and streams its results. + /// + /// The to invoke. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// An for streaming the results of the function's invocation. + /// is null. + /// + /// The function will not be invoked until an enumerator is retrieved from the returned + /// and its iteration initiated via an initial call to . + /// + public IAsyncEnumerable InvokeStreamingAsync( + KernelFunction function, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + Verify.NotNull(function); + + return function.InvokeStreamingAsync(this, arguments, cancellationToken); + } + + /// + /// Invokes the and streams its results. + /// + /// The name of the plugin containing the function to invoke. If null, all plugins will be searched for the first function of the specified name. + /// The name of the function to invoke. + /// The arguments to pass to the function's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// An for streaming the results of the function's invocation. + /// is null. + /// is composed entirely of whitespace. + /// The 's invocation was canceled. + /// + /// The function will not be invoked until an enumerator is retrieved from the returned + /// and its iteration initiated via an initial call to . + /// + public IAsyncEnumerable InvokeStreamingAsync( + string? pluginName, + string functionName, + KernelArguments? arguments = null, + CancellationToken cancellationToken = default) + { + Verify.NotNullOrWhiteSpace(functionName); + + var function = this.Plugins.GetFunction(pluginName, functionName); + + return function.InvokeStreamingAsync(this, arguments, cancellationToken); + } + #endregion + + #region Obsolete + + /// + /// Provides an event that's raised prior to a function's invocation. + /// + [Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] + public event EventHandler? FunctionInvoking; + + /// + /// Provides an event that's raised after a function's invocation. + /// + [Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] + public event EventHandler? FunctionInvoked; + + /// + /// Provides an event that's raised prior to a prompt being rendered. + /// + [Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] + public event EventHandler? PromptRendering; + + /// + /// Provides an event that's raised after a prompt is rendered. + /// + [Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] + public event EventHandler? PromptRendered; + + [Obsolete("Events are deprecated in favor of filters. 
Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] + internal FunctionInvokingEventArgs? OnFunctionInvoking(KernelFunction function, KernelArguments arguments) + { + FunctionInvokingEventArgs? eventArgs = null; + if (this.FunctionInvoking is { } functionInvoking) + { + eventArgs = new(function, arguments); + functionInvoking.Invoke(this, eventArgs); + } + + return eventArgs; + } + + [Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] + internal FunctionInvokedEventArgs? OnFunctionInvoked(KernelFunction function, KernelArguments arguments, FunctionResult result) + { + FunctionInvokedEventArgs? eventArgs = null; + if (this.FunctionInvoked is { } functionInvoked) + { + eventArgs = new(function, arguments, result); + functionInvoked.Invoke(this, eventArgs); + } + + return eventArgs; + } + + [Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] + internal PromptRenderingEventArgs? OnPromptRendering(KernelFunction function, KernelArguments arguments) + { + PromptRenderingEventArgs? eventArgs = null; + if (this.PromptRendering is { } promptRendering) + { + eventArgs = new(function, arguments); + promptRendering.Invoke(this, eventArgs); + } + + return eventArgs; + } + + [Obsolete("Events are deprecated in favor of filters. Example in dotnet/samples/KernelSyntaxExamples/Getting_Started/Step7_Observability.cs of Semantic Kernel repository.")] + internal PromptRenderedEventArgs? OnPromptRendered(KernelFunction function, KernelArguments arguments, string renderedPrompt) + { + PromptRenderedEventArgs? eventArgs = null; + if (this.PromptRendered is { } promptRendered) + { + eventArgs = new(function, arguments, renderedPrompt); + promptRendered.Invoke(this, eventArgs); + } + + return eventArgs; + } + + #endregion +} diff --git a/dotnet/src/SemanticKernel.Abstractions/KernelBuilder.cs b/dotnet/src/SemanticKernel.Abstractions/KernelBuilder.cs new file mode 100644 index 000000000000..a792944bab82 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/KernelBuilder.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.DependencyInjection; + +namespace Microsoft.SemanticKernel; + +/// Provides a builder for constructing instances of . +internal sealed class KernelBuilder : IKernelBuilder, IKernelBuilderPlugins +{ + /// The collection of services to be available through the . + private IServiceCollection? _services; + + /// Initializes a new instance of the . + public KernelBuilder() + { + this.AllowBuild = true; + } + + /// Initializes a new instance of the . + /// + /// The to wrap and use for building the . + /// + public KernelBuilder(IServiceCollection services) + { + Verify.NotNull(services); + + this._services = services; + } + + /// Whether to allow a call to Build. + /// As a minor aid to help avoid misuse, we try to prevent Build from being called on instances returned from AddKernel. + internal bool AllowBuild { get; } + + /// Gets the collection of services to be built into the . + public IServiceCollection Services => this._services ??= new ServiceCollection(); + + /// Gets a builder for plugins to be built as services into the . 
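+ // Note: KernelBuilder implements IKernelBuilderPlugins itself, so this property returns the same
+ // instance; plugins added through it are registered into the same Services collection.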
+ public IKernelBuilderPlugins Plugins => this; +} diff --git a/dotnet/src/SemanticKernel.Abstractions/KernelException.cs b/dotnet/src/SemanticKernel.Abstractions/KernelException.cs new file mode 100644 index 000000000000..ea62aa07ae81 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/KernelException.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents the base exception from which all Semantic Kernel exceptions derive. +/// +public class KernelException : Exception +{ + /// + /// Initializes a new instance of the class. + /// + public KernelException() + { + } + + /// + /// Initializes a new instance of the class with a specified error message. + /// + /// The error message that explains the reason for the exception. + public KernelException(string? message) : base(message) + { + } + + /// + /// Initializes a new instance of the class with a specified error message and a reference to the inner exception that is the cause of this exception. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference if no inner exception is specified. + public KernelException(string? message, Exception? innerException) : base(message, innerException) + { + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/DataEntryBase.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/DataEntryBase.cs index 6d6294306423..acad3098e5c2 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/DataEntryBase.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/DataEntryBase.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Memory; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Memory; /// /// A base class for data entries. /// +[Experimental("SKEXP0003")] public class DataEntryBase { /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/IMemoryStore.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/IMemoryStore.cs index 72825192c64e..96233162e7b8 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/IMemoryStore.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/IMemoryStore.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -10,6 +11,7 @@ namespace Microsoft.SemanticKernel.Memory; /// /// An interface for storing and retrieving indexed objects in a data store. /// +[Experimental("SKEXP0003")] public interface IMemoryStore { /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs index 76aac625b98c..dce49c6f708d 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/ISemanticTextMemory.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel.Memory; /// /// An interface for semantic memory that creates and recalls memories associated with text. 
/// +[Experimental("SKEXP0003")] public interface ISemanticTextMemory { /// @@ -19,6 +21,7 @@ public interface ISemanticTextMemory /// Unique identifier. /// Optional description. /// Optional string for saving custom metadata. + /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Unique identifier of the saved memory record. public Task SaveInformationAsync( @@ -27,6 +30,7 @@ public Task SaveInformationAsync( string id, string? description = null, string? additionalMetadata = null, + Kernel? kernel = null, CancellationToken cancellationToken = default); /// @@ -38,6 +42,7 @@ public Task SaveInformationAsync( /// Name of the external service, e.g. "MSTeams", "GitHub", "WebSite", "Outlook IMAP", etc. /// Optional description. /// Optional string for saving custom metadata. + /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Unique identifier of the saved memory record. public Task SaveReferenceAsync( @@ -47,6 +52,7 @@ public Task SaveReferenceAsync( string externalSourceName, string? description = null, string? additionalMetadata = null, + Kernel? kernel = null, CancellationToken cancellationToken = default); /// @@ -57,9 +63,10 @@ public Task SaveReferenceAsync( /// Collection to search. /// Unique memory record identifier. /// Whether to return the embedding of the memory found. + /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Memory record, or null when nothing is found - public Task GetAsync(string collection, string key, bool withEmbedding = false, CancellationToken cancellationToken = default); + public Task GetAsync(string collection, string key, bool withEmbedding = false, Kernel? kernel = null, CancellationToken cancellationToken = default); /// /// Remove a memory by key. @@ -68,8 +75,9 @@ public Task SaveReferenceAsync( /// /// Collection to search. /// Unique memory record identifier. + /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . - public Task RemoveAsync(string collection, string key, CancellationToken cancellationToken = default); + public Task RemoveAsync(string collection, string key, Kernel? kernel = null, CancellationToken cancellationToken = default); /// /// Find some information in memory @@ -79,6 +87,7 @@ public Task SaveReferenceAsync( /// How many results to return /// Minimum relevance score, from 0 to 1, where 1 means exact match. /// Whether to return the embeddings of the memories found. + /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// Memories found public IAsyncEnumerable SearchAsync( @@ -87,12 +96,14 @@ public IAsyncEnumerable SearchAsync( int limit = 1, double minRelevanceScore = 0.7, bool withEmbeddings = false, + Kernel? kernel = null, CancellationToken cancellationToken = default); /// /// Gets a group of all available collection names. /// + /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . /// A group of collection names. 
- public Task> GetCollectionsAsync(CancellationToken cancellationToken = default); + public Task> GetCollectionsAsync(Kernel? kernel = null, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryQueryResult.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryQueryResult.cs index 76efda1317ad..83c3d856dcfb 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryQueryResult.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryQueryResult.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Text; @@ -9,6 +10,7 @@ namespace Microsoft.SemanticKernel.Memory; /// /// Copy of metadata associated with a memory entry. /// +[Experimental("SKEXP0003")] public class MemoryQueryResult { /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecord.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecord.cs index d87c7a876ed3..e8d3a6c86df6 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecord.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecord.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; using System.Text.Json; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Diagnostics; using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Memory; @@ -11,6 +11,7 @@ namespace Microsoft.SemanticKernel.Memory; /// /// IMPORTANT: this is a storage schema. Changing the fields will invalidate existing metadata stored in persistent vector DBs. /// +[Experimental("SKEXP0003")] public class MemoryRecord : DataEntryBase { /// @@ -122,7 +123,7 @@ public static MemoryRecord LocalRecord( /// Optional existing database key. /// optional timestamp. /// Memory record - /// + /// public static MemoryRecord FromJsonMetadata( string json, ReadOnlyMemory embedding, @@ -132,7 +133,7 @@ public static MemoryRecord FromJsonMetadata( var metadata = JsonSerializer.Deserialize(json); return metadata != null ? new MemoryRecord(metadata, embedding, key, timestamp) - : throw new SKException("Unable to create memory record from serialized metadata"); + : throw new KernelException("Unable to create memory record from serialized metadata"); } /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecordMetadata.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecordMetadata.cs index caae2875a0db..baa56dc222dc 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecordMetadata.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecordMetadata.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using System; +using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Memory; @@ -8,6 +9,7 @@ namespace Microsoft.SemanticKernel.Memory; /// /// Class representing the metadata associated with a Semantic Kernel memory. /// +[Experimental("SKEXP0003")] public class MemoryRecordMetadata : ICloneable { /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/NullMemory.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/NullMemory.cs index 68189244a433..2a49fb94c3ec 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/NullMemory.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/NullMemory.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. 
All rights reserved. using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Threading; using System.Threading.Tasks; @@ -10,6 +11,7 @@ namespace Microsoft.SemanticKernel.Memory; /// /// Implementation of that stores nothing. /// +[Experimental("SKEXP0003")] public sealed class NullMemory : ISemanticTextMemory { private static readonly Task s_emptyStringTask = Task.FromResult(string.Empty); @@ -26,6 +28,7 @@ public Task SaveInformationAsync( string id, string? description = null, string? additionalMetadata = null, + Kernel? kernel = null, CancellationToken cancellationToken = default) { return s_emptyStringTask; @@ -39,6 +42,7 @@ public Task SaveReferenceAsync( string externalSourceName, string? description = null, string? additionalMetadata = null, + Kernel? kernel = null, CancellationToken cancellationToken = default) { return s_emptyStringTask; @@ -49,6 +53,7 @@ public Task SaveReferenceAsync( string collection, string key, bool withEmbedding = false, + Kernel? kernel = null, CancellationToken cancellationToken = default) { return Task.FromResult(null); @@ -58,6 +63,7 @@ public Task SaveReferenceAsync( public Task RemoveAsync( string collection, string key, + Kernel? kernel = null, CancellationToken cancellationToken = default) { return Task.CompletedTask; @@ -70,6 +76,7 @@ public IAsyncEnumerable SearchAsync( int limit = 1, double minRelevanceScore = 0.0, bool withEmbeddings = false, + Kernel? kernel = null, CancellationToken cancellationToken = default) { return AsyncEnumerable.Empty(); @@ -77,6 +84,7 @@ public IAsyncEnumerable SearchAsync( /// public Task> GetCollectionsAsync( + Kernel? kernel = null, CancellationToken cancellationToken = default) { return Task.FromResult>(new List()); diff --git a/dotnet/src/SemanticKernel.Abstractions/Orchestration/ContextVariables.cs b/dotnet/src/SemanticKernel.Abstractions/Orchestration/ContextVariables.cs deleted file mode 100644 index 42369f39c7d1..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Orchestration/ContextVariables.cs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Orchestration; - -/// -/// Context Variables is a data structure that holds temporary data while a task is being performed. -/// It is accessed and manipulated by functions in the pipeline. -/// -[DebuggerDisplay("{DebuggerDisplay,nq}")] -[DebuggerTypeProxy(typeof(ContextVariables.TypeProxy))] -public sealed class ContextVariables : Dictionary -{ - /// - /// Constructor for context variables. - /// - /// Optional value for the main variable of the context including trust information. - public ContextVariables(string? value = null) - : base(StringComparer.OrdinalIgnoreCase) - { - this.Set(MainKey, value); - } - - /// - /// Create a copy of the current instance with a copy of the internal data - /// - /// Copy of the current instance - public ContextVariables Clone() - { - var clone = new ContextVariables(); - foreach (KeyValuePair x in this) - { - clone.Set(x.Key, x.Value); - } - - return clone; - } - - /// Gets the main input string. - /// If the main input string was removed from the collection, an empty string will be returned. - public string Input => this.TryGetValue(MainKey, out string? value) ? 
value : string.Empty; - - /// - /// Updates the main input text with the new value after a function is complete. - /// - /// The new input value, for the next function in the pipeline, or as a result for the user - /// if the pipeline reached the end. - /// The current instance - public ContextVariables Update(string? value) - { - this.Set(MainKey, value); - return this; - } - - /// - /// This method allows to store additional data in the context variables, e.g. variables needed by functions in the - /// pipeline. These "variables" are visible also to semantic functions using the "{{varName}}" syntax, allowing - /// to inject more information into prompt templates. - /// - /// Variable name - /// Value to store. If the value is NULL the variable is deleted. - public void Set(string name, string? value) - { - Verify.NotNullOrWhiteSpace(name); - if (value != null) - { - this[name] = value; - } - else - { - this.Remove(name); - } - } - - /// - /// Print the processed input, aka the current data after any processing occurred. - /// - /// Processed input, aka result - public override string ToString() => this.Input; - - internal const string MainKey = "INPUT"; - - [DebuggerBrowsable(DebuggerBrowsableState.Never)] - internal string DebuggerDisplay => - this.TryGetValue(MainKey, out string? input) && !string.IsNullOrEmpty(input) - ? $"Variables = {this.Count}, Input = {input}" - : $"Variables = {this.Count}"; - - #region private ================================================================================ - - private sealed class TypeProxy - { - private readonly ContextVariables _variables; - - public TypeProxy(ContextVariables variables) => this._variables = variables; - - [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)] - public KeyValuePair[] Items => this._variables.ToArray(); - } - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Orchestration/FunctionResult.cs b/dotnet/src/SemanticKernel.Abstractions/Orchestration/FunctionResult.cs deleted file mode 100644 index eaac4d7ae3e1..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Orchestration/FunctionResult.cs +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; - -namespace Microsoft.SemanticKernel.Orchestration; - -/// -/// Function result after execution. -/// -public sealed class FunctionResult -{ - internal Dictionary? _metadata; - - /// - /// Name of executed function. - /// - public string FunctionName { get; internal set; } - - /// - /// Name of the plugin containing the function. - /// - public string PluginName { get; internal set; } - - /// - /// Metadata for storing additional information about function execution result. - /// - public Dictionary Metadata - { - get => this._metadata ??= new(); - internal set => this._metadata = value; - } - - /// - /// Function result object. - /// - internal object? Value { get; private set; } = null; - - /// - /// Instance of to pass in function pipeline. - /// - internal SKContext Context { get; private set; } - - /// - /// Initializes a new instance of the class. - /// - /// Name of executed function. - /// Name of the plugin containing the function. - /// Instance of to pass in function pipeline. - public FunctionResult(string functionName, string pluginName, SKContext context) - { - this.FunctionName = functionName; - this.PluginName = pluginName; - this.Context = context; - } - - /// - /// Initializes a new instance of the class. - /// - /// Name of executed function. 
- /// Name of the plugin containing the function. - /// Instance of to pass in function pipeline. - /// Function result object. - public FunctionResult(string functionName, string pluginName, SKContext context, object? value) - : this(functionName, pluginName, context) - { - this.Value = value; - } - - /// - /// Returns function result value. - /// - /// Target type for result value casting. - /// Thrown when it's not possible to cast result value to . - public T? GetValue() - { - if (this.Value is null) - { - return default; - } - - if (this.Value is T typedResult) - { - return typedResult; - } - - throw new InvalidCastException($"Cannot cast {this.Value.GetType()} to {typeof(T)}"); - } - - /// - /// Get typed value from metadata. - /// - public bool TryGetMetadataValue(string key, out T value) - { - if (this._metadata is { } metadata && - metadata.TryGetValue(key, out object? valueObject) && - valueObject is T typedValue) - { - value = typedValue; - return true; - } - - value = default!; - return false; - } - - /// - public override string ToString() => this.Value?.ToString() ?? base.ToString(); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Orchestration/IFunctionRunner.cs b/dotnet/src/SemanticKernel.Abstractions/Orchestration/IFunctionRunner.cs deleted file mode 100644 index 71357d2f304a..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Orchestration/IFunctionRunner.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Orchestration; - -/// -/// Function runner interface. -/// -public interface IFunctionRunner -{ - /// - /// Execute a function using the resources loaded in the context. - /// - /// Target function to run - /// Input to process - /// The to monitor for cancellation requests. The default is . - /// Result of the function composition - Task RunAsync( - ISKFunction skFunction, - ContextVariables? variables = null, - CancellationToken cancellationToken = default); - - /// - /// Execute a function using the resources loaded in the context. - /// - /// The name of the plugin containing the function to run - /// The name of the function to run - /// Input to process - /// The to monitor for cancellation requests. The default is . - /// Result of the function composition - Task RunAsync( - string pluginName, - string functionName, - ContextVariables? variables = null, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Orchestration/KernelResult.cs b/dotnet/src/SemanticKernel.Abstractions/Orchestration/KernelResult.cs deleted file mode 100644 index d4537579e286..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Orchestration/KernelResult.cs +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; - -namespace Microsoft.SemanticKernel.Orchestration; - -/// -/// Kernel result after execution. -/// -public sealed class KernelResult -{ - /// - /// Results from all functions in pipeline. - /// - public IReadOnlyCollection FunctionResults { get; internal set; } = Array.Empty(); - - /// - /// Kernel result object. - /// - internal object? Value { get; private set; } = null; - - /// - /// Returns kernel result value. - /// - /// Target type for result value casting. - /// Thrown when it's not possible to cast result value to . - public T? 
GetValue() - { - if (this.Value is null) - { - return default; - } - - if (this.Value is T typedResult) - { - return typedResult; - } - - throw new InvalidCastException($"Cannot cast {this.Value.GetType()} to {typeof(T)}"); - } - - /// - /// Creates instance of based on function results. - /// - /// Kernel result object. - /// Results from all functions in pipeline. - public static KernelResult FromFunctionResults(object? value, IReadOnlyCollection functionResults) - { - return new KernelResult - { - Value = value, - FunctionResults = functionResults - }; - } - - /// - public override string ToString() => this.Value?.ToString() ?? base.ToString(); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Orchestration/ModelResult.cs b/dotnet/src/SemanticKernel.Abstractions/Orchestration/ModelResult.cs deleted file mode 100644 index 0f97d4f4ee40..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Orchestration/ModelResult.cs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Text.Json; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Text; - -#pragma warning disable CA1024 - -namespace Microsoft.SemanticKernel.Orchestration; - -/// -/// Represents a result from a model execution. -/// -public sealed class ModelResult -{ - private readonly object _result; - - /// - /// Initializes a new instance of the class with the specified result object. - /// - /// The result object to be stored in the ModelResult instance. - public ModelResult(object result) - { - Verify.NotNull(result); - - this._result = result; - } - - /// - /// Gets the raw result object stored in the instance. - /// - /// The raw result object. - public object GetRawResult() => this._result; - - /// - /// Gets the result object stored in the instance, cast to the specified type. - /// - /// The type to cast the result object to. - /// The result object cast to the specified type. - /// Thrown when the result object cannot be cast to the specified type. - public T GetResult() - { - if (this._result is T typedResult) - { - return typedResult; - } - - throw new InvalidCastException($"Cannot cast {this._result.GetType()} to {typeof(T)}"); - } - - /// - /// Gets the result object stored in the ModelResult instance as a JSON element. - /// - /// The result object as a JSON element. - public JsonElement GetJsonResult() - { - return Json.Deserialize(Json.Serialize(this._result)); - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Orchestration/SKContext.cs b/dotnet/src/SemanticKernel.Abstractions/Orchestration/SKContext.cs deleted file mode 100644 index f99f48fb7dcc..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Orchestration/SKContext.cs +++ /dev/null @@ -1,161 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.Orchestration; - -/// -/// Semantic Kernel context. -/// -[DebuggerDisplay("{DebuggerDisplay,nq}")] -public sealed class SKContext -{ - /// - /// Print the processed input, aka the current data after any processing occurred. 
- /// - /// Processed input, aka result - public string Result => this.Variables.ToString(); - - /// - /// When a prompt is processed, aka the current data after any model results processing occurred. - /// (One prompt can have multiple results). - /// - [Obsolete($"ModelResults are now part of {nameof(FunctionResult.Metadata)} property. Use 'ModelResults' key or available extension methods to get model results.")] - public IReadOnlyCollection ModelResults => Array.Empty(); - - /// - /// The culture currently associated with this context. - /// - public CultureInfo Culture - { - get => this._culture; - set => this._culture = value ?? CultureInfo.CurrentCulture; - } - - /// - /// User variables - /// - public ContextVariables Variables { get; } - - /// - /// Read only functions collection - /// - public IReadOnlyFunctionCollection Functions { get; } - - /// - /// App logger - /// - public ILoggerFactory LoggerFactory { get; } - - /// - /// Executes functions using the current resources loaded in the context - /// - public IFunctionRunner Runner { get; } - - /// - /// AI service provider - /// - internal IAIServiceProvider ServiceProvider { get; } - - /// - /// AIService selector implementation - /// - internal IAIServiceSelector ServiceSelector { get; } - - /// - /// Constructor for the context. - /// - /// Function runner reference - /// AI service provider - /// AI service selector - /// Context variables to include in context. - /// Functions to include in context. - /// Logger factory to be used in context - /// Culture related to the context - internal SKContext( - IFunctionRunner functionRunner, - IAIServiceProvider serviceProvider, - IAIServiceSelector serviceSelector, - ContextVariables? variables = null, - IReadOnlyFunctionCollection? functions = null, - ILoggerFactory? loggerFactory = null, - CultureInfo? culture = null) - { - Verify.NotNull(functionRunner, nameof(functionRunner)); - - this.Runner = functionRunner; - this.ServiceProvider = serviceProvider; - this.ServiceSelector = serviceSelector; - this.Variables = variables ?? new(); - this.Functions = functions ?? NullReadOnlyFunctionCollection.Instance; - this.LoggerFactory = loggerFactory ?? NullLoggerFactory.Instance; - this._culture = culture ?? CultureInfo.CurrentCulture; - } - - /// - /// Print the processed input, aka the current data after any processing occurred. - /// - /// Processed input, aka result. - public override string ToString() - { - return this.Result; - } - - /// - /// Create a clone of the current context, using the same kernel references (memory, functions, logger) - /// and a new set variables, so that variables can be modified without affecting the original context. - /// - /// A new context cloned from the current one - public SKContext Clone() - => this.Clone(null, null); - - /// - /// Create a clone of the current context, using the same kernel references (memory, functions, logger) - /// and optionally allows overriding the variables and functions. - /// - /// Override the variables with the provided ones - /// Override the functions with the provided ones - /// A new context cloned from the current one - public SKContext Clone(ContextVariables? variables, IReadOnlyFunctionCollection? functions) - { - return new SKContext( - this.Runner, - this.ServiceProvider, - this.ServiceSelector, - variables ?? this.Variables.Clone(), - functions ?? this.Functions, - this.LoggerFactory, - this.Culture); - } - - /// - /// The culture currently associated with this context. 
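The files deleted above retire the ContextVariables/SKContext orchestration model (ContextVariables, FunctionResult, IFunctionRunner, KernelResult, ModelResult, and SKContext are all removed). In the replacement surface referenced elsewhere in this change, arguments travel as KernelArguments and functions are invoked directly on the Kernel. A rough migration sketch under that assumption; the new-style FunctionResult and Kernel.InvokeAsync are defined outside this section:

    using System;
    using System.Threading.Tasks;
    using Microsoft.SemanticKernel;

    public static class OrchestrationMigrationSketch
    {
        public static async Task RunAsync(Kernel kernel, KernelFunction summarize)
        {
            // The removed types carried input through ContextVariables and returned
            // KernelResult/FunctionResult; in the newer surface, KernelArguments plus
            // a direct InvokeAsync call replace that pipeline.
            var arguments = new KernelArguments { ["input"] = "some text" };

            FunctionResult result = await kernel.InvokeAsync(summarize, arguments);

            // GetValue<T> plays the role of the removed FunctionResult/KernelResult accessors.
            string? summary = result.GetValue<string>();
            Console.WriteLine(summary);
        }
    }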
- /// - private CultureInfo _culture; - - [DebuggerBrowsable(DebuggerBrowsableState.Never)] - private string DebuggerDisplay - { - get - { - string display = this.Variables.DebuggerDisplay; - - if (this.Functions is IReadOnlyFunctionCollection functions) - { - var view = functions.GetFunctionViews(); - display += $", Functions = {view.Count}"; - } - - display += $", Culture = {this.Culture.EnglishName}"; - - return display; - } - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplate.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplate.cs new file mode 100644 index 000000000000..a819fc18f42f --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplate.cs @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents a prompt template that can be rendered to a string. +/// +public interface IPromptTemplate +{ + /// + /// Renders the template using the supplied and . + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments to use to satisfy any input variables in the prompt template. + /// The to monitor for cancellation requests. The default is . + /// The prompt rendered to a string. + /// is null. + Task RenderAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplateFactory.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplateFactory.cs new file mode 100644 index 000000000000..146137f00d8d --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplateFactory.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents a factory for prompt templates for one or more prompt template formats. +/// +public interface IPromptTemplateFactory +{ + /// + /// Creates an instance of from a . + /// + /// Prompt template configuration + /// The created template, or null if the specified template format is not supported. + /// true if the format is supported and the template can be created; otherwise, false. + bool TryCreate(PromptTemplateConfig templateConfig, [NotNullWhen(true)] out IPromptTemplate? result); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/InputVariable.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/InputVariable.cs new file mode 100644 index 000000000000..9f0d3e19594b --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/InputVariable.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents an input variable for prompt functions. +/// +public sealed class InputVariable +{ + /// The name of the variable. + private string _name = string.Empty; + /// The description of the variable. + private string _description = string.Empty; + + /// + /// Gets or sets the name of the variable. + /// + /// + /// As an example, when using "{{$style}}", the name is "style". 
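IPromptTemplate and IPromptTemplateFactory above define how a PromptTemplateConfig becomes a renderable template, with InputVariable describing the placeholders the template expects. A small end-to-end sketch, assuming some concrete IPromptTemplateFactory (for example the default semantic-kernel format factory, which lives outside this file):

    using System;
    using System.Threading.Tasks;
    using Microsoft.SemanticKernel;

    public static class PromptTemplateSketch
    {
        public static async Task RenderAsync(Kernel kernel, IPromptTemplateFactory factory)
        {
            var config = new PromptTemplateConfig("Write a {{$style}} greeting for {{$name}}.")
            {
                Name = "Greeting",
                InputVariables =
                {
                    new InputVariable { Name = "style", Description = "Tone of the greeting" },
                    new InputVariable { Name = "name", Description = "Recipient name" },
                },
            };

            // TryCreate returns false for unsupported template formats instead of throwing.
            if (factory.TryCreate(config, out IPromptTemplate? template))
            {
                string prompt = await template.RenderAsync(
                    kernel,
                    new KernelArguments { ["style"] = "formal", ["name"] = "Ada" });
                Console.WriteLine(prompt);
            }
        }
    }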
+ /// + [JsonPropertyName("name")] + public string Name + { + get => this._name; + set + { + Verify.NotNull(value); + this._name = value; + } + } + + /// + /// Gets or sets a description of the variable. + /// + [JsonPropertyName("description")] + [AllowNull] + public string Description + { + get => this._description; + set => this._description = value ?? string.Empty; + } + + /// + /// Gets or sets a default value for the variable. + /// + [JsonPropertyName("default")] + public object? Default { get; set; } + + /// + /// Gets or sets whether the variable is considered required (rather than optional). + /// + /// + /// The default is true. + /// + [JsonPropertyName("is_required")] + public bool IsRequired { get; set; } = true; + + /// + /// Gets or sets JSON Schema describing this variable. + /// + /// + /// This string will be deserialized into an instance of . + /// + [JsonPropertyName("json_schema")] + public string? JsonSchema { get; set; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/OutputVariable.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/OutputVariable.cs new file mode 100644 index 000000000000..e374ba9f7385 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/OutputVariable.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents an output variable returned from a prompt function. +/// +public sealed class OutputVariable +{ + /// The description of the variable. + private string _description = string.Empty; + + /// + /// Gets or sets a description of this output. + /// + [JsonPropertyName("description")] + [AllowNull] + public string Description + { + get => this._description; + set => this._description = value ?? string.Empty; + } + + /// + /// Gets or sets JSON Schema describing this output. + /// + /// + /// This string will be deserialized into an instance of . + /// + [JsonPropertyName("json_schema")] + public string? JsonSchema { get; set; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs new file mode 100644 index 000000000000..f650ae7b1c3a --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs @@ -0,0 +1,260 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides the configuration information necessary to create a prompt template. +/// +/// +/// A prompt template is a template that can be used to generate a prompt to be submitted to an AI service. +/// For basic prompts, the template may be supplied as a simple string. For more complex prompts, more information +/// is desirable for describing the prompt template, such as details on input variables expected by the template. +/// This can all be provided by a , where its +/// is the prompt template string itself, then with other properties set with additional information. 
To create the +/// actual prompt template, a is used to create an ; +/// this is done automatically by the APIs that accept a , using a default template +/// factory that understands the format, but with the +/// ability to supply other factories for interpreting other formats. +/// +public sealed class PromptTemplateConfig +{ + /// The format of the prompt template. + private string? _templateFormat; + /// The prompt template string. + private string _template = string.Empty; + /// Lazily-initialized input variables. + private List? _inputVariables; + /// Lazily-initialized execution settings. The key is the service ID, or for the default execution settings. + private Dictionary? _executionSettings; + + /// + /// Initializes a new instance of the class. + /// + public PromptTemplateConfig() + { + } + + /// + /// Initializes a new instance of the class using the specified prompt template string. + /// + /// The prompt template string that defines the prompt. + /// is null. + public PromptTemplateConfig(string template) + { + this.Template = template; + } + + /// + /// Creates a from the specified JSON. + /// + /// A string containing a JSON representation of the . + /// The deserialized . + /// is null. + /// is an invalid JSON representation of a . + public static PromptTemplateConfig FromJson(string json) + { + Verify.NotNullOrWhiteSpace(json); + + Exception? innerException = null; + PromptTemplateConfig? config = null; + try + { + config = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); + if (config is null) + { + throw new ArgumentException($"Unable to deserialize {nameof(PromptTemplateConfig)} from the specified JSON.", nameof(json)); + } + + // Prevent the default value from being any type other than a string. + // It's a temporary limitation that helps shape the public API surface + // (changing the type of the Default property to object) now, before the release. + // This helps avoid a breaking change while a proper solution for + // dealing with the different deserialization outputs of JSON/YAML prompt configurations is being evaluated. + foreach (var inputVariable in config.InputVariables) + { + // The value of the default property becomes a JsonElement after deserialization because that is how the JsonSerializer handles properties of the object type. + if (inputVariable.Default is JsonElement element) + { + if (element.ValueKind == JsonValueKind.String) + { + inputVariable.Default = element.ToString(); + } + else + { + throw new NotSupportedException($"Default value for input variable '{inputVariable.Name}' must be a string. " + + $"This is a temporary limitation; future updates are expected to remove this constraint. Prompt function - '{config.Name ?? config.Description}'."); + } + } + } + } + catch (JsonException e) + { + innerException = e; + } + + return + config ?? + throw new ArgumentException($"Unable to deserialize {nameof(PromptTemplateConfig)} from the specified JSON.", nameof(json), innerException); + } + + /// + /// Gets or sets the function name to use by default when creating prompt functions using this configuration. + /// + /// + /// If the name is null or empty, a random name will be generated dynamically when creating a function. + /// + [JsonPropertyName("name")] + public string? Name { get; set; } + + /// + /// Gets or sets a function description to use by default when creating prompt functions using this configuration. + /// + [JsonPropertyName("description")] + public string? 
Description { get; set; } + + /// + /// Gets the identifier of the Semantic Kernel template format. + /// + public static string SemanticKernelTemplateFormat => "semantic-kernel"; + + /// + /// Gets or sets the format of the prompt template. + /// + /// + /// If no template format is specified, a default format of is used. + /// + [JsonPropertyName("template_format")] + [AllowNull] + public string TemplateFormat + { + get => this._templateFormat ?? SemanticKernelTemplateFormat; + set => this._templateFormat = value; + } + + /// + /// Gets or sets the prompt template string that defines the prompt. + /// + /// is null. + [JsonPropertyName("template")] + public string Template + { + get => this._template; + set + { + Verify.NotNull(value); + this._template = value; + } + } + + /// + /// Gets or sets the collection of input variables used by the prompt template. + /// + [JsonPropertyName("input_variables")] + public List InputVariables + { + get => this._inputVariables ??= new(); + set + { + Verify.NotNull(value); + this._inputVariables = value; + } + } + + /// + /// Gets or sets the output variable used by the prompt template. + /// + [JsonPropertyName("output_variable")] + public OutputVariable? OutputVariable { get; set; } + + /// + /// Gets or sets the collection of execution settings used by the prompt template. + /// + /// + /// The settings dictionary is keyed by the service ID, or for the default execution settings. + /// + [JsonPropertyName("execution_settings")] + public Dictionary ExecutionSettings + { + get => this._executionSettings ??= new(); + set + { + Verify.NotNull(value); + this._executionSettings = value; + } + } + + /// + /// Gets the default execution settings from . + /// + /// + /// If no default is specified, this will return null. + /// + public PromptExecutionSettings? DefaultExecutionSettings => this._executionSettings?.TryGetValue(PromptExecutionSettings.DefaultServiceId, out PromptExecutionSettings? settings) is true ? settings : null; + + /// + /// Adds the specified to the dictionary. + /// + /// + /// The key is the service ID, or for the default execution settings. + /// If the service ID is null, will be used. + /// + /// The to add to the dictionary. + /// The service ID with which to associated , or null if this should be the default settings. + public void AddExecutionSettings(PromptExecutionSettings settings, string? serviceId = null) + { + Verify.NotNull(settings); + + var key = serviceId ?? PromptExecutionSettings.DefaultServiceId; + if (this.ExecutionSettings.ContainsKey(key)) + { + throw new ArgumentException($"Execution settings for service id '{key}' already exists.", nameof(serviceId)); + } + + this.ExecutionSettings[key] = settings; + } + + /// + /// Converts the collection into a collection of . + /// + internal IReadOnlyList GetKernelParametersMetadata() + { + KernelParameterMetadata[] result = Array.Empty(); + if (this._inputVariables is List inputVariables) + { + result = new KernelParameterMetadata[inputVariables.Count]; + for (int i = 0; i < result.Length; i++) + { + InputVariable p = inputVariables[i]; + result[i] = new KernelParameterMetadata(p.Name) + { + Description = p.Description, + DefaultValue = p.Default, + IsRequired = p.IsRequired, + ParameterType = !string.IsNullOrWhiteSpace(p.JsonSchema) ? null : p.Default?.GetType() ?? typeof(string), + Schema = !string.IsNullOrWhiteSpace(p.JsonSchema) ? KernelJsonSchema.Parse(p.JsonSchema!) : null, + }; + } + } + + return result; + } + + /// + /// Converts any into a . 
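PromptTemplateConfig.FromJson, shown above, deserializes a JSON prompt definition and currently insists that input-variable defaults be strings. A hedged sketch of a payload that satisfies those rules; the property names come from the JsonPropertyName attributes above, while the content itself is invented:

    using System;
    using Microsoft.SemanticKernel;

    public static class PromptConfigJsonSketch
    {
        public static PromptTemplateConfig Load()
        {
            const string json = """
            {
              "name": "Summarize",
              "description": "Summarizes the supplied text.",
              "template": "Summarize the following text in one sentence: {{$input}}",
              "template_format": "semantic-kernel",
              "input_variables": [
                { "name": "input", "description": "Text to summarize", "is_required": true, "default": "" }
              ],
              "output_variable": { "description": "A one-sentence summary." }
            }
            """;

            // FromJson throws ArgumentException for invalid JSON and NotSupportedException
            // when an input variable's default value is not a string.
            PromptTemplateConfig config = PromptTemplateConfig.FromJson(json);
            Console.WriteLine(config.Name);
            return config;
        }
    }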
+ /// + internal KernelReturnParameterMetadata? GetKernelReturnParameterMetadata() => + this.OutputVariable is OutputVariable outputVariable ? + new KernelReturnParameterMetadata + { + Description = outputVariable.Description, + Schema = KernelJsonSchema.ParseOrNull(outputVariable.JsonSchema) + } : + null; +} diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateFactoryExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateFactoryExtensions.cs new file mode 100644 index 000000000000..b891098a4ccd --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateFactoryExtensions.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for operating on instances. +/// +public static class PromptTemplateFactoryExtensions +{ + /// + /// Creates an instance of from a . + /// + /// The factory with which to create the template. + /// Prompt template configuration + /// The created template. + /// The factory does not support the specified configuration. + public static IPromptTemplate Create(this IPromptTemplateFactory factory, PromptTemplateConfig templateConfig) + { + Verify.NotNull(factory); + Verify.NotNull(templateConfig); + + if (!factory.TryCreate(templateConfig, out IPromptTemplate? result)) + { + throw new KernelException($"Prompt template format {templateConfig.TemplateFormat} is not supported."); + } + + return result; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Reliability/DefaultHttpRetryHandler.cs b/dotnet/src/SemanticKernel.Abstractions/Reliability/DefaultHttpRetryHandler.cs deleted file mode 100644 index c9997c6c2a1d..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Reliability/DefaultHttpRetryHandler.cs +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; - -namespace Microsoft.SemanticKernel.Reliability; -/// -/// A delegating handler that provides retry logic for HTTP requests. -/// -[Obsolete("Usage of Semantic Kernel internal retry abstractions is deprecated.\nCheck KernelSyntaxExamples.Example42_KernelBuilder.cs for alternatives")] -public sealed class DefaultHttpRetryHandler : DelegatingHandler -{ - /// - /// Initializes a new instance of the class. - /// - /// The retry configuration. - /// The to use for logging. If null, no logging will be performed. - public DefaultHttpRetryHandler(HttpRetryConfig? config = null, ILoggerFactory? loggerFactory = null) - : this(config ?? new HttpRetryConfig(), loggerFactory, null, null) - { - } - - internal DefaultHttpRetryHandler( - HttpRetryConfig config, - ILoggerFactory? loggerFactory = null, - IDelayProvider? delayProvider = null, - ITimeProvider? timeProvider = null) - { - this._config = config; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(DefaultHttpRetryHandler)) : NullLogger.Instance; - this._delayProvider = delayProvider ?? new TaskDelayProvider(); - this._timeProvider = timeProvider ?? new DefaultTimeProvider(); - } - - /// - /// Executes the action with retry logic - /// - /// - /// The request is retried if it throws an exception that is a retryable exception. - /// If the request throws an exception that is not a retryable exception, it is not retried. 
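AddExecutionSettings and the Create extension above pair per-service execution settings with a template configuration and throw KernelException when no factory understands the requested format. A short programmatic sketch, again assuming a concrete IPromptTemplateFactory is available:

    using Microsoft.SemanticKernel;

    public static class PromptConfigSettingsSketch
    {
        public static IPromptTemplate Build(IPromptTemplateFactory factory)
        {
            var config = new PromptTemplateConfig("Translate to French: {{$input}}")
            {
                Name = "Translate",
            };

            // Default settings (no service id) plus settings tied to the "secondary" service id.
            // AddExecutionSettings throws if the same key is added twice.
            config.AddExecutionSettings(new PromptExecutionSettings { ModelId = "model-a" });
            config.AddExecutionSettings(new PromptExecutionSettings { ModelId = "model-b" }, serviceId: "secondary");

            // Create (unlike TryCreate) throws KernelException if the template format is unsupported.
            return factory.Create(config);
        }
    }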
- /// If the request returns a response with a retryable error code, it is retried. - /// If the request returns a response with a non-retryable error code, it is not retried. - /// If the exception contains a RetryAfter header, the request is retried after the specified delay. - /// If configured to use exponential backoff, the delay is doubled for each retry. - /// - /// The request. - /// The cancellation token. - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - int retryCount = 0; - - var start = this._timeProvider.GetCurrentTime(); - while (true) - { - cancellationToken.ThrowIfCancellationRequested(); - - TimeSpan waitFor; - string reason; - HttpResponseMessage? response = null; - try - { - response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false); - - // If the request does not require a retry then we're done - if (!this.ShouldRetry(response.StatusCode)) - { - return response; - } - - reason = response.StatusCode.ToString(); - - // If the retry count is greater than the max retry count then we'll - // just return - if (retryCount >= this._config.MaxRetryCount) - { - this._logger.LogError( - "Error executing request, max retry count reached. Reason: {0}", reason); - return response; - } - - // If the retry delay is longer than the total timeout, then we'll - // just return - if (!this.HasTimeForRetry(start, retryCount, response, out waitFor)) - { - var timeTaken = this._timeProvider.GetCurrentTime() - start; - this._logger.LogError( - "Error executing request, max total retry time reached. Reason: {0}. Time spent: {1}ms", reason, - timeTaken.TotalMilliseconds); - return response; - } - } - catch (Exception e) when (this.ShouldRetry(e) || this.ShouldRetry(e.InnerException)) - { - reason = e.GetType().ToString(); - if (retryCount >= this._config.MaxRetryCount) - { - this._logger.LogError(e, - "Error executing request, max retry count reached. Reason: {0}", reason); - throw; - } - else if (!this.HasTimeForRetry(start, retryCount, response, out waitFor)) - { - var timeTaken = this._timeProvider.GetCurrentTime() - start; - this._logger.LogError( - "Error executing request, max total retry time reached. Reason: {0}. Time spent: {1}ms", reason, - timeTaken.TotalMilliseconds); - throw; - } - } - - // If the request requires a retry then we'll retry - this._logger.LogWarning( - "Error executing action [attempt {0} of {1}]. Reason: {2}. Will retry after {3}ms", - retryCount + 1, - this._config.MaxRetryCount, - reason, - waitFor.TotalMilliseconds); - - // Increase retryCount - retryCount++; - - response?.Dispose(); - - // Delay - await this._delayProvider.DelayAsync(waitFor, cancellationToken).ConfigureAwait(false); - } - } - - /// - /// Interface for a delay provider, primarily to enable unit testing. - /// - internal interface IDelayProvider - { - Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken); - } - - internal sealed class TaskDelayProvider : IDelayProvider - { - public Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) - { - return Task.Delay(delay, cancellationToken); - } - } - - /// - /// Interface for a time provider, primarily to enable unit testing. 
- /// - internal interface ITimeProvider - { - DateTimeOffset GetCurrentTime(); - } - - internal sealed class DefaultTimeProvider : ITimeProvider - { - public DateTimeOffset GetCurrentTime() - { - return DateTimeOffset.UtcNow; - } - } - - private readonly HttpRetryConfig _config; - private readonly ILogger _logger; - private readonly IDelayProvider _delayProvider; - private readonly ITimeProvider _timeProvider; - - /// - /// Get the wait time for the next retry. - /// - /// Current retry count - /// The response message that potentially contains RetryAfter header. - private TimeSpan GetWaitTime(int retryCount, HttpResponseMessage? response) - { - // If the response contains a RetryAfter header, use that value - // Otherwise, use the configured min retry delay - var retryAfter = response?.Headers.RetryAfter?.Date.HasValue == true - ? response?.Headers.RetryAfter?.Date - this._timeProvider.GetCurrentTime() - : (response?.Headers.RetryAfter?.Delta) ?? this._config.MinRetryDelay; - retryAfter ??= this._config.MinRetryDelay; - - // If the retry delay is longer than the max retry delay, use the max retry delay - var timeToWait = retryAfter > this._config.MaxRetryDelay - ? this._config.MaxRetryDelay - : retryAfter < this._config.MinRetryDelay - ? this._config.MinRetryDelay - : retryAfter ?? default; - - // If exponential backoff is enabled, double the delay for each retry - if (this._config.UseExponentialBackoff) - { - for (var backoffRetryCount = 1; backoffRetryCount < retryCount + 1; backoffRetryCount++) - { - timeToWait = timeToWait.Add(timeToWait); - } - } - - return timeToWait; - } - - /// - /// Determines if there is time left for a retry. - /// - /// The start time of the original request. - /// The current retry count. - /// The response message that potentially contains RetryAfter header. - /// The wait time for the next retry. - /// True if there is time left for a retry, false otherwise. - private bool HasTimeForRetry(DateTimeOffset start, int retryCount, HttpResponseMessage? response, out TimeSpan waitFor) - { - waitFor = this.GetWaitTime(retryCount, response); - var currentTIme = this._timeProvider.GetCurrentTime(); - var result = currentTIme - start + waitFor; - - return result < this._config.MaxTotalRetryTime; - } - - private bool ShouldRetry(HttpStatusCode statusCode) - { - return this._config.RetryableStatusCodes.Contains(statusCode); - } - - private bool ShouldRetry(Exception? exception) - { - return exception != null && this._config.RetryableExceptionTypes.Contains(exception.GetType()); - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Reliability/DefaultHttpRetryHandlerFactory.cs b/dotnet/src/SemanticKernel.Abstractions/Reliability/DefaultHttpRetryHandlerFactory.cs deleted file mode 100644 index 5dd07072837f..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Reliability/DefaultHttpRetryHandlerFactory.cs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Reliability; - -/// -/// Deprecated A factory class for creating instances of . -/// Implements the interface. -/// -[Obsolete("Usage of Semantic Kernel internal retry abstractions is deprecated.\nCheck KernelSyntaxExamples.Example42_KernelBuilder.cs for alternatives")] -public class DefaultHttpRetryHandlerFactory : IDelegatingHandlerFactory -{ - /// - /// Initializes a new instance of the class. 
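The deleted handler's wait-time rules boil down to: honor the RetryAfter header when present, clamp the delay between the configured minimum and maximum, and double it per completed retry when exponential backoff is enabled. A standalone distillation for reference only, since the handler is obsolete; the method and parameter names here are illustrative, not an SK API:

    using System;

    public static class RetryDelaySketch
    {
        // Mirrors the removed GetWaitTime logic in isolation.
        public static TimeSpan GetWaitTime(
            int retryCount,
            TimeSpan? retryAfterHeader,
            TimeSpan minDelay,
            TimeSpan maxDelay,
            bool useExponentialBackoff)
        {
            // The RetryAfter header wins over the configured minimum when the server supplies it.
            TimeSpan delay = retryAfterHeader ?? minDelay;

            // Clamp to the configured [minDelay, maxDelay] window.
            if (delay > maxDelay) { delay = maxDelay; }
            if (delay < minDelay) { delay = minDelay; }

            // Exponential backoff doubles the delay once per completed retry.
            if (useExponentialBackoff)
            {
                for (int i = 0; i < retryCount; i++)
                {
                    delay += delay;
                }
            }

            return delay;
        }
    }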
- /// - /// An optional instance to configure the retry behavior. If not provided, default configuration will be used. - public DefaultHttpRetryHandlerFactory(HttpRetryConfig? config = null) - { - this.Config = config; - } - - /// - /// Creates a new instance of with the specified logger. - /// - /// The to use for logging. If null, no logging will be performed. - /// A new instance of . - public DelegatingHandler Create(ILoggerFactory? loggerFactory) - { - return new DefaultHttpRetryHandler(this.Config, loggerFactory); - } - - /// - /// Gets the instance used to configure the retry behavior. - /// - public HttpRetryConfig? Config { get; } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Reliability/HttpRetryConfig.cs b/dotnet/src/SemanticKernel.Abstractions/Reliability/HttpRetryConfig.cs deleted file mode 100644 index e18677abd853..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Reliability/HttpRetryConfig.cs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Net; -using System.Net.Http; -using Microsoft.SemanticKernel.Diagnostics; - -namespace Microsoft.SemanticKernel.Reliability; - -/// -/// Retry configuration for IHttpRetryPolicy that uses RetryAfter header when present. -/// -[Obsolete("Usage of Semantic Kernel internal retry abstractions is deprecated.\nCheck KernelSyntaxExamples.Example42_KernelBuilder.cs for alternatives")] -public sealed class HttpRetryConfig -{ - /// - /// Maximum number of retries. - /// - /// Thrown when value is negative. - public int MaxRetryCount - { - get => this._maxRetryCount; - set - { - if (value < 0) - { - throw new ArgumentOutOfRangeException(nameof(this.MaxRetryCount), "Max retry count cannot be negative."); - } - - this._maxRetryCount = value; - } - } - - /// - /// Minimum delay between retries. - /// - public TimeSpan MinRetryDelay { get; set; } = TimeSpan.FromSeconds(2); - - /// - /// Maximum delay between retries. - /// - public TimeSpan MaxRetryDelay { get; set; } = TimeSpan.FromSeconds(60); - - /// - /// Maximum total time spent retrying. - /// - public TimeSpan MaxTotalRetryTime { get; set; } = TimeSpan.FromMinutes(2); - - /// - /// Whether to use exponential backoff or not. - /// - public bool UseExponentialBackoff { get; set; } - - /// - /// List of status codes that should be retried. - /// - public List RetryableStatusCodes { get; set; } = new() - { - (HttpStatusCode)HttpStatusCodeType.RequestTimeout, - (HttpStatusCode)HttpStatusCodeType.ServiceUnavailable, - (HttpStatusCode)HttpStatusCodeType.GatewayTimeout, - (HttpStatusCode)HttpStatusCodeType.TooManyRequests, - (HttpStatusCode)HttpStatusCodeType.BadGateway, - }; - - /// - /// List of exception types that should be retried. 
- /// - public List RetryableExceptionTypes { get; set; } = new() - { - typeof(HttpRequestException) - }; - - private int _maxRetryCount = 1; -} diff --git a/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj b/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj index c11cf51e2215..8cd6926c20fb 100644 --- a/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj +++ b/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj @@ -4,6 +4,8 @@ Microsoft.SemanticKernel.Abstractions Microsoft.SemanticKernel netstandard2.0 + $(NoWarn);SKEXP0004 + true @@ -18,15 +20,17 @@ + + + - diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs new file mode 100644 index 000000000000..a9e1266a2512 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Services/AIServiceExtensions.cs @@ -0,0 +1,112 @@ +// Copyright (c) Microsoft. All rights reserved. + +#pragma warning disable CA1716 // Identifiers should not match keywords + +using System.Linq; +using System.Text; +using Microsoft.Extensions.DependencyInjection; + +namespace Microsoft.SemanticKernel.Services; + +/// +/// Extension methods for . +/// +public static class AIServiceExtensions +{ + /// + /// Gets the key used to store the model identifier in the dictionary. + /// + public static string ModelIdKey => "ModelId"; + + /// + /// Gets the key used to store the endpoint key in the dictionary. + /// + public static string EndpointKey => "Endpoint"; + + /// + /// Gets the key used to store the API version in the dictionary. + /// + public static string ApiVersionKey => "ApiVersion"; + + /// + /// Gets the model identifier from 's . + /// + /// The service from which to get the model identifier. + /// The model identifier if it was specified in the service's attributes; otherwise, null. + public static string? GetModelId(this IAIService service) => service.GetAttribute(ModelIdKey); + + /// + /// Gets the endpoint from 's . + /// + /// The service from which to get the endpoint. + /// The endpoint if it was specified in the service's attributes; otherwise, null. + public static string? GetEndpoint(this IAIService service) => service.GetAttribute(EndpointKey); + + /// + /// Gets the API version from 's + /// + /// The service from which to get the API version. + /// The API version if it was specified in the service's attributes; otherwise, null. + public static string? GetApiVersion(this IAIService service) => service.GetAttribute(ApiVersionKey); + + /// + /// Gets the specified attribute. + /// + private static string? GetAttribute(this IAIService service, string key) + { + Verify.NotNull(service); + return service.Attributes?.TryGetValue(key, out object? value) == true ? value as string : null; + } + + /// + /// Resolves an and associated from the specified + /// based on a and associated . + /// + /// + /// Specifies the type of the required. This must be the same type + /// with which the service was registered in the orvia + /// the . + /// + /// The to use to select a service from the . + /// The containing services, plugins, and other state for use throughout the operation. + /// The function. + /// The function arguments. + /// A tuple of the selected service and the settings associated with the service (the settings may be null). + /// An appropriate service could not be found. + public static (T?, PromptExecutionSettings?) 
SelectAIService( + this IAIServiceSelector selector, + Kernel kernel, + KernelFunction function, + KernelArguments arguments) where T : class, IAIService + { + Verify.NotNull(selector); + Verify.NotNull(kernel); + Verify.NotNull(function); + Verify.NotNull(arguments); + + if (selector.TrySelectAIService( + kernel, function, arguments, + out T? service, out PromptExecutionSettings? settings)) + { + return (service, settings); + } + + var message = new StringBuilder($"Required service of type {typeof(T)} not registered."); + if (function.ExecutionSettings is not null) + { + string serviceIds = string.Join("|", function.ExecutionSettings.Keys); + if (!string.IsNullOrEmpty(serviceIds)) + { + message.Append($" Expected serviceIds: {serviceIds}."); + } + + string modelIds = string.Join("|", function.ExecutionSettings.Values.Select(model => model.ModelId)); + if (!string.IsNullOrEmpty(modelIds)) + { + message.Append($" Expected modelIds: {modelIds}."); + } + } + + throw new KernelException(message.ToString()); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/EmptyServiceProvider.cs b/dotnet/src/SemanticKernel.Abstractions/Services/EmptyServiceProvider.cs new file mode 100644 index 000000000000..4328d6041023 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Services/EmptyServiceProvider.cs @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using Microsoft.Extensions.DependencyInjection; + +namespace Microsoft.SemanticKernel; + +/// Empty implementation that returns null from all calls. +internal sealed class EmptyServiceProvider : IServiceProvider, IKeyedServiceProvider +{ + private static readonly ConcurrentDictionary s_results = new(); + + /// Singleton instance of . + public static IServiceProvider Instance { get; } = new EmptyServiceProvider(); + + /// + public object? GetService(Type serviceType) => s_results.GetOrAdd(serviceType, GetEmpty); + + /// + public object? GetKeyedService(Type serviceType, object? serviceKey) => s_results.GetOrAdd(serviceType, GetEmpty); + + /// + public object GetRequiredKeyedService(Type serviceType, object? serviceKey) => + throw new InvalidOperationException(serviceKey is null ? + $"No service for type '{serviceType}' has been registered." : + $"No service for type '{serviceType}' and service key '{serviceKey}' has been registered."); + + private static object? GetEmpty(Type serviceType) + { + if (serviceType.IsConstructedGenericType && + serviceType.GetGenericTypeDefinition() == typeof(IEnumerable<>)) + { + return Array.CreateInstance(serviceType.GenericTypeArguments[0], 0); + } + + return null; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/IAIService.cs b/dotnet/src/SemanticKernel.Abstractions/Services/IAIService.cs index 54085c6f5b4b..53a47ccb83c2 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Services/IAIService.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Services/IAIService.cs @@ -1,13 +1,16 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; +using System.Collections.Generic; namespace Microsoft.SemanticKernel.Services; /// -/// Represents an empty interface for AI services. +/// Represents an AI service. /// -[SuppressMessage("Design", "CA1040:Avoid empty interfaces")] public interface IAIService { + /// + /// Gets the AI service attributes. 
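IAIService now exposes an Attributes dictionary, and the AIServiceExtensions shown earlier read well-known keys (model id, endpoint, API version) out of it. A minimal sketch of a service implementing the new member and of reading it back; FakeChatService is invented purely for illustration:

    using System;
    using System.Collections.Generic;
    using Microsoft.SemanticKernel.Services;

    // Illustrative-only service exposing the new Attributes member.
    public sealed class FakeChatService : IAIService
    {
        public IReadOnlyDictionary<string, object?> Attributes { get; } =
            new Dictionary<string, object?>
            {
                [AIServiceExtensions.ModelIdKey] = "fake-model-001",
                [AIServiceExtensions.EndpointKey] = "https://example.invalid/v1",
            };
    }

    public static class AIServiceAttributesSketch
    {
        public static void Run()
        {
            IAIService service = new FakeChatService();

            // The extension methods return null when a key is absent (the API version here).
            Console.WriteLine(service.GetModelId());    // fake-model-001
            Console.WriteLine(service.GetEndpoint());   // https://example.invalid/v1
            Console.WriteLine(service.GetApiVersion()); // (null)
        }
    }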
+ /// + IReadOnlyDictionary Attributes { get; } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/IAIServiceProvider.cs b/dotnet/src/SemanticKernel.Abstractions/Services/IAIServiceProvider.cs deleted file mode 100644 index 293973f6ed14..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Services/IAIServiceProvider.cs +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Services; - -/// -/// Represents an interface for AI service providers that implements the INamedServiceProvider interface. -/// -public interface IAIServiceProvider : INamedServiceProvider -{ -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/IAIServiceSelector.cs b/dotnet/src/SemanticKernel.Abstractions/Services/IAIServiceSelector.cs index f35a52ffaf34..93064508d118 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Services/IAIServiceSelector.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Services/IAIServiceSelector.cs @@ -1,26 +1,36 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Collections.Generic; -using Microsoft.SemanticKernel.AI; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel.Services; -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 /// -/// Selector which will return a tuple containing instances of and from the specified provider based on the model settings. +/// Represents a selector which will return a tuple containing instances of and from the specified provider based on the model settings. /// public interface IAIServiceSelector { /// - /// Return the AI service and requesting settings from the specified provider based on the model settings. - /// The returned value is a tuple containing instances of and + /// Resolves an and associated from the specified + /// based on a and associated . /// - /// Type of AI service to return - /// Rendered prompt - /// AI service provider - /// Collection of model settings - /// - (T?, AIRequestSettings?) SelectAIService(string renderedPrompt, IAIServiceProvider serviceProvider, IReadOnlyList? modelSettings) where T : IAIService; + /// + /// Specifies the type of the required. This must be the same type + /// with which the service was registered in the orvia + /// the . + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The function. + /// The function arguments. + /// The selected service, or null if none was selected. + /// The settings associated with the selected service. This may be null even if a service is selected. + /// true if a matching service was selected; otherwise, false. +#pragma warning disable CA1716 // Identifiers should not match keywords + bool TrySelectAIService( + Kernel kernel, + KernelFunction function, + KernelArguments arguments, + [NotNullWhen(true)] out T? service, + out PromptExecutionSettings? serviceSettings) where T : class, IAIService; +#pragma warning restore CA1716 } diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/INamedServiceProvider.cs b/dotnet/src/SemanticKernel.Abstractions/Services/INamedServiceProvider.cs deleted file mode 100644 index 2cb9263f8b86..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Services/INamedServiceProvider.cs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
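The reworked IAIServiceSelector above replaces the old provider-based SelectAIService with a Try-style method driven by the Kernel, the function, and its arguments. A bare-bones custom selector sketch; it simply defers to kernel.Services and is not the built-in OrderedAIServiceSelector introduced later in this change:

    using System.Diagnostics.CodeAnalysis;
    using Microsoft.Extensions.DependencyInjection;
    using Microsoft.SemanticKernel;
    using Microsoft.SemanticKernel.Services;

    // Minimal custom selector: ignore execution settings and return whatever service of
    // type T the container can resolve. Note that GetService<T> only finds non-keyed
    // registrations; keyed services need extra handling, as the built-in selector shows.
    public sealed class FirstRegisteredServiceSelector : IAIServiceSelector
    {
        public bool TrySelectAIService<T>(
            Kernel kernel,
            KernelFunction function,
            KernelArguments arguments,
            [NotNullWhen(true)] out T? service,
            out PromptExecutionSettings? serviceSettings) where T : class, IAIService
        {
            service = kernel.Services.GetService<T>();
            serviceSettings = null;
            return service is not null;
        }
    }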
- -namespace Microsoft.SemanticKernel.Services; - -/// -/// Represents a named service provider that can retrieve services by type and name. -/// -/// The base type of the services provided by this provider. -public interface INamedServiceProvider -{ - /// - /// Gets the service of the specified type and name, or null if not found. - /// - /// The type of the service. - /// The name of the service, or null for the default service. - /// The service instance, or null if not found. - T? GetService(string? name = null) where T : TService; -} diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/KernelServiceCollectionExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/Services/KernelServiceCollectionExtensions.cs new file mode 100644 index 000000000000..79a6f6116e9a --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Services/KernelServiceCollectionExtensions.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; + +namespace Microsoft.Extensions.DependencyInjection; + +/// Extension methods for interacting with . +public static class KernelServiceCollectionExtensions +{ + /// Adds a and services to the services collection. + /// The service collection. + /// + /// A that can be used to add additional services to the same . + /// + /// + /// Both services are registered as transient, as both objects are mutable. + /// + public static IKernelBuilder AddKernel(this IServiceCollection services) + { + Verify.NotNull(services); + + // Register a KernelPluginCollection to be populated with any IKernelPlugins that have been + // directly registered in DI. It's transient because the Kernel will store the collection + // directly, and we don't want two Kernel instances to hold on to the same mutable collection. + services.AddTransient(); + + // Register the Kernel as transient. It's mutable and expected to be mutated by consumers, + // such as via adding event handlers, adding plugins, storing state in its Data collection, etc. + services.AddTransient(); + + // Create and return a builder that can be used for adding services and plugins + // to the IServiceCollection. + return new KernelBuilder(services); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/OrderedAIServiceSelector.cs b/dotnet/src/SemanticKernel.Abstractions/Services/OrderedAIServiceSelector.cs new file mode 100644 index 000000000000..1200acd3a803 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Services/OrderedAIServiceSelector.cs @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using Microsoft.Extensions.DependencyInjection; + +namespace Microsoft.SemanticKernel.Services; + +/// +/// Implementation of that selects the AI service based on the order of the execution settings. +/// Uses the service id or model id to select the preferred service provider and then returns the service and associated execution settings. +/// +internal sealed class OrderedAIServiceSelector : IAIServiceSelector +{ + public static OrderedAIServiceSelector Instance { get; } = new(); + + /// + public bool TrySelectAIService( + Kernel kernel, KernelFunction function, KernelArguments arguments, + [NotNullWhen(true)] out T? service, + out PromptExecutionSettings? serviceSettings) where T : class, IAIService + { + // Allow the execution settings from the kernel arguments to take precedence + var executionSettings = arguments.ExecutionSettings ?? 
function.ExecutionSettings; + if (executionSettings is null || executionSettings.Count == 0) + { + service = GetAnyService(kernel); + if (service is not null) + { + serviceSettings = null; + return true; + } + } + else + { + PromptExecutionSettings? defaultExecutionSettings = null; + // Search by service id first + foreach (var keyValue in executionSettings) + { + var settings = keyValue.Value; + var serviceId = keyValue.Key; + if (string.IsNullOrEmpty(serviceId) || serviceId!.Equals(PromptExecutionSettings.DefaultServiceId, StringComparison.OrdinalIgnoreCase)) + { + defaultExecutionSettings ??= settings; + } + else if (!string.IsNullOrEmpty(serviceId)) + { + service = (kernel.Services as IKeyedServiceProvider)?.GetKeyedService(serviceId); + if (service is not null) + { + serviceSettings = settings; + return true; + } + } + } + + // Search by model id next + foreach (var keyValue in executionSettings) + { + var settings = keyValue.Value; + var serviceId = keyValue.Key; + if (!string.IsNullOrEmpty(settings.ModelId)) + { + service = this.GetServiceByModelId(kernel, settings.ModelId!); + if (service is not null) + { + serviceSettings = settings; + return true; + } + } + } + + // Search for default service id last + if (defaultExecutionSettings is not null) + { + service = GetAnyService(kernel); + if (service is not null) + { + serviceSettings = defaultExecutionSettings; + return true; + } + } + } + + service = null; + serviceSettings = null; + return false; + + // Get's a non-required service, regardless of service key + static T? GetAnyService(Kernel kernel) => + kernel.Services is IKeyedServiceProvider ? + kernel.GetAllServices().LastOrDefault() : // see comments in Kernel/KernelBuilder for why we can't use GetKeyedService + kernel.Services.GetService(); + } + + private T? GetServiceByModelId(Kernel kernel, string modelId) where T : class, IAIService + { + foreach (var service in kernel.GetAllServices()) + { + string? serviceModelId = service.GetModelId(); + if (!string.IsNullOrEmpty(serviceModelId) && serviceModelId == modelId) + { + return service; + } + } + + return null; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Services/ServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/Services/ServiceExtensions.cs deleted file mode 100644 index 16f10184d248..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/Services/ServiceExtensions.cs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; - -namespace Microsoft.SemanticKernel.Services; - -internal static class AIServiceProviderExtensions -{ - /// - /// Tries to get the service of the specified type and name, and returns a value indicating whether the operation succeeded. - /// - /// The type of the service. - /// The service provider. - /// The output parameter to receive the service instance, or null if not found. - /// True if the service was found, false otherwise. - public static bool TryGetService(this IAIServiceProvider serviceProvider, - [NotNullWhen(true)] out T? service) where T : IAIService - { - service = serviceProvider.GetService(); - return service != null; - } - - /// - /// Tries to get the service of the specified type and name, and returns a value indicating whether the operation succeeded. - /// - /// The type of the service. - /// The service provider. - /// The name of the service, or null for the default service. - /// The output parameter to receive the service instance, or null if not found. 
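// --- Hedged wiring sketch (illustrative addition, not part of this diff) ---
// The new AddKernel() extension plus two keyed service registrations, which is the shape
// OrderedAIServiceSelector above resolves: first by service id, then by model id, then any
// default. FakeChatService and the "fast"/"smart" keys are hypothetical placeholders for real
// connector registrations; execution settings keyed by those ids would select the matching service.
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

var services = new ServiceCollection();
services.AddKernel();
services.AddKeyedSingleton<IChatCompletionService>("fast", new FakeChatService("small-model"));
services.AddKeyedSingleton<IChatCompletionService>("smart", new FakeChatService("large-model"));

Kernel kernel = services.BuildServiceProvider().GetRequiredService<Kernel>();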
- /// True if the service was found, false otherwise. - public static bool TryGetService(this IAIServiceProvider serviceProvider, - string? name, [NotNullWhen(true)] out T? service) where T : IAIService - { - service = serviceProvider.GetService(name); - return service != null; - } -} diff --git a/dotnet/src/SemanticKernel.Abstractions/TemplateEngine/IPromptTemplate.cs b/dotnet/src/SemanticKernel.Abstractions/TemplateEngine/IPromptTemplate.cs deleted file mode 100644 index e0cc43e31814..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/TemplateEngine/IPromptTemplate.cs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.TemplateEngine; - -/// -/// Interface for prompt template. -/// -public interface IPromptTemplate -{ - /// - /// The list of parameters required by the template, using configuration and template info. - /// - IReadOnlyList Parameters { get; } - - /// - /// Render the template using the information in the context - /// - /// Kernel execution context helpers - /// The to monitor for cancellation requests. The default is . - /// Prompt rendered to string - public Task RenderAsync(SKContext executionContext, CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Abstractions/TemplateEngine/IPromptTemplateEngine.cs b/dotnet/src/SemanticKernel.Abstractions/TemplateEngine/IPromptTemplateEngine.cs deleted file mode 100644 index 2ae1f7b2c566..000000000000 --- a/dotnet/src/SemanticKernel.Abstractions/TemplateEngine/IPromptTemplateEngine.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.TemplateEngine; - -/// -/// Prompt template engine interface. -/// -public interface IPromptTemplateEngine -{ - /// - /// Given a prompt template, replace the variables with their values and execute the functions replacing their - /// reference with the function result. - /// - /// Prompt template (see skprompt.txt files) - /// Access into the current kernel execution context - /// The to monitor for cancellation requests. The default is . - /// The prompt template ready to be used for an AI request - Task RenderAsync( - string templateText, - SKContext context, - CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/CosineSimilarityOperation.cs b/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/CosineSimilarityOperation.cs deleted file mode 100644 index dd07a01594e1..000000000000 --- a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/CosineSimilarityOperation.cs +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Numerics; -using System.Runtime.InteropServices; - -namespace Microsoft.SemanticKernel.AI.Embeddings.VectorOperations; - -/// -/// Extension methods to calculate the cosine similarity between two vectors. -/// -/// -/// https://en.wikipedia.org/wiki/Cosine_similarity -/// -[Obsolete("Numerical operations will be removed in a future release. 
Use System.Numerics.Tensors.TensorPrimitives instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class CosineSimilarityOperation -{ - /// - /// Calculate the cosine similarity between two vectors of type . - /// - /// The unmanaged data type (, currently supported). - /// The first vector. - /// The second vector. - public static double CosineSimilarity(this ReadOnlySpan x, ReadOnlySpan y) - where TNumber : unmanaged - { - if (typeof(TNumber) == typeof(float)) - { - ReadOnlySpan floatSpanX = MemoryMarshal.Cast(x); - ReadOnlySpan floatSpanY = MemoryMarshal.Cast(y); - return CosineSimilarityImplementation(floatSpanX, floatSpanY); - } - else if (typeof(TNumber) == typeof(double)) - { - ReadOnlySpan doubleSpanX = MemoryMarshal.Cast(x); - ReadOnlySpan doubleSpanY = MemoryMarshal.Cast(y); - return CosineSimilarityImplementation(doubleSpanX, doubleSpanY); - } - - throw new NotSupportedException(); - } - - /// - /// Calculate the cosine similarity between two vectors of type . - /// - /// The unmanaged data type (, currently supported). - /// The first vector. - /// The second vector. - public static double CosineSimilarity(this Span x, Span y) - where TNumber : unmanaged - { - return x.AsReadOnlySpan().CosineSimilarity(y.AsReadOnlySpan()); - } - - /// - /// Calculate the cosine similarity between two vectors of type . - /// - /// The unmanaged data type (, currently supported). - /// The first vector. - /// The second vector. - public static double CosineSimilarity(this TNumber[] x, TNumber[] y) - where TNumber : unmanaged - { - return x.AsReadOnlySpan().CosineSimilarity(y.AsReadOnlySpan()); - } - - #region private ================================================================================ - - private static unsafe double CosineSimilarityImplementation(ReadOnlySpan x, ReadOnlySpan y) - { - if (x.Length != y.Length) - { - throw new ArgumentException("Array lengths must be equal"); - } - - fixed (double* pxBuffer = x, pyBuffer = y) - { - double dotSum = 0, lenXSum = 0, lenYSum = 0; - - double* px = pxBuffer, py = pyBuffer; - double* pxEnd = px + x.Length; - - if (Vector.IsHardwareAccelerated && - x.Length >= Vector.Count) - { - double* pxOneVectorFromEnd = pxEnd - Vector.Count; - do - { - Vector xVec = *(Vector*)px; - Vector yVec = *(Vector*)py; - - dotSum += Vector.Dot(xVec, yVec); // Dot product - lenXSum += Vector.Dot(xVec, xVec); // For magnitude of x - lenYSum += Vector.Dot(yVec, yVec); // For magnitude of y - - px += Vector.Count; - py += Vector.Count; - } while (px <= pxOneVectorFromEnd); - } - - while (px < pxEnd) - { - double xVal = *px; - double yVal = *py; - - dotSum += xVal * yVal; // Dot product - lenXSum += xVal * xVal; // For magnitude of x - lenYSum += yVal * yVal; // For magnitude of y - - ++px; - ++py; - } - - // Cosine Similarity of X, Y - // Sum(X * Y) / |X| * |Y| - return dotSum / (Math.Sqrt(lenXSum) * Math.Sqrt(lenYSum)); - } - } - - private static unsafe double CosineSimilarityImplementation(ReadOnlySpan x, ReadOnlySpan y) - { - if (x.Length != y.Length) - { - throw new ArgumentException("Array lengths must be equal"); - } - - fixed (float* pxBuffer = x, pyBuffer = y) - { - double dotSum = 0, lenXSum = 0, lenYSum = 0; - - float* px = pxBuffer, py = pyBuffer; - float* pxEnd = px + x.Length; - - if (Vector.IsHardwareAccelerated && - x.Length >= Vector.Count) - { - float* pxOneVectorFromEnd = pxEnd - Vector.Count; - do - { - Vector xVec = *(Vector*)px; - Vector yVec = *(Vector*)py; - - dotSum += Vector.Dot(xVec, yVec); // Dot product - lenXSum += 
Vector.Dot(xVec, xVec); // For magnitude of x - lenYSum += Vector.Dot(yVec, yVec); // For magnitude of y - - px += Vector.Count; - py += Vector.Count; - } while (px <= pxOneVectorFromEnd); - } - - while (px < pxEnd) - { - float xVal = *px; - float yVal = *py; - - dotSum += xVal * yVal; // Dot product - lenXSum += xVal * xVal; // For magnitude of x - lenYSum += yVal * yVal; // For magnitude of y - - ++px; - ++py; - } - - // Cosine Similarity of X, Y - // Sum(X * Y) / |X| * |Y| - return dotSum / (Math.Sqrt(lenXSum) * Math.Sqrt(lenYSum)); - } - } - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/DivideOperation.cs b/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/DivideOperation.cs deleted file mode 100644 index 420728402bd4..000000000000 --- a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/DivideOperation.cs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Numerics; -using System.Runtime.InteropServices; - -namespace Microsoft.SemanticKernel.AI.Embeddings.VectorOperations; - -/// -/// Extension methods for vector division. -/// -[Obsolete("Numerical operations will be removed in a future release. Use System.Numerics.Tensors.TensorPrimitives instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class DivideOperation -{ - /// - /// Divide all elements of of type by . - /// - /// The unmanaged data type (, currently supported). - /// The data vector - /// The value to divide by. - public static void DivideByInPlace(this Span span, double divisor) - where TNumber : unmanaged - { - if (typeof(TNumber) == typeof(float)) - { - Span floatSpan = MemoryMarshal.Cast(span); - DivideByInPlaceImplementation(floatSpan, (float)divisor); - } - else if (typeof(TNumber) == typeof(double)) - { - Span doubleSpan = MemoryMarshal.Cast(span); - DivideByInPlaceImplementation(doubleSpan, divisor); - } - else - { - throw new NotSupportedException(); - } - } - - /// - /// Divide all elements of an array of type by . - /// - /// The unmanaged data type (, currently supported). - /// The data vector - /// The value to divide by. 
- public static void DivideByInPlace(this TNumber[] vector, double divisor) - where TNumber : unmanaged - { - vector.AsSpan().DivideByInPlace(divisor); - } - - #region private ================================================================================ - - private static unsafe void DivideByInPlaceImplementation(Span x, float divisor) - { - fixed (float* pxBuffer = x) - { - float* px = pxBuffer; - float* pxEnd = px + x.Length; - - if (Vector.IsHardwareAccelerated && - x.Length >= Vector.Count) - { - Vector divisorVec = new(divisor); - float* pxOneVectorFromEnd = pxEnd - Vector.Count; - do - { - *(Vector*)px /= divisorVec; - px += Vector.Count; - } while (px <= pxOneVectorFromEnd); - } - - while (px < pxEnd) - { - *px /= divisor; - px++; - } - } - } - - private static unsafe void DivideByInPlaceImplementation(Span x, double divisor) - { - fixed (double* pxBuffer = x) - { - double* px = pxBuffer; - double* pxEnd = px + x.Length; - - if (Vector.IsHardwareAccelerated && - x.Length >= Vector.Count) - { - Vector divisorVec = new(divisor); - double* pxOneVectorFromEnd = pxEnd - Vector.Count; - do - { - *(Vector*)px /= divisorVec; - px += Vector.Count; - } while (px <= pxOneVectorFromEnd); - } - - while (px < pxEnd) - { - *px /= divisor; - px++; - } - } - } - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/DotProductOperation.cs b/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/DotProductOperation.cs deleted file mode 100644 index 6264a9ed47c6..000000000000 --- a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/DotProductOperation.cs +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Numerics; -using System.Runtime.InteropServices; - -namespace Microsoft.SemanticKernel.AI.Embeddings.VectorOperations; - -/// -/// Extension methods for vector dot product. -/// -/// -/// https://en.wikipedia.org/wiki/Dot_product -/// -[Obsolete("Numerical operations will be removed in a future release. Use System.Numerics.Tensors.TensorPrimitives instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class DotProductOperation -{ - /// - /// Calculate the dot products of two vectors of type . - /// - /// The unmanaged data type (, currently supported). - /// The first vector. - /// The second vector. - /// The dot product as a . - public static double DotProduct(this ReadOnlySpan x, ReadOnlySpan y) - where TNumber : unmanaged - { - if (typeof(TNumber) == typeof(float)) - { - ReadOnlySpan floatSpanX = MemoryMarshal.Cast(x); - ReadOnlySpan floatSpanY = MemoryMarshal.Cast(y); - return DotProductImplementation(floatSpanX, floatSpanY); - } - else if (typeof(TNumber) == typeof(double)) - { - ReadOnlySpan doubleSpanX = MemoryMarshal.Cast(x); - ReadOnlySpan doubleSpanY = MemoryMarshal.Cast(y); - return DotProductImplementation(doubleSpanX, doubleSpanY); - } - else - { - throw new NotSupportedException(); - } - } - - /// - /// Calculate the dot products of two vectors of type . - /// - /// The unmanaged data type (, currently supported). - /// The first vector. - /// The second vector. - /// The dot product as a . - public static double DotProduct(this Span x, Span y) - where TNumber : unmanaged - { - return x.AsReadOnlySpan().DotProduct(y.AsReadOnlySpan()); - } - - /// - /// Calculate the dot products of two vectors of type . - /// - /// The unmanaged data type (, currently supported). - /// The first vector. - /// The second vector. 
- /// The dot product as a . - public static double DotProduct(this TNumber[] x, TNumber[] y) - where TNumber : unmanaged - { - return x.AsReadOnlySpan().DotProduct(y.AsReadOnlySpan()); - } - - #region private ================================================================================ - - private static unsafe double DotProductImplementation(ReadOnlySpan x, ReadOnlySpan y) - { - if (x.Length != y.Length) - { - throw new ArgumentException("Array lengths must be equal"); - } - - fixed (double* pxBuffer = x, pyBuffer = y) - { - double* px = pxBuffer, py = pyBuffer; - double* pxEnd = px + x.Length; - - double dotSum = 0; - - if (Vector.IsHardwareAccelerated && - x.Length >= Vector.Count) - { - double* pxOneVectorFromEnd = pxEnd - Vector.Count; - do - { - dotSum += Vector.Dot(*(Vector*)px, *(Vector*)py); // Dot product - - px += Vector.Count; - py += Vector.Count; - } while (px <= pxOneVectorFromEnd); - } - - while (px < pxEnd) - { - dotSum += *px * *py; // Dot product - - ++px; - ++py; - } - - return dotSum; - } - } - - private static unsafe double DotProductImplementation(ReadOnlySpan x, ReadOnlySpan y) - { - if (x.Length != y.Length) - { - throw new ArgumentException("Array lengths must be equal"); - } - - fixed (float* pxBuffer = x, pyBuffer = y) - { - float* px = pxBuffer, py = pyBuffer; - float* pxEnd = px + x.Length; - - double dotSum = 0; - - if (Vector.IsHardwareAccelerated && - x.Length >= Vector.Count) - { - float* pxOneVectorFromEnd = pxEnd - Vector.Count; - do - { - dotSum += Vector.Dot(*(Vector*)px, *(Vector*)py); // Dot product - - px += Vector.Count; - py += Vector.Count; - } while (px <= pxOneVectorFromEnd); - } - - while (px < pxEnd) - { - dotSum += *px * *py; // Dot product - - ++px; - ++py; - } - - return dotSum; - } - } - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/EuclideanLengthOperation.cs b/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/EuclideanLengthOperation.cs deleted file mode 100644 index 416569ebe0cf..000000000000 --- a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/EuclideanLengthOperation.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; - -namespace Microsoft.SemanticKernel.AI.Embeddings.VectorOperations; - -/// -/// Extension methods to calculate the Euclidean length of a vector. -/// -[Obsolete("Numerical operations will be removed in a future release. Use System.Numerics.Tensors.TensorPrimitives instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class EuclideanLengthOperation -{ - /// - /// Calculate the Euclidean length of a vector of type . - /// - /// The unmanaged data type (, currently supported). - /// The vector. - /// Euclidean length as a - public static double EuclideanLength(this ReadOnlySpan x) - where TNumber : unmanaged - { - return Math.Sqrt(x.DotProduct(x)); - } - - /// - /// Calculate the Euclidean length of a vector of type . - /// - /// The unmanaged data type (, currently supported). - /// The vector. - /// Euclidean length as a - public static double EuclideanLength(this Span x) - where TNumber : unmanaged - { - var readOnly = x.AsReadOnlySpan(); - return readOnly.EuclideanLength(); - } - - /// - /// Calculate the Euclidean length of a vector of type . - /// - /// The unmanaged data type (, currently supported). - /// The vector. 
- /// Euclidean length as a - public static double EuclideanLength(this TNumber[] vector) - where TNumber : unmanaged - { - return vector.AsReadOnlySpan().EuclideanLength(); - } -} diff --git a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/MultiplyOperation.cs b/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/MultiplyOperation.cs deleted file mode 100644 index cb1596a334de..000000000000 --- a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/MultiplyOperation.cs +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Numerics; -using System.Runtime.InteropServices; - -namespace Microsoft.SemanticKernel.AI.Embeddings.VectorOperations; - -/// -/// Extension methods to multiply a vector by a scalar. -/// -[Obsolete("Numerical operations will be removed in a future release. Use System.Numerics.Tensors.TensorPrimitives instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class MultiplyOperation -{ - /// - /// Multiplies all elements of a vector by the scalar in-place. - /// Does not allocate new memory. - /// - /// The unmanaged data type (, currently supported). - /// The input vector. - /// The scalar. - public static void MultiplyByInPlace(this Span vector, double multiplier) - where TNumber : unmanaged - { - if (typeof(TNumber) == typeof(float)) - { - Span floatSpan = MemoryMarshal.Cast(vector); - MultiplyByInPlaceImplementation(floatSpan, (float)multiplier); - } - else if (typeof(TNumber) == typeof(double)) - { - Span doubleSpan = MemoryMarshal.Cast(vector); - MultiplyByInPlaceImplementation(doubleSpan, multiplier); - } - else - { - throw new NotSupportedException(); - } - } - - /// - /// Multiplies all elements of a vector by the scalar in-place. - /// Does not allocate new memory. - /// - /// The unmanaged data type (, currently supported). - /// The input vector. - /// The scalar. 
- public static void MultiplyByInPlace(this TNumber[] vector, double multiplier) - where TNumber : unmanaged - { - vector.AsSpan().MultiplyByInPlace(multiplier); - } - - #region private ================================================================================ - - private static unsafe void MultiplyByInPlaceImplementation(Span x, float multiplier) - { - fixed (float* pxBuffer = x) - { - float* px = pxBuffer; - float* pxEnd = px + x.Length; - - if (Vector.IsHardwareAccelerated && - x.Length >= Vector.Count) - { - Vector multiplierVec = new(multiplier); - float* pxOneVectorFromEnd = pxEnd - Vector.Count; - do - { - *(Vector*)px *= multiplierVec; - px += Vector.Count; - } while (px <= pxOneVectorFromEnd); - } - - while (px < pxEnd) - { - *px *= multiplier; - px++; - } - } - } - - private static unsafe void MultiplyByInPlaceImplementation(Span x, double multiplier) - { - fixed (double* pxBuffer = x) - { - double* px = pxBuffer; - double* pxEnd = px + x.Length; - - if (Vector.IsHardwareAccelerated && - x.Length >= Vector.Count) - { - Vector multiplierVec = new(multiplier); - double* pxOneVectorFromEnd = pxEnd - Vector.Count; - do - { - *(Vector*)px *= multiplierVec; - px += Vector.Count; - } while (px <= pxOneVectorFromEnd); - } - - while (px < pxEnd) - { - *px *= multiplier; - px++; - } - } - } - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/NormalizeOperation.cs b/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/NormalizeOperation.cs deleted file mode 100644 index e17d319b5216..000000000000 --- a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/NormalizeOperation.cs +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; - -namespace Microsoft.SemanticKernel.AI.Embeddings.VectorOperations; - -/// -/// Extension methods to normalize a vector. -/// -/// -/// https://en.wikipedia.org/wiki/Unit_vector -/// -[Obsolete("Numerical operations will be removed in a future release. Use System.Numerics.Tensors.TensorPrimitives instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -public static class NormalizeOperation -{ - /// - /// Normalizes a vector in-place by dividing all elements by the scalar Euclidean length. - /// The resulting length will be 1.0. Does not allocate new memory. - /// - /// The unmanaged data type (, currently supported). - /// The input vector. - public static void NormalizeInPlace(this Span vector) - where TNumber : unmanaged - { - vector.DivideByInPlace(vector.EuclideanLength()); - } - - /// - /// Normalizes a vector in-place by dividing all elements by the scalar Euclidean length. - /// The resulting length will be 1.0. Does not allocate new memory. - /// - /// The unmanaged data type (, currently supported). - /// The input vector. - public static void NormalizeInPlace(this TNumber[] vector) - where TNumber : unmanaged - { - vector.AsSpan().NormalizeInPlace(); - } -} diff --git a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/SpanExtensions.cs b/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/SpanExtensions.cs deleted file mode 100644 index 7182853aaf2e..000000000000 --- a/dotnet/src/SemanticKernel.Core/AI/Embeddings/VectorOperations/SpanExtensions.cs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
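// --- Hedged migration sketch (illustrative addition, not part of this diff) ---
// The obsolete messages on the deleted vector helpers point callers at
// System.Numerics.Tensors.TensorPrimitives. Rough equivalents, assuming the
// System.Numerics.Tensors package is referenced (the new APIs operate on float spans):
using System.Numerics.Tensors;

float[] x = { 1f, 2f, 3f }, y = { 3f, 2f, 1f };

float cosine = TensorPrimitives.CosineSimilarity(x, y); // was x.CosineSimilarity(y)
float dot    = TensorPrimitives.Dot(x, y);              // was x.DotProduct(y)
float length = TensorPrimitives.Norm(x);                // was x.EuclideanLength()
TensorPrimitives.Divide(x, length, x);                  // was x.DivideByInPlace(length) / NormalizeInPlace()
TensorPrimitives.Multiply(x, 2f, x);                    // was x.MultiplyByInPlace(2)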
- -using System; -using System.ComponentModel; - -namespace Microsoft.SemanticKernel.AI.Embeddings.VectorOperations; - -/// -/// Extension methods to convert from array and to . -/// -[Obsolete("Numerical operations will be removed in a future release. Use System.Numerics.Tensors.TensorPrimitives instead.")] -[EditorBrowsable(EditorBrowsableState.Never)] -internal static class SpanExtensions -{ - internal static ReadOnlySpan AsReadOnlySpan(this TNumber[] vector) - { - return new ReadOnlySpan(vector); - } - - internal static ReadOnlySpan AsReadOnlySpan(this Span span) - { - return span; - } -} diff --git a/dotnet/src/SemanticKernel.Core/Contents/StreamingMethodContent.cs b/dotnet/src/SemanticKernel.Core/Contents/StreamingMethodContent.cs new file mode 100644 index 000000000000..e6751607c5e3 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Contents/StreamingMethodContent.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents a manufactured streaming content from a single function result. +/// +public sealed class StreamingMethodContent : StreamingKernelContent +{ + /// + /// Gets the result of the function invocation. + /// + public object Content { get; } + + /// + public override byte[] ToByteArray() + { + if (this.Content is byte[] bytes) + { + return bytes; + } + + // By default if a native value is not Byte[] we output the UTF8 string representation of the value + return this.Content?.ToString() is string s ? + Encoding.UTF8.GetBytes(s) : + Array.Empty(); + } + + /// + public override string ToString() + { + return this.Content.ToString() ?? string.Empty; + } + + /// + /// Initializes a new instance of the class. + /// + /// Underlying object that represents the chunk content. + /// Additional metadata associated with the content. + public StreamingMethodContent(object innerContent, IReadOnlyDictionary? metadata = null) : base(innerContent, metadata: metadata) + { + this.Content = innerContent; + } +} diff --git a/dotnet/src/SemanticKernel.Core/Extensions/KernelSemanticFunctionExtensions.cs b/dotnet/src/SemanticKernel.Core/Extensions/KernelSemanticFunctionExtensions.cs deleted file mode 100644 index 2bae6043c70d..000000000000 --- a/dotnet/src/SemanticKernel.Core/Extensions/KernelSemanticFunctionExtensions.cs +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.IO; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.TemplateEngine; -using Microsoft.SemanticKernel.Text; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the namespace of IKernel -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Class for extensions methods to define semantic functions. -/// -public static class KernelSemanticFunctionExtensions -{ - /// - /// Build and register a function in the internal function collection, in a global generic plugin. - /// - /// Semantic Kernel instance - /// Name of the semantic function. The name can contain only alphanumeric chars + underscore. - /// Prompt template configuration. - /// Prompt template. 
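// --- Hedged sketch (illustrative addition, not part of this diff) ---
// StreamingMethodContent above wraps a plain method result as a streaming chunk; any content that
// is not already a byte[] falls back to the UTF-8 bytes of its string representation.
using Microsoft.SemanticKernel;

var chunk = new StreamingMethodContent(42);
string text  = chunk.ToString();     // "42"
byte[] bytes = chunk.ToByteArray();  // UTF-8 bytes of "42"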
- /// A C# function wrapping AI logic, usually defined with natural language - public static ISKFunction RegisterSemanticFunction( - this IKernel kernel, - string functionName, - PromptTemplateConfig promptTemplateConfig, - IPromptTemplate promptTemplate) - { - return kernel.RegisterSemanticFunction(FunctionCollection.GlobalFunctionsPluginName, functionName, promptTemplateConfig, promptTemplate); - } - - /// - /// Build and register a function in the internal function collection. - /// - /// Semantic Kernel instance - /// Name of the plugin containing the function. The name can contain only alphanumeric chars + underscore. - /// Name of the semantic function. The name can contain only alphanumeric chars + underscore. - /// Prompt template configuration. - /// Prompt template. - /// A C# function wrapping AI logic, usually defined with natural language - public static ISKFunction RegisterSemanticFunction( - this IKernel kernel, - string pluginName, - string functionName, - PromptTemplateConfig promptTemplateConfig, - IPromptTemplate promptTemplate) - { - // Future-proofing the name not to contain special chars - Verify.ValidFunctionName(functionName); - - ISKFunction function = kernel.CreateSemanticFunction(pluginName, functionName, promptTemplateConfig, promptTemplate); - return kernel.RegisterCustomFunction(function); - } - - /// - /// Define a string-to-string semantic function, with no direct support for input context. - /// The function can be referenced in templates and will receive the context, but when invoked programmatically you - /// can only pass in a string in input and receive a string in output. - /// - /// Semantic Kernel instance - /// Plain language definition of the semantic function, using SK template language - /// A name for the given function. The name can be referenced in templates and used by the pipeline planner. - /// Optional plugin name, for namespacing and avoid collisions - /// Optional description, useful for the planner - /// Optional LLM request settings - /// A function ready to use - public static ISKFunction CreateSemanticFunction( - this IKernel kernel, - string promptTemplate, - string? functionName = null, - string? pluginName = null, - string? description = null, - AIRequestSettings? requestSettings = null) - { - functionName ??= RandomFunctionName(); - - var promptTemplateConfig = new PromptTemplateConfig - { - Description = description ?? "Generic function, unknown purpose", - }; - - if (requestSettings is not null) - { - promptTemplateConfig.ModelSettings.Add(requestSettings); - } - - return kernel.CreateSemanticFunction( - promptTemplate: promptTemplate, - promptTemplateConfig: promptTemplateConfig, - functionName: functionName, - pluginName: pluginName); - } - - /// - /// Allow to define a semantic function passing in the definition in natural language, i.e. the prompt template. - /// - /// Semantic Kernel instance - /// Plain language definition of the semantic function, using SK template language - /// Prompt template configuration. - /// A name for the given function. The name can be referenced in templates and used by the pipeline planner. - /// An optional plugin name, e.g. to namespace functions with the same name. When empty, - /// the function is added to the global namespace, overwriting functions with the same name - /// A function ready to use - public static ISKFunction CreateSemanticFunction( - this IKernel kernel, - string promptTemplate, - PromptTemplateConfig promptTemplateConfig, - string? functionName = null, - string? 
pluginName = null) - { - functionName ??= RandomFunctionName(); - Verify.ValidFunctionName(functionName); - if (!string.IsNullOrEmpty(pluginName)) { Verify.ValidPluginName(pluginName); } - - var template = new PromptTemplate(promptTemplate, promptTemplateConfig, kernel.PromptTemplateEngine); - - // TODO: manage overwrites, potentially error out - return string.IsNullOrEmpty(pluginName) - ? kernel.RegisterSemanticFunction(functionName, promptTemplateConfig, template) - : kernel.RegisterSemanticFunction(pluginName!, functionName, promptTemplateConfig, template); - } - - /// - /// Invoke a semantic function using the provided prompt template. - /// - /// Semantic Kernel instance - /// Plain language definition of the semantic function, using SK template language - /// A name for the given function. The name can be referenced in templates and used by the pipeline planner. - /// Optional plugin name, for namespacing and avoid collisions - /// Optional description, useful for the planner - /// Optional LLM request settings - /// Kernel execution result - public static Task InvokeSemanticFunctionAsync( - this IKernel kernel, - string template, - string? functionName = null, - string? pluginName = null, - string? description = null, - AIRequestSettings? requestSettings = null) - { - var skFunction = kernel.CreateSemanticFunction( - template, - functionName, - pluginName, - description, - requestSettings); - - return kernel.RunAsync(skFunction); - } - - [Obsolete("Methods and classes which includes Skill in the name have been renamed to use Plugin. Use Kernel.ImportSemanticFunctionsFromDirectory instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - public static IDictionary ImportSemanticSkillFromDirectory( - this IKernel kernel, string parentDirectory, params string[] pluginDirectoryNames) - { - return kernel.ImportSemanticFunctionsFromDirectory(parentDirectory, pluginDirectoryNames); - } -#pragma warning restore CS1591 - - /// - /// Imports semantic functions, defined by prompt templates stored in the filesystem. - /// - /// - /// - /// A plugin directory contains a set of subdirectories, one for each semantic function. - /// - /// - /// This method accepts the path of the parent directory (e.g. "d:\plugins") and the name of the plugin directory - /// (e.g. "OfficePlugin"), which is used also as the "plugin name" in the internal function collection (note that - /// plugin and function names can contain only alphanumeric chars and underscore). - /// - /// - /// Example: - /// D:\plugins\ # parentDirectory = "D:\plugins" - /// - /// |__ OfficePlugin\ # pluginDirectoryName = "SummarizeEmailThread" - /// - /// |__ ScheduleMeeting # semantic function - /// |__ skprompt.txt # prompt template - /// |__ config.json # settings (optional file) - /// - /// |__ SummarizeEmailThread # semantic function - /// |__ skprompt.txt # prompt template - /// |__ config.json # settings (optional file) - /// - /// |__ MergeWordAndExcelDocs # semantic function - /// |__ skprompt.txt # prompt template - /// |__ config.json # settings (optional file) - /// - /// |__ XboxPlugin\ # another plugin, etc. - /// - /// |__ MessageFriend - /// |__ skprompt.txt - /// |__ config.json - /// |__ LaunchGame - /// |__ skprompt.txt - /// |__ config.json - /// - /// - /// See https://github.com/microsoft/semantic-kernel/tree/main/samples/plugins for examples in the Semantic Kernel repository. 
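// --- Hedged migration sketch (illustrative addition, not part of this diff) ---
// With the ISKFunction-era CreateSemanticFunction/InvokeSemanticFunctionAsync helpers above
// removed, a prompt function is created through KernelFunctionFactory (added later in this change)
// and invoked via Kernel.InvokeAsync. The prompt text and argument name are illustrative only.
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

internal static class PromptMigrationExample
{
    public static async Task<string?> SummarizeAsync(Kernel kernel, string text)
    {
        KernelFunction summarize = KernelFunctionFactory.CreateFromPrompt(
            "Summarize the following text in one sentence: {{$input}}",
            functionName: "Summarize");

        FunctionResult result = await kernel.InvokeAsync(summarize, new KernelArguments { ["input"] = text });
        return result.GetValue<string>();
    }
}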
- /// - /// - /// Semantic Kernel instance - /// Directory containing the plugin directory, e.g. "d:\myAppPlugins" - /// Name of the directories containing the selected plugins, e.g. "StrategyPlugin" - /// A list of all the semantic functions found in the directory, indexed by plugin name. - public static IDictionary ImportSemanticFunctionsFromDirectory( - this IKernel kernel, string parentDirectory, params string[] pluginDirectoryNames) - { - const string ConfigFile = "config.json"; - const string PromptFile = "skprompt.txt"; - - var functions = new Dictionary(); - - ILogger? logger = null; - foreach (string pluginDirectoryName in pluginDirectoryNames) - { - Verify.ValidPluginName(pluginDirectoryName); - var pluginDirectory = Path.Combine(parentDirectory, pluginDirectoryName); - Verify.DirectoryExists(pluginDirectory); - - string[] directories = Directory.GetDirectories(pluginDirectory); - foreach (string dir in directories) - { - var functionName = Path.GetFileName(dir); - - // Continue only if prompt template exists - var promptPath = Path.Combine(dir, PromptFile); - if (!File.Exists(promptPath)) { continue; } - - // Load prompt configuration. Note: the configuration is optional. - var config = new PromptTemplateConfig(); - var configPath = Path.Combine(dir, ConfigFile); - if (File.Exists(configPath)) - { - config = PromptTemplateConfig.FromJson(File.ReadAllText(configPath)); - } - - logger ??= kernel.LoggerFactory.CreateLogger(typeof(IKernel)); - if (logger.IsEnabled(LogLevel.Trace)) - { - logger.LogTrace("Config {0}: {1}", functionName, Json.Serialize(config)); - } - - // Load prompt template - var template = new PromptTemplate(File.ReadAllText(promptPath), config, kernel.PromptTemplateEngine); - - if (logger.IsEnabled(LogLevel.Trace)) - { - logger.LogTrace("Registering function {0}.{1} loaded from {2}", pluginDirectoryName, functionName, dir); - } - - functions[functionName] = kernel.RegisterSemanticFunction(pluginDirectoryName, functionName, config, template); - } - } - - return functions; - } - - private static string RandomFunctionName() => "func" + Guid.NewGuid().ToString("N"); - - private static ISKFunction CreateSemanticFunction( - this IKernel kernel, - string pluginName, - string functionName, - PromptTemplateConfig promptTemplateConfig, - IPromptTemplate promptTemplate) - { - return SemanticFunction.FromSemanticConfig( - pluginName, - functionName, - promptTemplateConfig, - promptTemplate, - kernel.LoggerFactory - ); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Extensions/SKFunctionExtensions.cs b/dotnet/src/SemanticKernel.Core/Extensions/SKFunctionExtensions.cs deleted file mode 100644 index 533987024a38..000000000000 --- a/dotnet/src/SemanticKernel.Core/Extensions/SKFunctionExtensions.cs +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Globalization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 // Namespace does not match folder structure -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 // Namespace does not match folder structure - -/// -/// Class that holds extension methods for objects implementing ISKFunction. -/// -public static class SKFunctionExtensions -{ - /// - /// Configure the LLM settings used by semantic function. 
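// --- Hedged migration sketch (illustrative addition, not part of this diff) ---
// The deleted UseCompletionSettings/ContextVariables-based InvokeAsync overloads are replaced by
// passing PromptExecutionSettings through KernelArguments, which OrderedAIServiceSelector earlier
// in this change reads ahead of the function's own settings. The model id is illustrative only.
using Microsoft.SemanticKernel;

var arguments = new KernelArguments(new PromptExecutionSettings { ModelId = "my-model" })
{
    ["input"] = "Hello"
};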
- /// - /// Semantic function - /// Request settings - /// Self instance - [Obsolete("Use implementation of IAIServiceConfigurationProvider instead. This will be removed in a future release.")] - public static ISKFunction UseCompletionSettings(this ISKFunction skFunction, AIRequestSettings requestSettings) - { - return skFunction.SetAIConfiguration(requestSettings); - } - - /// - /// Execute a function allowing to pass the main input separately from the rest of the context. - /// - /// Function to execute - /// Kernel - /// Input variables for the function - /// Collection of functions that this function can access - /// Culture to use for the function execution - /// LLM completion settings (for semantic functions only) - /// The to use for logging. If null, no logging will be performed. - /// The to monitor for cancellation requests. The default is . - /// The result of the function execution - public static Task InvokeAsync(this ISKFunction function, - IKernel kernel, - ContextVariables? variables = null, - IReadOnlyFunctionCollection? functions = null, - CultureInfo? culture = null, - AIRequestSettings? requestSettings = null, - ILoggerFactory? loggerFactory = null, - CancellationToken cancellationToken = default) - { - var context = kernel.CreateNewContext(variables, functions, loggerFactory, culture); - return function.InvokeAsync(context, requestSettings, cancellationToken); - } - - /// - /// Execute a function allowing to pass the main input separately from the rest of the context. - /// - /// Function to execute - /// Input string for the function - /// Kernel - /// Collection of functions that this function can access - /// Culture to use for the function execution - /// LLM completion settings (for semantic functions only) - /// The to use for logging. If null, no logging will be performed. - /// The to monitor for cancellation requests. The default is . - /// The result of the function execution - public static Task InvokeAsync(this ISKFunction function, - string input, - IKernel kernel, - IReadOnlyFunctionCollection? functions = null, - CultureInfo? culture = null, - AIRequestSettings? requestSettings = null, - ILoggerFactory? loggerFactory = null, - CancellationToken cancellationToken = default) - => function.InvokeAsync(kernel, new ContextVariables(input), functions, culture, requestSettings, loggerFactory, cancellationToken); - - /// - /// Returns decorated instance of with enabled instrumentation. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public static ISKFunction WithInstrumentation(this ISKFunction function, ILoggerFactory? loggerFactory = null) - { - return new InstrumentedSKFunction(function, loggerFactory); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/DefaultKernelPlugin.cs b/dotnet/src/SemanticKernel.Core/Functions/DefaultKernelPlugin.cs new file mode 100644 index 000000000000..d7a9e2c9a4e4 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Functions/DefaultKernelPlugin.cs @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides an implementation around a collection of functions. +/// +internal sealed class DefaultKernelPlugin : KernelPlugin +{ + /// The collection of functions associated with this plugin. 
+ private readonly Dictionary _functions; + + /// Initializes the new plugin from the provided name, description, and function collection. + /// The name for the plugin. + /// A description of the plugin. + /// The initial functions to be available as part of the plugin. + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + internal DefaultKernelPlugin(string name, string? description, IEnumerable? functions = null) : base(name, description) + { + this._functions = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (functions is not null) + { + foreach (KernelFunction f in functions) + { + Verify.NotNull(f, nameof(functions)); + this._functions.Add(f.Name, f); + } + } + } + + /// + public override int FunctionCount => this._functions.Count; + + /// + public override bool TryGetFunction(string name, [NotNullWhen(true)] out KernelFunction? function) => + this._functions.TryGetValue(name, out function); + + /// + public override IEnumerator GetEnumerator() => this._functions.Values.GetEnumerator(); +} diff --git a/dotnet/src/SemanticKernel.Core/Functions/DelegatingAIServiceSelector.cs b/dotnet/src/SemanticKernel.Core/Functions/DelegatingAIServiceSelector.cs deleted file mode 100644 index 05a7a05e6411..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/DelegatingAIServiceSelector.cs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.Functions; - -/// -/// Added for backward compatibility only, this will be removed when ISKFunction.SetAIService and ISKFunction.SetAIConfiguration are removed. -/// -[Obsolete("Remove this when ISKFunction.SetAIService and ISKFunction.SetAIConfiguration are removed.")] -internal class DelegatingAIServiceSelector : IAIServiceSelector -{ - internal Func? ServiceFactory { get; set; } - internal AIRequestSettings? RequestSettings { get; set; } - - /// - public (T?, AIRequestSettings?) SelectAIService(string renderedPrompt, IAIServiceProvider serviceProvider, IReadOnlyList? modelSettings) where T : IAIService - { - return ((T?)this.ServiceFactory?.Invoke() ?? serviceProvider.GetService(null), this.RequestSettings ?? modelSettings?[0]); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/FunctionCollection.cs b/dotnet/src/SemanticKernel.Core/Functions/FunctionCollection.cs deleted file mode 100644 index 37927178df05..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/FunctionCollection.cs +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.ComponentModel; -using System.Diagnostics; -using System.Diagnostics.CodeAnalysis; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Semantic Kernel default function collection class. -/// The class holds a list of all the functions, native and semantic, known to the kernel instance. -/// The list is used by the planner and when executing pipelines of function compositions. 
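// --- Hedged sketch (illustrative addition, not part of this diff) ---
// DefaultKernelPlugin above is internal, so callers are expected to go through a factory;
// KernelPluginFactory.CreateFromFunctions is assumed here as that entry point. The lookup surface
// (TryGetFunction, FunctionCount) is the one defined by the plugin type in this hunk.
using System;
using Microsoft.SemanticKernel;

KernelPlugin plugin = KernelPluginFactory.CreateFromFunctions(
    "MathPlugin",
    "Toy plugin used for illustration",
    new[] { KernelFunctionFactory.CreateFromMethod((int a, int b) => a + b, "Add") });

if (plugin.TryGetFunction("Add", out KernelFunction? add))
{
    Console.WriteLine($"{plugin.Name} exposes {plugin.FunctionCount} function(s), including {add.Name}.");
}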
-/// -[SuppressMessage("Naming", "CA1711:Identifiers should not have incorrect suffix")] -[DebuggerTypeProxy(typeof(IReadOnlyFunctionCollectionTypeProxy))] -[DebuggerDisplay("{DebuggerDisplay,nq}")] -public class FunctionCollection : IFunctionCollection -{ - /// - /// Plugin name used when storing global functions. - /// - public const string GlobalFunctionsPluginName = "_GLOBAL_FUNCTIONS_"; - - /// - /// Initializes a new instance of the class. - /// - public FunctionCollection() : this((IReadOnlyFunctionCollection?)null) - { - } - - /// - /// Initializes a new instance of the class. - /// - /// Collection of functions with which to populate this instance. - public FunctionCollection(IReadOnlyFunctionCollection? readOnlyFunctionCollection) - { - // Important: names are case insensitive - this._functionCollection = new(StringComparer.OrdinalIgnoreCase); - - if (readOnlyFunctionCollection is not null) - { - foreach (var functionView in readOnlyFunctionCollection.GetFunctionViews()) - { - this.AddFunction(readOnlyFunctionCollection.GetFunction(functionView.PluginName, functionView.Name)); - } - } - } - - /// - /// Adds a function to the function collection. - /// - /// The function instance to add. - /// The updated function collection. - public IFunctionCollection AddFunction(ISKFunction functionInstance) - { - Verify.NotNull(functionInstance); - - ConcurrentDictionary functions = this._functionCollection.GetOrAdd(functionInstance.PluginName, static _ => new(StringComparer.OrdinalIgnoreCase)); - functions[functionInstance.Name] = functionInstance; - - return this; - } - - /// - public ISKFunction GetFunction(string functionName) => - this.GetFunction(GlobalFunctionsPluginName, functionName); - - /// - public ISKFunction GetFunction(string pluginName, string functionName) - { - pluginName = !string.IsNullOrWhiteSpace(pluginName) ? pluginName : GlobalFunctionsPluginName; - - if (!this.TryGetFunction(pluginName, functionName, out ISKFunction? functionInstance)) - { - throw new SKException($"Function not available {pluginName}.{functionName}"); - } - - return functionInstance; - } - - /// - public bool TryGetFunction(string functionName, [NotNullWhen(true)] out ISKFunction? availableFunction) => - this.TryGetFunction(GlobalFunctionsPluginName, functionName, out availableFunction); - - /// - public bool TryGetFunction(string pluginName, string functionName, [NotNullWhen(true)] out ISKFunction? availableFunction) - { - Verify.NotNull(pluginName); - Verify.NotNull(functionName); - - if (this._functionCollection.TryGetValue(pluginName, out ConcurrentDictionary? functions)) - { - return functions.TryGetValue(functionName, out availableFunction); - } - - availableFunction = null; - return false; - } - - /// - public IReadOnlyList GetFunctionViews() - { - var result = new List(); - - foreach (var functions in this._functionCollection.Values) - { - foreach (ISKFunction f in functions.Values) - { - result.Add(f.Describe()); - } - } - - return result; - } - - [DebuggerBrowsable(DebuggerBrowsableState.Never)] - internal string DebuggerDisplay => $"Count = {this._functionCollection.Count}"; - - #region Obsolete to be removed - /// - /// Initializes a new instance of the class. - /// - /// Optional skill collection to copy from - /// The logger factory. - [Obsolete("Use a constructor that doesn't accept an ILoggerFactory. This constructor will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public FunctionCollection(IReadOnlyFunctionCollection? 
readOnlyFunctionCollection = null, ILoggerFactory? loggerFactory = null) : this(readOnlyFunctionCollection) - { - } - - /// - /// Initializes a new instance of the class. - /// - /// The logger factory. - [Obsolete("Use a constructor that doesn't accept an ILoggerFactory. This constructor will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public FunctionCollection(ILoggerFactory? loggerFactory = null) : this() - { - } - #endregion - - #region private ================================================================================ - - private readonly ConcurrentDictionary> _functionCollection; - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/IReadOnlyFunctionCollectionTypeProxy.cs b/dotnet/src/SemanticKernel.Core/Functions/IReadOnlyFunctionCollectionTypeProxy.cs deleted file mode 100644 index 0d41bc2ff4df..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/IReadOnlyFunctionCollectionTypeProxy.cs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Debugger type proxy for . -/// -// ReSharper disable once InconsistentNaming -internal sealed class IReadOnlyFunctionCollectionTypeProxy -{ - private readonly IReadOnlyFunctionCollection _collection; - - public IReadOnlyFunctionCollectionTypeProxy(IReadOnlyFunctionCollection collection) => this._collection = collection; - - [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)] - public FunctionsProxy[] Functions - { - get - { - return this._collection.GetFunctionViews() - .GroupBy(f => f.PluginName) - .Select(g => new FunctionsProxy(g) { Name = g.Key }) - .ToArray(); - } - } - - [DebuggerDisplay("{Name}")] - public sealed class FunctionsProxy : List - { - [DebuggerBrowsable(DebuggerBrowsableState.Never)] - public string? Name; - - public FunctionsProxy(IEnumerable functions) : base(functions) { } - } -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/InstrumentedSKFunction.cs b/dotnet/src/SemanticKernel.Core/Functions/InstrumentedSKFunction.cs deleted file mode 100644 index cef67d4718c5..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/InstrumentedSKFunction.cs +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Standard Semantic Kernel callable function with instrumentation. -/// -internal sealed class InstrumentedSKFunction : ISKFunction -{ - /// - public string Name => this._function.Name; - - /// - public string PluginName => this._function.PluginName; - - /// - public string Description => this._function.Description; - - /// - /// Initialize a new instance of the class. - /// - /// Instance of to decorate. - /// The to use for logging. 
If null, no logging will be performed. - public InstrumentedSKFunction( - ISKFunction function, - ILoggerFactory? loggerFactory = null) - { - this._function = function; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(InstrumentedSKFunction)) : NullLogger.Instance; - - this._executionTimeHistogram = s_meter.CreateHistogram( - name: $"SK.{this.PluginName}.{this.Name}.ExecutionTime", - unit: "ms", - description: "Duration of function execution"); - - this._executionTotalCounter = s_meter.CreateCounter( - name: $"SK.{this.PluginName}.{this.Name}.ExecutionTotal", - description: "Total number of function executions"); - - this._executionSuccessCounter = s_meter.CreateCounter( - name: $"SK.{this.PluginName}.{this.Name}.ExecutionSuccess", - description: "Number of successful function executions"); - - this._executionFailureCounter = s_meter.CreateCounter( - name: $"SK.{this.PluginName}.{this.Name}.ExecutionFailure", - description: "Number of failed function executions"); - } - - /// - public FunctionView Describe() => - this._function.Describe(); - - /// - public async Task InvokeAsync( - SKContext context, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - return await this.InvokeWithInstrumentationAsync(() => - this._function.InvokeAsync(context, requestSettings, cancellationToken)).ConfigureAwait(false); - } - - #region private ================================================================================ - - private readonly ISKFunction _function; - private readonly ILogger _logger; - - /// - /// Instance of for function-related activities. - /// - private static readonly ActivitySource s_activitySource = new(typeof(SKFunction).FullName); - - /// - /// Instance of for function-related metrics. - /// - private static readonly Meter s_meter = new(typeof(SKFunction).FullName); - - /// - /// Instance of to measure and track the time of function execution. - /// - private readonly Histogram _executionTimeHistogram; - - /// - /// Instance of to keep track of the total number of function executions. - /// - private readonly Counter _executionTotalCounter; - - /// - /// Instance of to keep track of the number of successful function executions. - /// - private readonly Counter _executionSuccessCounter; - - /// - /// Instance of to keep track of the number of failed function executions. - /// - private readonly Counter _executionFailureCounter; - - /// - /// Wrapper for instrumentation to be used in multiple invocation places. - /// - /// Delegate to instrument. 
- private async Task InvokeWithInstrumentationAsync(Func> func) - { - using var activity = s_activitySource.StartActivity($"{this.PluginName}.{this.Name}"); - - this._logger.LogInformation("{PluginName}.{FunctionName}: Function execution started.", this.PluginName, this.Name); - - var stopwatch = new Stopwatch(); - stopwatch.Start(); - - FunctionResult result; - - try - { - result = await func().ConfigureAwait(false); - } - catch (Exception ex) - { - this._logger.LogWarning("{PluginName}.{FunctionName}: Function execution status: {Status}", - this.PluginName, this.Name, "Failed"); - - this._logger.LogError(ex, "{PluginName}.{FunctionName}: Function execution exception details: {Message}", - this.PluginName, this.Name, ex.Message); - - this._executionFailureCounter.Add(1); - - throw; - } - finally - { - stopwatch.Stop(); - this._executionTotalCounter.Add(1); - this._executionTimeHistogram.Record(stopwatch.ElapsedMilliseconds); - } - - this._logger.LogInformation("{PluginName}.{FunctionName}: Function execution status: {Status}", - this.PluginName, this.Name, "Success"); - - this._logger.LogInformation("{PluginName}.{FunctionName}: Function execution finished in {ExecutionTime}ms", - this.PluginName, this.Name, stopwatch.ElapsedMilliseconds); - - this._executionSuccessCounter.Add(1); - - return result; - } - - #endregion - - #region Obsolete ======================================================================= - - /// - [Obsolete("Use ISKFunction.RequestSettingsFactory instead. This will be removed in a future release.")] - public AIRequestSettings? RequestSettings => this._function.RequestSettings; - - /// - [Obsolete("Use ISKFunction.SetAIRequestSettingsFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIConfiguration(AIRequestSettings? requestSettings) => - this._function.SetAIConfiguration(requestSettings); - - /// - [Obsolete("Use ISKFunction.SetAIServiceFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIService(Func serviceFactory) => - this._function.SetAIService(serviceFactory); - - /// - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use ISKFunction.PluginName instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public string SkillName => this._function.PluginName; - - /// - [Obsolete("Kernel no longer differentiates between Semantic and Native functions. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public bool IsSemantic => this._function.IsSemantic; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultSkillCollection(IReadOnlyFunctionCollection skills) => this; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultFunctionCollection(IReadOnlyFunctionCollection functions) => this; - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs new file mode 100644 index 000000000000..3af9a7b48fde --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs @@ -0,0 +1,125 @@ +// Copyright (c) Microsoft. All rights reserved. 
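For orientation, a minimal usage sketch of the KernelFunctionFactory introduced in this new file. The kernel instance, function names, and argument values are illustrative and not part of this change; the factory signatures themselves are the ones added below.

// Assumes: using Microsoft.SemanticKernel; and a configured Kernel instance named `kernel`.
KernelFunction echo = KernelFunctionFactory.CreateFromMethod(
    (string text) => $"You said: {text}", functionName: "Echo");

KernelFunction summarize = KernelFunctionFactory.CreateFromPrompt("Summarize: {{$input}}");

// Both kinds of function are invoked the same way.
FunctionResult result = await kernel.InvokeAsync(echo, new KernelArguments { ["text"] = "hello" });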
+ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides factory methods for creating commonly-used implementations of , such as +/// those backed by a prompt to be submitted to an LLM or those backed by a .NET method. +/// +public static class KernelFunctionFactory +{ + #region FromMethod + /// + /// Creates a instance for a method, specified via a delegate. + /// + /// The method to be represented via the created . + /// The name to use for the function. If null, it will default to one derived from the method represented by . + /// The description to use for the function. If null, it will default to one derived from the method represented by , if possible (e.g. via a on the method). + /// Optional parameter descriptions. If null, it will default to one derived from the method represented by . + /// Optional return parameter description. If null, it will default to one derived from the method represented by . + /// The to use for logging. If null, no logging will be performed. + /// The created for invoking . + public static KernelFunction CreateFromMethod( + Delegate method, + string? functionName = null, + string? description = null, + IEnumerable? parameters = null, + KernelReturnParameterMetadata? returnParameter = null, + ILoggerFactory? loggerFactory = null) => + CreateFromMethod(method.Method, method.Target, functionName, description, parameters, returnParameter, loggerFactory); + + /// + /// Creates a instance for a method, specified via an instance + /// and an optional target object if the method is an instance method. + /// + /// The method to be represented via the created . + /// The target object for the if it represents an instance method. This should be null if and only if is a static method. + /// The name to use for the function. If null, it will default to one derived from the method represented by . + /// The description to use for the function. If null, it will default to one derived from the method represented by , if possible (e.g. via a on the method). + /// Optional parameter descriptions. If null, it will default to ones derived from the method represented by . + /// Optional return parameter description. If null, it will default to one derived from the method represented by . + /// The to use for logging. If null, no logging will be performed. + /// The created for invoking . + public static KernelFunction CreateFromMethod( + MethodInfo method, + object? target = null, + string? functionName = null, + string? description = null, + IEnumerable? parameters = null, + KernelReturnParameterMetadata? returnParameter = null, + ILoggerFactory? loggerFactory = null) => + KernelFunctionFromMethod.Create(method, target, functionName, description, parameters, returnParameter, loggerFactory); + #endregion + + #region FromPrompt + + /// + /// Creates a instance for a prompt specified via a prompt template. + /// + /// Prompt template for the function. + /// Default execution settings to use when invoking this prompt function. + /// The name to use for the function. If null, it will default to a randomly generated name. + /// The description to use for the function. + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The to use for logging. 
If null, no logging will be performed. + /// The created for invoking the prompt. + public static KernelFunction CreateFromPrompt( + string promptTemplate, + PromptExecutionSettings? executionSettings = null, + string? functionName = null, + string? description = null, + string? templateFormat = null, + IPromptTemplateFactory? promptTemplateFactory = null, + ILoggerFactory? loggerFactory = null) => + KernelFunctionFromPrompt.Create(promptTemplate, CreateSettingsDictionary(executionSettings), functionName, description, templateFormat, promptTemplateFactory, loggerFactory); + + /// + /// Creates a instance for a prompt specified via a prompt template configuration. + /// + /// Configuration information describing the prompt. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The to use for logging. If null, no logging will be performed. + /// The created for invoking the prompt. + public static KernelFunction CreateFromPrompt( + PromptTemplateConfig promptConfig, + IPromptTemplateFactory? promptTemplateFactory = null, + ILoggerFactory? loggerFactory = null) => KernelFunctionFromPrompt.Create(promptConfig, promptTemplateFactory, loggerFactory); + + /// + /// Creates a instance for a prompt specified via a prompt template and prompt template configuration. + /// + /// Prompt template for the function. + /// Configuration information describing the prompt. + /// The to use for logging. If null, no logging will be performed. + /// The created for invoking the prompt. + public static KernelFunction CreateFromPrompt( + IPromptTemplate promptTemplate, + PromptTemplateConfig promptConfig, + ILoggerFactory? loggerFactory = null) => KernelFunctionFromPrompt.Create(promptTemplate, promptConfig, loggerFactory); + #endregion + + /// + /// Wraps the specified settings into a dictionary with the default service ID as the key. + /// + [return: NotNullIfNotNull("settings")] + private static Dictionary? CreateSettingsDictionary(PromptExecutionSettings? settings) => + settings is null ? null : + new Dictionary(1) + { + { PromptExecutionSettings.DefaultServiceId, settings }, + }; +} diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs new file mode 100644 index 000000000000..a31950412eb1 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs @@ -0,0 +1,761 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Globalization; +using System.Linq; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.ExceptionServices; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides factory methods for creating instances backed by a .NET method. 
+/// +[DebuggerDisplay("{DebuggerDisplay,nq}")] +internal sealed class KernelFunctionFromMethod : KernelFunction +{ + /// + /// Creates a instance for a method, specified via an instance + /// and an optional target object if the method is an instance method. + /// + /// The method to be represented via the created . + /// The target object for the if it represents an instance method. This should be null if and only if is a static method. + /// The name to use for the function. If null, it will default to one derived from the method represented by . + /// The description to use for the function. If null, it will default to one derived from the method represented by , if possible (e.g. via a on the method). + /// Optional parameter descriptions. If null, it will default to one derived from the method represented by . + /// Optional return parameter description. If null, it will default to one derived from the method represented by . + /// The to use for logging. If null, no logging will be performed. + /// The created wrapper for . + public static KernelFunction Create( + MethodInfo method, + object? target = null, + string? functionName = null, + string? description = null, + IEnumerable? parameters = null, + KernelReturnParameterMetadata? returnParameter = null, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNull(method); + if (!method.IsStatic && target is null) + { + throw new ArgumentNullException(nameof(target), "Target must not be null for an instance method."); + } + + MethodDetails methodDetails = GetMethodDetails(functionName, method, target); + var result = new KernelFunctionFromMethod( + methodDetails.Function, + methodDetails.Name, + description ?? methodDetails.Description, + parameters?.ToList() ?? methodDetails.Parameters, + returnParameter ?? methodDetails.ReturnParameter); + + if (loggerFactory?.CreateLogger(method.DeclaringType ?? typeof(KernelFunctionFromPrompt)) is ILogger logger && + logger.IsEnabled(LogLevel.Trace)) + { + logger.LogTrace("Created KernelFunction '{Name}' for '{MethodName}'", result.Name, method.Name); + } + + return result; + } + + /// + protected override ValueTask InvokeCoreAsync( + Kernel kernel, + KernelArguments arguments, + CancellationToken cancellationToken) + { + return this._function(kernel, this, arguments, cancellationToken); + } + + /// + protected override async IAsyncEnumerable InvokeStreamingCoreAsync( + Kernel kernel, + KernelArguments arguments, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + FunctionResult functionResult = await this.InvokeCoreAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + if (functionResult.Value is TResult result) + { + yield return result; + yield break; + } + + // If the function returns an IAsyncEnumerable, we can stream the results directly. + // This helps to enable composition, with a KernelFunctionFromMethod that returns an + // Invoke{Prompt}StreamingAsync and returns its result enumerable directly. 
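A hedged sketch of the composition the comment above describes: a method-backed function that simply returns the enumerable produced by a streaming prompt invocation, so its items flow straight through the streaming path. The method name and prompt are illustrative; it assumes the InvokePromptStreamingAsync extension on Kernel and the StreamingKernelContent type referenced elsewhere in this change.

// Assumes: using System.Collections.Generic; using Microsoft.SemanticKernel;
[KernelFunction]
public static IAsyncEnumerable<StreamingKernelContent> StreamJokeAsync(Kernel kernel, string topic) =>
    // The Kernel parameter is injected by the parameter marshalers below, not supplied by the caller.
    kernel.InvokePromptStreamingAsync("Tell a short joke about {{$topic}}.", new KernelArguments { ["topic"] = topic });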
+ if (functionResult.Value is IAsyncEnumerable asyncEnumerable) + { + await foreach (TResult item in asyncEnumerable.WithCancellation(cancellationToken).ConfigureAwait(false)) + { + yield return item; + } + + yield break; + } + + // Supports the following provided T types for Method streaming + if (typeof(TResult) == typeof(StreamingKernelContent) || + typeof(TResult) == typeof(StreamingMethodContent)) + { + if (functionResult.Value is not null) + { + yield return (TResult)(object)new StreamingMethodContent(functionResult.Value, functionResult.Metadata); + } + yield break; + } + + throw new NotSupportedException($"Streaming function {this.Name} does not support type {typeof(TResult)}"); + + // We don't invoke the hook here as the InvokeCoreAsync will do that for us + } + + /// + /// JSON serialized string representation of the function. + /// + public override string ToString() => JsonSerializer.Serialize(this, JsonOptionsCache.WriteIndented); + + #region private + + /// Delegate used to invoke the underlying delegate. + private delegate ValueTask ImplementationFunc( + Kernel kernel, + KernelFunction function, + KernelArguments arguments, + CancellationToken cancellationToken); + + private static readonly object[] s_cancellationTokenNoneArray = new object[] { CancellationToken.None }; + private readonly ImplementationFunc _function; + + private record struct MethodDetails(string Name, string Description, ImplementationFunc Function, List Parameters, KernelReturnParameterMetadata ReturnParameter); + + private KernelFunctionFromMethod( + ImplementationFunc implementationFunc, + string functionName, + string description, + IReadOnlyList parameters, + KernelReturnParameterMetadata returnParameter) : + base(functionName, description, parameters, returnParameter) + { + Verify.ValidFunctionName(functionName); + + this._function = implementationFunc; + } + + private static MethodDetails GetMethodDetails(string? functionName, MethodInfo method, object? target) + { + ThrowForInvalidSignatureIf(method.IsGenericMethodDefinition, method, "Generic methods are not supported"); + + if (functionName is null) + { + // Get the name to use for the function. If the function has a KernelFunction attribute and it contains a name, we use that. + // Otherwise, we use the name of the method, but strip off any "Async" suffix if it's {Value}Task-returning. + // We don't apply any heuristics to the value supplied by KernelFunction's Name so that it can always be used + // as a definitive override. + functionName = method.GetCustomAttribute(inherit: true)?.Name?.Trim(); + if (string.IsNullOrEmpty(functionName)) + { + functionName = SanitizeMetadataName(method.Name!); + + if (IsAsyncMethod(method) && + functionName.EndsWith("Async", StringComparison.Ordinal) && + functionName.Length > "Async".Length) + { + functionName = functionName.Substring(0, functionName.Length - "Async".Length); + } + } + } + + Verify.ValidFunctionName(functionName); + + // Build up a list of KernelParameterMetadata for the parameters we expect to be populated + // from arguments. Some arguments are populated specially, not from arguments, and thus + // we don't want to advertize their metadata, e.g. CultureInfo, ILoggerFactory, etc. + List argParameterViews = new(); + + // Get marshaling funcs for parameters and build up the parameter metadata. 
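For example (an illustrative signature, not from this change), a method declared as below would advertise only `name` (required) and `times` (optional, default "1") in its metadata; the Kernel and CancellationToken parameters are satisfied by the special-case marshalers and are not exposed as arguments.

// Assumes: using System.Linq; using System.Threading; using Microsoft.SemanticKernel;
[KernelFunction]
public static string Greet(Kernel kernel, string name, int times = 1, CancellationToken cancellationToken = default) =>
    string.Concat(Enumerable.Repeat($"Hello, {name}! ", times)).TrimEnd();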
+ var parameters = method.GetParameters(); + var parameterFuncs = new Func[parameters.Length]; + bool sawFirstParameter = false; + for (int i = 0; i < parameters.Length; i++) + { + (parameterFuncs[i], KernelParameterMetadata? parameterView) = GetParameterMarshalerDelegate(method, parameters[i], ref sawFirstParameter); + if (parameterView is not null) + { + argParameterViews.Add(parameterView); + } + } + + // Check for param names conflict + Verify.ParametersUniqueness(argParameterViews); + + // Get the return type and a marshaling func for the return value. + (Type returnType, Func> returnFunc) = GetReturnValueMarshalerDelegate(method); + if (Nullable.GetUnderlyingType(returnType) is Type underlying) + { + // Unwrap the U from a Nullable since everything is going through object, at which point Nullable and a boxed U are indistinguishable. + returnType = underlying; + } + + // Create the func + ValueTask Function(Kernel kernel, KernelFunction function, KernelArguments arguments, CancellationToken cancellationToken) + { + // Create the arguments. + object?[] args = parameterFuncs.Length != 0 ? new object?[parameterFuncs.Length] : Array.Empty(); + for (int i = 0; i < args.Length; i++) + { + args[i] = parameterFuncs[i](function, kernel, arguments, cancellationToken); + } + + // Invoke the method. + object? result = Invoke(method, target, args); + + // Extract and return the result. + return returnFunc(kernel, function, result); + } + + // And return the details. + return new MethodDetails + { + Function = Function, + Name = functionName!, + Description = method.GetCustomAttribute(inherit: true)?.Description ?? "", + Parameters = argParameterViews, + ReturnParameter = new KernelReturnParameterMetadata() + { + ParameterType = returnType, + Description = method.ReturnParameter.GetCustomAttribute(inherit: true)?.Description, + } + }; + } + + /// Gets whether a method has a known async return type. + private static bool IsAsyncMethod(MethodInfo method) + { + Type t = method.ReturnType; + + if (t == typeof(Task) || t == typeof(ValueTask)) + { + return true; + } + + if (t.IsGenericType) + { + t = t.GetGenericTypeDefinition(); + if (t == typeof(Task<>) || t == typeof(ValueTask<>) || t == typeof(IAsyncEnumerable<>)) + { + return true; + } + } + + return false; + } + + /// + /// Gets a delegate for handling the marshaling of a parameter. + /// + private static (Func, KernelParameterMetadata?) GetParameterMarshalerDelegate( + MethodInfo method, ParameterInfo parameter, ref bool sawFirstParameter) + { + Type type = parameter.ParameterType; + + // Handle special types. + // These are not reported as part of KernelParameterMetadata because they're not satisfied from arguments. + + if (type == typeof(KernelFunction)) + { + return (static (KernelFunction func, Kernel _, KernelArguments _, CancellationToken _) => func, null); + } + + if (type == typeof(Kernel)) + { + return (static (KernelFunction _, Kernel kernel, KernelArguments _, CancellationToken _) => kernel, null); + } + + if (type == typeof(KernelArguments)) + { + return (static (KernelFunction _, Kernel _, KernelArguments arguments, CancellationToken _) => arguments, null); + } + + if (type == typeof(ILoggerFactory)) + { + return ((KernelFunction _, Kernel kernel, KernelArguments _, CancellationToken _) => kernel.LoggerFactory, null); + } + + if (type == typeof(ILogger)) + { + return ((KernelFunction _, Kernel kernel, KernelArguments _, CancellationToken _) => kernel.LoggerFactory.CreateLogger(method?.DeclaringType ?? 
typeof(KernelFunctionFromPrompt)) ?? NullLogger.Instance, null); + } + + if (type == typeof(IAIServiceSelector)) + { + return ((KernelFunction _, Kernel kernel, KernelArguments _, CancellationToken _) => kernel.ServiceSelector, null); + } + + if (type == typeof(CultureInfo) || type == typeof(IFormatProvider)) + { + return (static (KernelFunction _, Kernel kernel, KernelArguments _, CancellationToken _) => kernel.Culture, null); + } + + if (type == typeof(CancellationToken)) + { + return (static (KernelFunction _, Kernel _, KernelArguments _, CancellationToken cancellationToken) => cancellationToken, null); + } + + // Handle the special FromKernelServicesAttribute, which indicates that the parameter should be sourced from the kernel's services. + // As with the above, these are not reported as part of KernelParameterMetadata because they're not satisfied from arguments. + if (parameter.GetCustomAttribute() is FromKernelServicesAttribute fromKernelAttr) + { + return ((KernelFunction _, Kernel kernel, KernelArguments _, CancellationToken _) => + { + // Try to resolve the service from kernel.Services, using the attribute's key if one was provided. + object? service = kernel.Services is IKeyedServiceProvider keyedServiceProvider ? + keyedServiceProvider.GetKeyedService(type, fromKernelAttr.ServiceKey) : + kernel.Services.GetService(type); + if (service is not null) + { + return service; + } + + // The service wasn't available. If the parameter has a default value (typically null), use that. + if (parameter.HasDefaultValue) + { + return parameter.DefaultValue; + } + + // Otherwise, fail. + throw new KernelException($"Missing service for function parameter '{parameter.Name}'", + new ArgumentException("Missing service for function parameter", parameter.Name)); + }, null); + } + + // Handle parameters to be satisfied from KernelArguments. + + string name = SanitizeMetadataName(parameter.Name ?? ""); + ThrowForInvalidSignatureIf(string.IsNullOrWhiteSpace(name), method, $"Parameter {parameter.Name}'s attribute defines an invalid name."); + + var converter = GetConverter(type); + + object? parameterFunc(KernelFunction _, Kernel kernel, KernelArguments arguments, CancellationToken __) + { + // 1. Use the value of the variable if it exists. + if (arguments.TryGetValue(name, out object? value)) + { + return Process(value); + } + + // 2. Otherwise, use the default value if there is one, sourced either from an attribute or the parameter's default. + if (parameter.HasDefaultValue) + { + return parameter.DefaultValue; + } + + // 3. Otherwise, fail. + throw new KernelException($"Missing argument for function parameter '{name}'", + new ArgumentException("Missing argument for function parameter", name)); + + object? Process(object? 
value) + { + if (!type.IsAssignableFrom(value?.GetType())) + { + if (converter is not null) + { + try + { + return converter(value, kernel.Culture); + } + catch (Exception e) when (!e.IsCriticalException()) + { + throw new ArgumentOutOfRangeException(name, value, e.Message); + } + } + + if (value is not null && TryToDeserializeValue(value, type, out var deserializedValue)) + { + return deserializedValue; + } + } + + return value; + } + } + + sawFirstParameter = true; + + var parameterView = new KernelParameterMetadata(name) + { + Description = parameter.GetCustomAttribute(inherit: true)?.Description, + DefaultValue = parameter.DefaultValue?.ToString(), + IsRequired = !parameter.IsOptional, + ParameterType = type, + }; + + return (parameterFunc, parameterView); + } + + /// + /// Tries to deserialize the given value into an object of the specified target type. + /// + /// The value to be deserialized. + /// The type of the object to deserialize the value into. + /// The deserialized object if the method succeeds; otherwise, null. + /// true if the value is successfully deserialized; otherwise, false. + private static bool TryToDeserializeValue(object value, Type targetType, out object? deserializedValue) + { + try + { + deserializedValue = value switch + { + JsonDocument document => document.Deserialize(targetType), + JsonNode node => node.Deserialize(targetType), + JsonElement element => element.Deserialize(targetType), + // The JSON can be represented by other data types from various libraries. For example, JObject, JToken, and JValue from the Newtonsoft.Json library. + // Since we don't take dependencies on these libraries and don't have access to the types here, + // the only way to deserialize those types is to convert them to a string first by calling the 'ToString' method. + // Attempting to use the 'JsonSerializer.Serialize' method, instead of calling the 'ToString' directly on those types, can lead to unpredictable outcomes. + // For instance, the JObject for { "id": 28 } JSON is serialized into the string "{ "Id": [] }", and the deserialization fails with the + // following exception - "The JSON value could not be converted to System.Int32. Path: $.Id | LineNumber: 0 | BytePositionInLine: 7." + _ => JsonSerializer.Deserialize(value.ToString(), targetType) + }; + + return true; + } + catch (NotSupportedException) + { + // There is no compatible JsonConverter for targetType or its serializable members. + } + catch (JsonException) + { + // The JSON is invalid. + } + + deserializedValue = null; + return false; + } + + /// + /// Gets a delegate for handling the result value of a method, converting it into the to return from the invocation. + /// + private static (Type ReturnType, Func> Marshaler) GetReturnValueMarshalerDelegate(MethodInfo method) + { + // Handle each known return type for the method + Type returnType = method.ReturnType; + + // No return value, either synchronous (void) or asynchronous (Task / ValueTask). 
+ + if (returnType == typeof(void)) + { + return (typeof(void), (static (_, function, _) => + new ValueTask(new FunctionResult(function)))); + } + + if (returnType == typeof(Task)) + { + return (typeof(void), async static (_, function, result) => + { + await ((Task)ThrowIfNullResult(result)).ConfigureAwait(false); + return new FunctionResult(function); + } + ); + } + + if (returnType == typeof(ValueTask)) + { + return (typeof(void), async static (_, function, result) => + { + await ((ValueTask)ThrowIfNullResult(result)).ConfigureAwait(false); + return new FunctionResult(function); + } + ); + } + + // string (which is special as no marshaling is required), either synchronous (string) or asynchronous (Task / ValueTask) + + if (returnType == typeof(string)) + { + return (typeof(string), static (kernel, function, result) => + { + var resultString = (string?)result; + return new ValueTask(new FunctionResult(function, resultString, kernel.Culture)); + } + ); + } + + if (returnType == typeof(Task)) + { + return (typeof(string), async static (kernel, function, result) => + { + var resultString = await ((Task)ThrowIfNullResult(result)).ConfigureAwait(false); + return new FunctionResult(function, resultString, kernel.Culture); + } + ); + } + + if (returnType == typeof(ValueTask)) + { + return (typeof(string), async static (kernel, function, result) => + { + var resultString = await ((ValueTask)ThrowIfNullResult(result)).ConfigureAwait(false); + return new FunctionResult(function, resultString, kernel.Culture); + } + ); + } + + if (returnType == typeof(FunctionResult)) + { + return (typeof(object), static (_, function, result) => + { + var functionResult = (FunctionResult?)result; + return new ValueTask(functionResult ?? new FunctionResult(function)); + } + ); + } + + if (returnType == typeof(Task)) + { + return (typeof(object), async static (_, _, result) => + { + var functionResult = await ((Task)ThrowIfNullResult(result)).ConfigureAwait(false); + return functionResult; + } + ); + } + + if (returnType == typeof(ValueTask)) + { + return (typeof(object), async static (_, _, result) => + { + var functionResult = await ((ValueTask)ThrowIfNullResult(result)).ConfigureAwait(false); + return functionResult; + } + ); + } + + // All other synchronous return types T. 
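A hedged illustration of this branch: a plain synchronous return value (here a hypothetical record type) is wrapped unchanged in a FunctionResult and read back with GetValue<T>(). The record, function name, and argument are illustrative only.

public sealed record Forecast(string City, int TemperatureC);

var getForecast = KernelFunctionFactory.CreateFromMethod(
    (string city) => new Forecast(city, 21), functionName: "GetForecast");

// Assuming a Kernel instance `kernel`:
FunctionResult r = await kernel.InvokeAsync(getForecast, new KernelArguments { ["city"] = "Dublin" });
Forecast? value = r.GetValue<Forecast>();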
+ + if (!returnType.IsGenericType || returnType.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + return (returnType, (kernel, function, result) => + { + return new ValueTask(new FunctionResult(function, result, kernel.Culture)); + } + ); + } + + // All other asynchronous return types + + // Task + if (returnType.GetGenericTypeDefinition() is Type genericTask && + genericTask == typeof(Task<>) && + returnType.GetProperty("Result", BindingFlags.Public | BindingFlags.Instance)?.GetGetMethod() is MethodInfo taskResultGetter) + { + return (taskResultGetter.ReturnType, async (kernel, function, result) => + { + await ((Task)ThrowIfNullResult(result)).ConfigureAwait(false); + + var taskResult = Invoke(taskResultGetter, result, Array.Empty()); + return new FunctionResult(function, taskResult, kernel.Culture); + } + ); + } + + // ValueTask + if (returnType.GetGenericTypeDefinition() is Type genericValueTask && + genericValueTask == typeof(ValueTask<>) && + returnType.GetMethod("AsTask", BindingFlags.Public | BindingFlags.Instance) is MethodInfo valueTaskAsTask && + valueTaskAsTask.ReturnType.GetProperty("Result", BindingFlags.Public | BindingFlags.Instance)?.GetGetMethod() is MethodInfo asTaskResultGetter) + { + return (asTaskResultGetter.ReturnType, async (kernel, function, result) => + { + Task task = (Task)Invoke(valueTaskAsTask, ThrowIfNullResult(result), Array.Empty())!; + await task.ConfigureAwait(false); + + var taskResult = Invoke(asTaskResultGetter, task, Array.Empty()); + return new FunctionResult(function, taskResult, kernel.Culture); + } + ); + } + + // IAsyncEnumerable + if (returnType.GetGenericTypeDefinition() is Type genericAsyncEnumerable && genericAsyncEnumerable == typeof(IAsyncEnumerable<>)) + { + Type elementType = returnType.GetGenericArguments()[0]; + + MethodInfo? getAsyncEnumeratorMethod = typeof(IAsyncEnumerable<>) + .MakeGenericType(elementType) + .GetMethod("GetAsyncEnumerator"); + + if (getAsyncEnumeratorMethod is not null) + { + return (returnType, (kernel, function, result) => + { + var asyncEnumerator = Invoke(getAsyncEnumeratorMethod, result, s_cancellationTokenNoneArray); + + if (asyncEnumerator is not null) + { + return new ValueTask(new FunctionResult(function, asyncEnumerator, kernel.Culture)); + } + + return new ValueTask(new FunctionResult(function)); + } + ); + } + } + + // Unrecognized return type. + throw GetExceptionForInvalidSignature(method, $"Unknown return type {returnType}"); + + // Throws an exception if a result is found to be null unexpectedly + static object ThrowIfNullResult(object? result) => + result ?? + throw new KernelException("Function returned null unexpectedly."); + } + + /// Invokes the MethodInfo with the specified target object and arguments. + private static object? Invoke(MethodInfo method, object? target, object?[]? arguments) + { + object? result = null; + try + { + const BindingFlags BindingFlagsDoNotWrapExceptions = (BindingFlags)0x02000000; // BindingFlags.DoNotWrapExceptions on .NET Core 2.1+, ignored before then + result = method.Invoke(target, BindingFlagsDoNotWrapExceptions, binder: null, arguments, culture: null); + } + catch (TargetInvocationException e) when (e.InnerException is not null) + { + // If we're targeting .NET Framework, such that BindingFlags.DoNotWrapExceptions + // is ignored, the original exception will be wrapped in a TargetInvocationException. + // Unwrap it and throw that original exception, maintaining its stack information. 
+ ExceptionDispatchInfo.Capture(e.InnerException).Throw(); + } + + return result; + } + + /// Gets an exception that can be thrown indicating an invalid signature. + [DoesNotReturn] + private static Exception GetExceptionForInvalidSignature(MethodInfo method, string reason) => + throw new KernelException($"Function '{method.Name}' is not supported by the kernel. {reason}"); + + /// Throws an exception indicating an invalid KernelFunctionFactory signature if the specified condition is not met. + private static void ThrowForInvalidSignatureIf([DoesNotReturnIf(true)] bool condition, MethodInfo method, string reason) + { + if (condition) + { + throw GetExceptionForInvalidSignature(method, reason); + } + } + + /// + /// Gets a converter for type to ty conversion. For example, string to int, string to Guid, double to int, CustomType to string, etc. + /// + /// Specifies the target type into which a source type should be converted. + /// The converter function if the target type is supported; otherwise, null. + /// + /// The conversion function uses whatever TypeConverter is registered for the target type. + /// Conversion is first attempted using the current culture, and if that fails, it tries again + /// with the invariant culture. If both fail, an exception is thrown. + /// + private static Func? GetConverter(Type targetType) => + s_parsers.GetOrAdd(targetType, static targetType => + { + // For nullables, parse as the inner type. We then just need to be careful to treat null as null, + // as the underlying parser might not be expecting null. + bool wasNullable = !targetType.IsValueType; + if (!wasNullable && targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + wasNullable = true; + targetType = Nullable.GetUnderlyingType(targetType)!; + } + + // Finally, look up and use a type converter. Again, special-case null if it was actually Nullable. + if (TypeConverterFactory.GetTypeConverter(targetType) is TypeConverter converter) + { + return (input, cultureInfo) => + { + // This if block returns null if the target ValueType is nullable, or if the target type is a ReferenceType, which is inherently nullable. + // This prevents null from being handled by converters below, which may fail when converting from nulls or to the target type from nulls. + if (input is null && wasNullable) + { + return null; + } + + object? Convert(CultureInfo culture) + { + if (input?.GetType() is Type type && converter.CanConvertFrom(type)) + { + // This line performs string to type conversion + return converter.ConvertFrom(context: null, culture, input); + } + + // This line performs implicit type conversion, e.g., int to long, byte to int, Guid to string, etc. + if (converter.CanConvertTo(targetType)) + { + return converter.ConvertTo(context: null, culture, input, targetType); + } + + // EnumConverter cannot convert integer, so we verify manually + if (targetType.IsEnum && + (input is int || + input is uint || + input is long || + input is ulong || + input is short || + input is ushort || + input is byte || + input is sbyte)) + { + return Enum.ToObject(targetType, input); + } + + throw new InvalidOperationException($"No converter found to convert from {targetType} to {input?.GetType()}."); + } + + // First try to parse using the supplied culture (or current if none was supplied). + // If that fails, try with the invariant culture and allow any exception to propagate. 
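In user terms, this converter machinery is what lets loosely-typed arguments satisfy strongly-typed parameters. A minimal sketch, assuming a Kernel instance `kernel`; the function and argument names are illustrative:

var add = KernelFunctionFactory.CreateFromMethod((int a, int b) => a + b, functionName: "Add");
// The string arguments are converted to int by the TypeConverter-based logic above.
FunctionResult sum = await kernel.InvokeAsync(add, new KernelArguments { ["a"] = "3", ["b"] = "4" });
// sum.GetValue<int>() == 7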
+ try + { + return Convert(cultureInfo); + } + catch (Exception e) when (!e.IsCriticalException() && cultureInfo != CultureInfo.InvariantCulture) + { + return Convert(CultureInfo.InvariantCulture); + } + }; + } + + // Unsupported type. + return null; + }); + + [DebuggerBrowsable(DebuggerBrowsableState.Never)] + private string DebuggerDisplay => string.IsNullOrWhiteSpace(this.Description) ? this.Name : $"{this.Name} ({this.Description})"; + + /// + /// Remove characters from method name that are valid in metadata but invalid for SK. + /// + private static string SanitizeMetadataName(string methodName) => + s_invalidNameCharsRegex.Replace(methodName, "_"); + + /// Regex that flags any character other than ASCII digits or letters or the underscore. + private static readonly Regex s_invalidNameCharsRegex = new("[^0-9A-Za-z_]"); + + /// Parser functions for converting strings to parameter types. + private static readonly ConcurrentDictionary?> s_parsers = new(); + + #endregion +} diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs new file mode 100644 index 000000000000..4415afea8057 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs @@ -0,0 +1,417 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel; + +/// +/// A Semantic Kernel "Semantic" prompt function. +/// +[DebuggerDisplay("{DebuggerDisplay,nq}")] +internal sealed class KernelFunctionFromPrompt : KernelFunction +{ + /// + /// Creates a instance for a prompt specified via a prompt template. + /// + /// Prompt template for the function, defined using the template format. + /// Default execution settings to use when invoking this prompt function. + /// A name for the given function. The name can be referenced in templates and used by the pipeline planner. + /// The description to use for the function. + /// Optional format of the template. Must be provided if a prompt template factory is provided + /// Optional: Prompt template factory + /// Logger factory + /// A function ready to use + public static KernelFunction Create( + string promptTemplate, + Dictionary? executionSettings = null, + string? functionName = null, + string? description = null, + string? templateFormat = null, + IPromptTemplateFactory? promptTemplateFactory = null, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNullOrWhiteSpace(promptTemplate); + + if (promptTemplateFactory is not null) + { + if (string.IsNullOrWhiteSpace(templateFormat)) + { + throw new ArgumentException($"Template format is required when providing a {nameof(promptTemplateFactory)}", nameof(templateFormat)); + } + } + + var promptConfig = new PromptTemplateConfig + { + TemplateFormat = templateFormat ?? PromptTemplateConfig.SemanticKernelTemplateFormat, + Name = functionName, + Description = description ?? 
"Generic function, unknown purpose", + Template = promptTemplate + }; + + if (executionSettings is not null) + { + promptConfig.ExecutionSettings = executionSettings; + } + + var factory = promptTemplateFactory ?? new KernelPromptTemplateFactory(loggerFactory); + + return Create( + promptTemplate: factory.Create(promptConfig), + promptConfig: promptConfig, + loggerFactory: loggerFactory); + } + + /// + /// Creates a instance for a prompt specified via a prompt template configuration. + /// + /// Prompt template configuration + /// Optional: Prompt template factory + /// Logger factory + /// A function ready to use + public static KernelFunction Create( + PromptTemplateConfig promptConfig, + IPromptTemplateFactory? promptTemplateFactory = null, + ILoggerFactory? loggerFactory = null) + { + var factory = promptTemplateFactory ?? new KernelPromptTemplateFactory(loggerFactory); + + return Create( + promptTemplate: factory.Create(promptConfig), + promptConfig: promptConfig, + loggerFactory: loggerFactory); + } + + /// + /// Creates a instance for a prompt specified via a prompt template and a prompt template configuration. + /// + /// Prompt template for the function, defined using the template format. + /// Prompt template configuration. + /// Logger factory + /// A function ready to use + public static KernelFunction Create( + IPromptTemplate promptTemplate, + PromptTemplateConfig promptConfig, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNull(promptTemplate); + Verify.NotNull(promptConfig); + + return new KernelFunctionFromPrompt( + template: promptTemplate, + promptConfig: promptConfig, + loggerFactory: loggerFactory); + } + + /// j + protected override async ValueTask InvokeCoreAsync( + Kernel kernel, + KernelArguments arguments, + CancellationToken cancellationToken = default) + { + this.AddDefaultValues(arguments); + + var result = await this.RenderPromptAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + +#pragma warning disable CS0612 // Events are deprecated + if (result.RenderedEventArgs?.Cancel is true) + { + throw new OperationCanceledException($"A {nameof(Kernel)}.{nameof(Kernel.PromptRendered)} event handler requested cancellation after prompt rendering."); + } +#pragma warning restore CS0612 // Events are deprecated + + if (result.RenderedContext?.Cancel is true) + { + throw new OperationCanceledException("A prompt filter requested cancellation after prompt rendering."); + } + + if (result.AIService is IChatCompletionService chatCompletion) + { + var chatContent = await chatCompletion.GetChatMessageContentAsync(result.RenderedPrompt, result.ExecutionSettings, kernel, cancellationToken).ConfigureAwait(false); + this.CaptureUsageDetails(chatContent.ModelId, chatContent.Metadata, this._logger); + return new FunctionResult(this, chatContent, kernel.Culture, chatContent.Metadata); + } + + if (result.AIService is ITextGenerationService textGeneration) + { + var textContent = await textGeneration.GetTextContentWithDefaultParserAsync(result.RenderedPrompt, result.ExecutionSettings, kernel, cancellationToken).ConfigureAwait(false); + this.CaptureUsageDetails(textContent.ModelId, textContent.Metadata, this._logger); + return new FunctionResult(this, textContent, kernel.Culture, textContent.Metadata); + } + + // The service selector didn't find an appropriate service. This should only happen with a poorly implemented selector. + throw new NotSupportedException($"The AI service {result.AIService.GetType()} is not supported. 
Supported services are {typeof(IChatCompletionService)} and {typeof(ITextGenerationService)}"); + } + + protected override async IAsyncEnumerable InvokeStreamingCoreAsync( + Kernel kernel, + KernelArguments arguments, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + this.AddDefaultValues(arguments); + + var result = await this.RenderPromptAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + +#pragma warning disable CS0612 // Events are deprecated + if (result.RenderedEventArgs?.Cancel is true) + { + yield break; + } +#pragma warning restore CS0612 // Events are deprecated + + if (result.RenderedContext?.Cancel is true) + { + yield break; + } + + IAsyncEnumerable? asyncReference = null; + + if (result.AIService is IChatCompletionService chatCompletion) + { + asyncReference = chatCompletion.GetStreamingChatMessageContentsAsync(result.RenderedPrompt, result.ExecutionSettings, kernel, cancellationToken); + } + else if (result.AIService is ITextGenerationService textGeneration) + { + asyncReference = textGeneration.GetStreamingTextContentsWithDefaultParserAsync(result.RenderedPrompt, result.ExecutionSettings, kernel, cancellationToken); + } + else + { + // The service selector didn't find an appropriate service. This should only happen with a poorly implemented selector. + throw new NotSupportedException($"The AI service {result.AIService.GetType()} is not supported. Supported services are {typeof(IChatCompletionService)} and {typeof(ITextGenerationService)}"); + } + + await foreach (var content in asyncReference) + { + cancellationToken.ThrowIfCancellationRequested(); + + yield return typeof(TResult) switch + { + _ when typeof(TResult) == typeof(string) + => (TResult)(object)content.ToString(), + + _ when content is TResult contentAsT + => contentAsT, + + _ when content.InnerContent is TResult innerContentAsT + => innerContentAsT, + + _ when typeof(TResult) == typeof(byte[]) + => (TResult)(object)content.ToByteArray(), + + _ => throw new NotSupportedException($"The specific type {typeof(TResult)} is not supported. Support types are {typeof(StreamingTextContent)}, string, byte[], or a matching type for {typeof(StreamingTextContent)}.{nameof(StreamingTextContent.InnerContent)} property") + }; + } + + // There is no post cancellation check to override the result as the stream data was already sent. + } + + /// + /// JSON serialized string representation of the function. + /// + public override string ToString() => JsonSerializer.Serialize(this); + + private KernelFunctionFromPrompt( + IPromptTemplate template, + PromptTemplateConfig promptConfig, + ILoggerFactory? loggerFactory = null) : base( + promptConfig.Name ?? CreateRandomFunctionName(), + promptConfig.Description ?? string.Empty, + promptConfig.GetKernelParametersMetadata(), + promptConfig.GetKernelReturnParameterMetadata(), + promptConfig.ExecutionSettings) + { + this._logger = loggerFactory?.CreateLogger(typeof(KernelFunctionFactory)) ?? NullLogger.Instance; + + this._promptTemplate = template; + this._promptConfig = promptConfig; + } + + #region private + + private readonly ILogger _logger; + private readonly PromptTemplateConfig _promptConfig; + private readonly IPromptTemplate _promptTemplate; + + [DebuggerBrowsable(DebuggerBrowsableState.Never)] + private string DebuggerDisplay => string.IsNullOrWhiteSpace(this.Description) ? this.Name : $"{this.Name} ({this.Description})"; + + /// The measurement tag name for the model used. 
+ private const string MeasurementModelTagName = "semantic_kernel.function.model_id"; + + /// to record function invocation prompt token usage. + private static readonly Histogram s_invocationTokenUsagePrompt = s_meter.CreateHistogram( + name: "semantic_kernel.function.invocation.token_usage.prompt", + unit: "{token}", + description: "Measures the prompt token usage"); + + /// to record function invocation completion token usage. + private static readonly Histogram s_invocationTokenUsageCompletion = s_meter.CreateHistogram( + name: "semantic_kernel.function.invocation.token_usage.completion", + unit: "{token}", + description: "Measures the completion token usage"); + + /// Add default values to the arguments if an argument is not defined + private void AddDefaultValues(KernelArguments arguments) + { + foreach (var parameter in this._promptConfig.InputVariables) + { + if (!arguments.ContainsName(parameter.Name) && parameter.Default != null) + { + arguments[parameter.Name] = parameter.Default; + } + } + } + + private async Task RenderPromptAsync(Kernel kernel, KernelArguments arguments, CancellationToken cancellationToken) + { + var serviceSelector = kernel.ServiceSelector; + IAIService? aiService; + + // Try to use IChatCompletionService. + if (serviceSelector.TrySelectAIService( + kernel, this, arguments, + out IChatCompletionService? chatService, out PromptExecutionSettings? executionSettings)) + { + aiService = chatService; + } + else + { + // If IChatCompletionService isn't available, try to fallback to ITextGenerationService, + // throwing if it's not available. + (aiService, executionSettings) = serviceSelector.SelectAIService(kernel, this, arguments); + } + + Verify.NotNull(aiService); + +#pragma warning disable CS0618 // Events are deprecated + kernel.OnPromptRendering(this, arguments); +#pragma warning restore CS0618 // Events are deprecated + + kernel.OnPromptRenderingFilter(this, arguments); + + var renderedPrompt = await this._promptTemplate.RenderAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + if (this._logger.IsEnabled(LogLevel.Trace)) + { + this._logger.LogTrace("Rendered prompt: {Prompt}", renderedPrompt); + } + +#pragma warning disable CS0618 // Events are deprecated + var renderedEventArgs = kernel.OnPromptRendered(this, arguments, renderedPrompt); + + if (renderedEventArgs is not null && + !renderedEventArgs.Cancel && + renderedEventArgs.RenderedPrompt != renderedPrompt) + { + renderedPrompt = renderedEventArgs.RenderedPrompt; + + if (this._logger.IsEnabled(LogLevel.Trace)) + { + this._logger.LogTrace("Rendered prompt changed by event handler: {Prompt}", renderedEventArgs.RenderedPrompt); + } + } +#pragma warning restore CS0618 // Events are deprecated + + var renderedContext = kernel.OnPromptRenderedFilter(this, arguments, renderedPrompt); + + if (renderedContext is not null && + !renderedContext.Cancel && + renderedContext.RenderedPrompt != renderedPrompt) + { + renderedPrompt = renderedContext.RenderedPrompt; + + if (this._logger.IsEnabled(LogLevel.Trace)) + { + this._logger.LogTrace("Rendered prompt changed by prompt filter: {Prompt}", renderedContext.RenderedPrompt); + } + } + + return new(aiService, renderedPrompt) + { + ExecutionSettings = executionSettings, + RenderedEventArgs = renderedEventArgs, + RenderedContext = renderedContext + }; + } + + /// Create a random, valid function name. + private static string CreateRandomFunctionName() => $"func{Guid.NewGuid():N}"; + + /// + /// Captures usage details, including token information. 
+ /// + private void CaptureUsageDetails(string? modelId, IReadOnlyDictionary? metadata, ILogger logger) + { + if (!logger.IsEnabled(LogLevel.Information) && + !s_invocationTokenUsageCompletion.Enabled && + !s_invocationTokenUsagePrompt.Enabled) + { + // Bail early to avoid unnecessary work. + return; + } + + if (string.IsNullOrWhiteSpace(modelId)) + { + logger.LogInformation("No model ID provided to capture usage details."); + return; + } + + if (metadata is null) + { + logger.LogInformation("No metadata provided to capture usage details."); + return; + } + + if (!metadata.TryGetValue("Usage", out object? usageObject) || usageObject is null) + { + logger.LogInformation("No usage details provided to capture usage details."); + return; + } + + var jsonObject = default(JsonElement); + try + { + jsonObject = JsonSerializer.SerializeToElement(usageObject); + } + catch (Exception ex) when (ex is NotSupportedException) + { + logger.LogWarning(ex, "Error while parsing usage details from model result."); + return; + } + + if (jsonObject.TryGetProperty("PromptTokens", out var promptTokensJson) && + promptTokensJson.TryGetInt32(out int promptTokens) && + jsonObject.TryGetProperty("CompletionTokens", out var completionTokensJson) && + completionTokensJson.TryGetInt32(out int completionTokens)) + { + logger.LogInformation( + "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}.", + promptTokens, completionTokens); + + TagList tags = new() { + { MeasurementFunctionTagName, this.Name }, + { MeasurementModelTagName, modelId } + }; + + s_invocationTokenUsagePrompt.Record(promptTokens, in tags); + s_invocationTokenUsageCompletion.Record(completionTokens, in tags); + } + else + { + logger.LogWarning("Unable to get token details from model result."); + } + } + + #endregion +} diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs new file mode 100644 index 000000000000..6ad62f9e122a --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Functions/KernelPluginFactory.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Reflection; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides static factory methods for creating commonly-used plugin implementations. +/// +public static class KernelPluginFactory +{ + /// Creates a plugin that wraps a new instance of the specified type . + /// Specifies the type of the object to wrap. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// + /// The to use for resolving any required services, such as an + /// and any services required to satisfy a constructor on . + /// + /// A containing s for all relevant members of . + /// + /// Public methods decorated with will be included in the plugin. + /// Attributed methods must all have different names; overloads are not supported. + /// + public static KernelPlugin CreateFromType(string? pluginName = null, IServiceProvider? serviceProvider = null) + { + serviceProvider ??= EmptyServiceProvider.Instance; + return CreateFromObject(ActivatorUtilities.CreateInstance(serviceProvider)!, pluginName, serviceProvider?.GetService()); + } + + /// Creates a plugin that wraps the specified target object. 
+ /// The instance of the class to be wrapped. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// The to use for logging. If null, no logging will be performed. + /// A containing s for all relevant members of . + /// + /// Public methods decorated with will be included in the plugin. + /// Attributed methods must all have different names; overloads are not supported. + /// + public static KernelPlugin CreateFromObject(object target, string? pluginName = null, ILoggerFactory? loggerFactory = null) + { + Verify.NotNull(target); + + pluginName ??= target.GetType().Name; + Verify.ValidPluginName(pluginName); + + MethodInfo[] methods = target.GetType().GetMethods(BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static); + + // Filter out non-KernelFunctions and fail if two functions have the same name (with or without the same casing). + var functions = new List(); + foreach (MethodInfo method in methods) + { + if (method.GetCustomAttribute() is not null) + { + functions.Add(KernelFunctionFactory.CreateFromMethod(method, target, loggerFactory: loggerFactory)); + } + } + if (functions.Count == 0) + { + throw new ArgumentException($"The {target.GetType()} instance doesn't expose any public [KernelFunction]-attributed methods."); + } + + if (loggerFactory?.CreateLogger(target.GetType()) is ILogger logger && + logger.IsEnabled(LogLevel.Trace)) + { + logger.LogTrace("Created plugin {PluginName} with {IncludedFunctions} [KernelFunction] methods out of {TotalMethods} methods found.", pluginName, functions.Count, methods.Length); + } + + var description = target.GetType().GetCustomAttribute(inherit: true)?.Description; + + return KernelPluginFactory.CreateFromFunctions(pluginName, description, functions); + } + + /// Initializes the new plugin from the provided name and function collection. + /// The name for the plugin. + /// The initial functions to be available as part of the plugin. + /// A containing the functions provided in . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + public static KernelPlugin CreateFromFunctions(string pluginName, IEnumerable? functions) => + CreateFromFunctions(pluginName, description: null, functions); + + /// Initializes the new plugin from the provided name, description, and function collection. + /// The name for the plugin. + /// A description of the plugin. + /// The initial functions to be available as part of the plugin. + /// A containing the functions provided in . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + public static KernelPlugin CreateFromFunctions(string pluginName, string? description = null, IEnumerable? functions = null) => + new DefaultKernelPlugin(pluginName, description, functions); +} diff --git a/dotnet/src/SemanticKernel.Core/Functions/NativeFunction.cs b/dotnet/src/SemanticKernel.Core/Functions/NativeFunction.cs deleted file mode 100644 index da7ff333cd7a..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/NativeFunction.cs +++ /dev/null @@ -1,900 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
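A hedged usage sketch for the KernelPluginFactory shown above; TimePlugin and its member are illustrative types invented for the example, not part of this change.

// Assumes: using System; using System.ComponentModel; using Microsoft.SemanticKernel;
public sealed class TimePlugin
{
    [KernelFunction, Description("Returns the current UTC date and time in RFC1123 format.")]
    public string UtcNow() => DateTime.UtcNow.ToString("R");
}

KernelPlugin timePlugin = KernelPluginFactory.CreateFromType<TimePlugin>();
// Equivalent, when an instance already exists:
// KernelPlugin timePlugin = KernelPluginFactory.CreateFromObject(new TimePlugin(), pluginName: "Time");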
- -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.ComponentModel; -using System.Diagnostics; -using System.Diagnostics.CodeAnalysis; -using System.Globalization; -using System.Linq; -using System.Reflection; -using System.Text.Json; -using System.Text.RegularExpressions; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -#pragma warning disable format - -/// -/// Standard Semantic Kernel callable function. -/// SKFunction is used to extend one C# , , , -/// with additional methods required by the kernel. -/// -[DebuggerDisplay("{DebuggerDisplay,nq}")] -internal sealed class NativeFunction : ISKFunction, IDisposable -{ - /// - public string Name { get; } - - /// - public string PluginName { get; } - - /// - public string Description { get; } - - /// - /// List of function parameters - /// - public IReadOnlyList Parameters { get; } - - /// - /// Create a native function instance, wrapping a native object method - /// - /// Signature of the method to invoke - /// Object containing the method to invoke - /// SK plugin name - /// The to use for logging. If null, no logging will be performed. - /// SK function instance - public static ISKFunction FromNativeMethod( - MethodInfo method, - object? target = null, - string? pluginName = null, - ILoggerFactory? loggerFactory = null) - { - if (!method.IsStatic && target is null) - { - throw new ArgumentNullException(nameof(target), "Argument cannot be null for non-static methods"); - } - - if (string.IsNullOrWhiteSpace(pluginName)) - { - pluginName = FunctionCollection.GlobalFunctionsPluginName; - } - - ILogger logger = loggerFactory?.CreateLogger(method.DeclaringType ?? typeof(SKFunction)) ?? NullLogger.Instance; - - MethodDetails methodDetails = GetMethodDetails(method, target, pluginName!, logger); - - return new NativeFunction( - delegateFunction: methodDetails.Function, - parameters: methodDetails.Parameters, - pluginName: pluginName!, - functionName: methodDetails.Name, - description: methodDetails.Description, - logger: logger); - } - - /// - /// Create a native function instance, wrapping a delegate function - /// - /// Function to invoke - /// SK plugin name - /// SK function name - /// SK function description - /// SK function parameters - /// The to use for logging. If null, no logging will be performed. - /// SK function instance - public static ISKFunction FromNativeFunction( - Delegate nativeFunction, - string? pluginName = null, - string? functionName = null, - string? description = null, - IEnumerable? parameters = null, - ILoggerFactory? loggerFactory = null) - { - ILogger logger = loggerFactory is not null ? 
loggerFactory.CreateLogger(typeof(ISKFunction)) : NullLogger.Instance; - - if (string.IsNullOrWhiteSpace(pluginName)) - { - pluginName = FunctionCollection.GlobalFunctionsPluginName; - } - - MethodDetails methodDetails = GetMethodDetails(nativeFunction.Method, nativeFunction.Target, pluginName!, logger); - - functionName ??= methodDetails.Name; - parameters ??= methodDetails.Parameters; - description ??= methodDetails.Description; - - return new NativeFunction( - delegateFunction: methodDetails.Function, - parameters: parameters.ToList(), - description: description, - pluginName: pluginName!, - functionName: functionName, - logger: logger); - } - - /// - public FunctionView Describe() - => this._view.Value; - - /// - public async Task InvokeAsync( - SKContext context, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - try - { - return await this._function(null, requestSettings, context, cancellationToken).ConfigureAwait(false); - } - catch (Exception e) when (!e.IsCriticalException()) - { - this._logger.LogError(e, "Native function {Plugin}.{Name} execution failed with error {Error}", this.PluginName, this.Name, e.Message); - throw; - } - } - - /// - /// Dispose of resources. - /// - public void Dispose() - { - } - - /// - /// JSON serialized string representation of the function. - /// - public override string ToString() - => this.ToString(false); - - /// - /// JSON serialized string representation of the function. - /// - public string ToString(bool writeIndented) - => JsonSerializer.Serialize(this, options: writeIndented ? s_toStringIndentedSerialization : s_toStringStandardSerialization); - - #region private - - private static readonly JsonSerializerOptions s_toStringStandardSerialization = new(); - private static readonly JsonSerializerOptions s_toStringIndentedSerialization = new() { WriteIndented = true }; - private readonly NativeFunctionDelegate _function; - private readonly ILogger _logger; - - private struct MethodDetails - { - public NativeFunctionDelegate Function { get; set; } - public List Parameters { get; set; } - public string Name { get; set; } - public string Description { get; set; } - } - - internal NativeFunction( - NativeFunctionDelegate delegateFunction, - IReadOnlyList parameters, - string pluginName, - string functionName, - string description, - ILogger logger) - { - Verify.NotNull(delegateFunction); - Verify.ValidPluginName(pluginName); - Verify.ValidFunctionName(functionName); - - this._logger = logger; - - this._function = delegateFunction; - this.Parameters = parameters.ToArray(); - Verify.ParametersUniqueness(this.Parameters); - - this.Name = functionName; - this.PluginName = pluginName; - this.Description = description; - - this._view = new(() => new (functionName, pluginName, description) { Parameters = this.Parameters }); - } - - /// - /// Throw an exception if the function is not semantic, use this method when some logic makes sense only for semantic functions. - /// - /// - [DoesNotReturn] - private void ThrowNotSemantic() - { - this._logger.LogError("The function is not semantic"); - throw new SKException("Invalid operation, the method requires a semantic function"); - } - - private static MethodDetails GetMethodDetails( - MethodInfo method, - object? target, - string pluginName, - ILogger? logger = null) - { - Verify.NotNull(method); - - // Get the name to use for the function. If the function has an SKName attribute, we use that. 
- // Otherwise, we use the name of the method, but strip off any "Async" suffix if it's {Value}Task-returning. - // We don't apply any heuristics to the value supplied by SKName so that it can always be used - // as a definitive override. - string? functionName = method.GetCustomAttribute(inherit: true)?.Name?.Trim(); - if (string.IsNullOrEmpty(functionName)) - { - functionName = SanitizeMetadataName(method.Name!); - Verify.ValidFunctionName(functionName); - - if (IsAsyncMethod(method) && - functionName.EndsWith("Async", StringComparison.Ordinal) && - functionName.Length > "Async".Length) - { - functionName = functionName.Substring(0, functionName.Length - "Async".Length); - } - } - - string? description = method.GetCustomAttribute(inherit: true)?.Description; - - var result = new MethodDetails - { - Name = functionName!, - Description = description ?? string.Empty, - }; - - (result.Function, result.Parameters) = GetDelegateInfo(functionName!, pluginName, target, method); - - logger?.LogTrace("Method '{0}' found", result.Name); - - return result; - } - - /// Gets whether a method has a known async return type. - private static bool IsAsyncMethod(MethodInfo method) - { - Type t = method.ReturnType; - - if (t == typeof(Task) || t == typeof(ValueTask)) - { - return true; - } - - if (t.IsGenericType) - { - t = t.GetGenericTypeDefinition(); - if (t == typeof(Task<>) || t == typeof(ValueTask<>)) - { - return true; - } - } - - return false; - } - - // Inspect a method and returns the corresponding delegate and related info - private static (NativeFunctionDelegate function, List) GetDelegateInfo( - string functionName, - string pluginName, - object? instance, - MethodInfo method) - { - ThrowForInvalidSignatureIf(method.IsGenericMethodDefinition, method, "Generic methods are not supported"); - - var stringParameterViews = new List(); - var parameters = method.GetParameters(); - - // Get marshaling funcs for parameters and build up the parameter views. - var parameterFuncs = new Func[parameters.Length]; - bool sawFirstParameter = false, hasSKContextParam = false, hasCancellationTokenParam = false, hasLoggerParam = false, hasMemoryParam = false, hasCultureParam = false; - for (int i = 0; i < parameters.Length; i++) - { - (parameterFuncs[i], ParameterView? parameterView) = GetParameterMarshalerDelegate( - method, parameters[i], - ref sawFirstParameter, ref hasSKContextParam, ref hasCancellationTokenParam, ref hasLoggerParam, ref hasMemoryParam, ref hasCultureParam); - if (parameterView is not null) - { - stringParameterViews.Add(parameterView); - } - } - - // Get marshaling func for the return value. - Func> returnFunc = GetReturnValueMarshalerDelegate(method); - - // Create the func - Task Function(ITextCompletion? text, AIRequestSettings? requestSettings, SKContext context, CancellationToken cancellationToken) - { - // Create the arguments. - object?[] args = parameterFuncs.Length != 0 ? new object?[parameterFuncs.Length] : Array.Empty(); - for (int i = 0; i < args.Length; i++) - { - args[i] = parameterFuncs[i](context, cancellationToken); - } - - // Invoke the method. - object? result = method.Invoke(instance, args); - - // Extract and return the result. - return returnFunc(functionName, pluginName, result, context); - } - - // Check for param names conflict - Verify.ParametersUniqueness(stringParameterViews); - - // Return the function and its parameter views. - return (Function, stringParameterViews); - } - - /// - /// Gets a delegate for handling the marshaling of a parameter. 
- /// - private static (Func, ParameterView?) GetParameterMarshalerDelegate( - MethodInfo method, ParameterInfo parameter, - ref bool sawFirstParameter, ref bool hasSKContextParam, ref bool hasCancellationTokenParam, ref bool hasLoggerParam, ref bool hasMemoryParam, ref bool hasCultureParam) - { - Type type = parameter.ParameterType; - - // Handle special types based on SKContext data. These can each show up at most once in the method signature, - // with the SKContext itself or the primary data from it mapped directly into the method's parameter. - // They do not get parameter views as they're not supplied from context variables. - - if (type == typeof(SKContext)) - { - TrackUniqueParameterType(ref hasSKContextParam, method, $"At most one {nameof(SKContext)} parameter is permitted."); - return (static (SKContext context, CancellationToken _) => context, null); - } - - if (type == typeof(ILogger) || type == typeof(ILoggerFactory)) - { - TrackUniqueParameterType(ref hasLoggerParam, method, $"At most one {nameof(ILogger)}/{nameof(ILoggerFactory)} parameter is permitted."); - return type == typeof(ILogger) ? - ((SKContext context, CancellationToken _) => context.LoggerFactory.CreateLogger(method?.DeclaringType ?? typeof(SKFunction)), null) : - ((SKContext context, CancellationToken _) => context.LoggerFactory, null); - } - - if (type == typeof(CultureInfo) || type == typeof(IFormatProvider)) - { - TrackUniqueParameterType(ref hasCultureParam, method, $"At most one {nameof(CultureInfo)}/{nameof(IFormatProvider)} parameter is permitted."); - return (static (SKContext context, CancellationToken _) => context.Culture, null); - } - - if (type == typeof(CancellationToken)) - { - TrackUniqueParameterType(ref hasCancellationTokenParam, method, $"At most one {nameof(CancellationToken)} parameter is permitted."); - return (static (SKContext _, CancellationToken cancellationToken) => cancellationToken, null); - } - - // Handle context variables. These are supplied from the SKContext's Variables dictionary. - - if (!type.IsByRef && GetParser(type) is Func parser) - { - // Use either the parameter's name or an override from an applied SKName attribute. - SKNameAttribute? nameAttr = parameter.GetCustomAttribute(inherit: true); - string name = nameAttr?.Name?.Trim() ?? SanitizeMetadataName(parameter.Name); - bool nameIsInput = name.Equals("input", StringComparison.OrdinalIgnoreCase); - ThrowForInvalidSignatureIf(name.Length == 0, method, $"Parameter {parameter.Name}'s context attribute defines an invalid name."); - ThrowForInvalidSignatureIf(sawFirstParameter && nameIsInput, method, "Only the first parameter may be named 'input'"); - - // Use either the parameter's optional default value as contained in parameter metadata (e.g. `string s = "hello"`) - // or an override from an applied SKParameter attribute. Note that a default value may be null. - DefaultValueAttribute defaultValueAttribute = parameter.GetCustomAttribute(inherit: true); - bool hasDefaultValue = defaultValueAttribute is not null; - object? defaultValue = defaultValueAttribute?.Value; - if (!hasDefaultValue && parameter.HasDefaultValue) - { - hasDefaultValue = true; - defaultValue = parameter.DefaultValue; - } - - if (hasDefaultValue) - { - // If we got a default value, make sure it's of the right type. 
This currently supports - // null values if the target type is a reference type or a Nullable, strings, - // anything that can be parsed from a string via a registered TypeConverter, - // and a value that's already the same type as the parameter. - if (defaultValue is string defaultStringValue && defaultValue.GetType() != typeof(string)) - { - // Invariant culture is used here as this value comes from the C# source - // and it should be deterministic across cultures. - defaultValue = parser(defaultStringValue, CultureInfo.InvariantCulture); - } - else - { - ThrowForInvalidSignatureIf( - defaultValue is null && type.IsValueType && Nullable.GetUnderlyingType(type) is null, - method, - $"Type {type} is a non-nullable value type but a null default value was specified."); - ThrowForInvalidSignatureIf( - defaultValue is not null && !type.IsAssignableFrom(defaultValue.GetType()), - method, - $"Default value {defaultValue} for parameter {name} is not assignable to type {type}."); - } - } - - bool fallBackToInput = !sawFirstParameter && !nameIsInput; - object? parameterFunc(SKContext context, CancellationToken _) - { - // 1. Use the value of the variable if it exists. - if (context.Variables.TryGetValue(name, out string? value)) - { - return Process(value); - } - - // 2. Otherwise, use the default value if there is one, sourced either from an attribute or the parameter's default. - if (hasDefaultValue) - { - return defaultValue; - } - - // 3. Otherwise, use "input" if this is the first (or only) parameter. - if (fallBackToInput) - { - return Process(context.Variables.Input); - } - - // 4. Otherwise, fail. - throw new SKException($"Missing value for parameter '{name}'", - new ArgumentException("Missing value function parameter", name)); - - object ? Process(string value) - { - if (type == typeof(string)) - { - return value; - } - - try - { - return parser(value, context.Culture); - } - catch (Exception e) when (!e.IsCriticalException()) - { - throw new ArgumentOutOfRangeException(name, value, e.Message); - } - } - } - - sawFirstParameter = true; - - var parameterView = new ParameterView( - name, - parameter.GetCustomAttribute(inherit: true)?.Description ?? string.Empty, - defaultValue?.ToString() ?? string.Empty, - IsRequired: !parameter.IsOptional); - - return (parameterFunc, parameterView); - } - - // Fail for unknown parameter types. - throw GetExceptionForInvalidSignature(method, $"Unknown parameter type {parameter.ParameterType}"); - } - - /// - /// Gets a delegate for handling the result value of a method, converting it into the to return from the invocation. - /// - private static Func> GetReturnValueMarshalerDelegate(MethodInfo method) - { - // Handle each known return type for the method - Type returnType = method.ReturnType; - - // No return value, either synchronous (void) or asynchronous (Task / ValueTask). 
- - if (returnType == typeof(void)) - { - return static (functionName, pluginName, result, context) => - Task.FromResult(new FunctionResult(functionName, pluginName, context)); - } - - if (returnType == typeof(Task)) - { - return async static (functionName, pluginName, result, context) => - { - await ((Task)ThrowIfNullResult(result)).ConfigureAwait(false); - return new FunctionResult(functionName, pluginName, context); - }; - } - - if (returnType == typeof(ValueTask)) - { - return async static (functionName, pluginName, result, context) => - { - await ((ValueTask)ThrowIfNullResult(result)).ConfigureAwait(false); - return new FunctionResult(functionName, pluginName, context); - }; - } - - // SKContext, either synchronous (SKContext) or asynchronous (Task / ValueTask). - - if (returnType == typeof(SKContext)) - { - return static (functionName, pluginName, result, _) => - { - var context = (SKContext)ThrowIfNullResult(result); - return Task.FromResult(new FunctionResult(functionName, pluginName, context, context.Result)); - }; - } - - if (returnType == typeof(Task)) - { - return static async (functionName, pluginName, result, _) => - { - var context = await ((Task)ThrowIfNullResult(result)).ConfigureAwait(false); - return new FunctionResult(functionName, pluginName, context, context.Result); - }; - } - - if (returnType == typeof(ValueTask)) - { - return static async (functionName, pluginName, result, _) => - { - var context = await ((ValueTask)ThrowIfNullResult(result)).ConfigureAwait(false); - return new FunctionResult(functionName, pluginName, context, context); - }; - } - - // string (which is special as no marshaling is required), either synchronous (string) or asynchronous (Task / ValueTask) - - if (returnType == typeof(string)) - { - return static (functionName, pluginName, result, context) => - { - var resultString = (string?)result; - context.Variables.Update(resultString); - return Task.FromResult(new FunctionResult(functionName, pluginName, context, resultString)); - }; - } - - if (returnType == typeof(Task)) - { - return async static (functionName, pluginName, result, context) => - { - var resultString = await ((Task)ThrowIfNullResult(result)).ConfigureAwait(false); - context.Variables.Update(resultString); - return new FunctionResult(functionName, pluginName, context, resultString); - }; - } - - if (returnType == typeof(ValueTask)) - { - return async static (functionName, pluginName, result, context) => - { - var resultString = await ((ValueTask)ThrowIfNullResult(result)).ConfigureAwait(false); - context.Variables.Update(resultString); - return new FunctionResult(functionName, pluginName, context, resultString); - }; - } - - // All other synchronous return types T. 
- - if (!returnType.IsGenericType || returnType.GetGenericTypeDefinition() == typeof(Nullable<>)) - { - if (GetFormatter(returnType) is not Func formatter) - { - throw GetExceptionForInvalidSignature(method, $"Unknown return type {returnType}"); - } - - return (functionName, pluginName, result, context) => - { - context.Variables.Update(formatter(result, context.Culture)); - return Task.FromResult(new FunctionResult(functionName, pluginName, context, result)); - }; - } - - // All other asynchronous return types - - // Task - if (returnType.GetGenericTypeDefinition() is Type genericTask && - genericTask == typeof(Task<>) && - returnType.GetProperty("Result", BindingFlags.Public | BindingFlags.Instance)?.GetGetMethod() is MethodInfo taskResultGetter && - GetFormatter(taskResultGetter.ReturnType) is Func taskResultFormatter) - { - return async (functionName, pluginName, result, context) => - { - await ((Task)ThrowIfNullResult(result)).ConfigureAwait(false); - - var taskResult = taskResultGetter.Invoke(result!, Array.Empty()); - - context.Variables.Update(taskResultFormatter(taskResult, context.Culture)); - return new FunctionResult(functionName, pluginName, context, taskResult); - }; - } - - // ValueTask - if (returnType.GetGenericTypeDefinition() is Type genericValueTask && - genericValueTask == typeof(ValueTask<>) && - returnType.GetMethod("AsTask", BindingFlags.Public | BindingFlags.Instance) is MethodInfo valueTaskAsTask && - valueTaskAsTask.ReturnType.GetProperty("Result", BindingFlags.Public | BindingFlags.Instance)?.GetGetMethod() is MethodInfo asTaskResultGetter && - GetFormatter(asTaskResultGetter.ReturnType) is Func asTaskResultFormatter) - { - return async (functionName, pluginName, result, context) => - { - Task task = (Task)valueTaskAsTask.Invoke(ThrowIfNullResult(result), Array.Empty()); - await task.ConfigureAwait(false); - - var taskResult = asTaskResultGetter.Invoke(task!, Array.Empty()); - - context.Variables.Update(asTaskResultFormatter(taskResult, context.Culture)); - return new FunctionResult(functionName, pluginName, context, taskResult); - }; - } - - // IAsyncEnumerable - if (returnType.GetGenericTypeDefinition() is Type genericAsyncEnumerable && genericAsyncEnumerable == typeof(IAsyncEnumerable<>)) - { - Type elementType = returnType.GetGenericArguments()[0]; - - MethodInfo getAsyncEnumeratorMethod = typeof(IAsyncEnumerable<>) - .MakeGenericType(elementType) - .GetMethod("GetAsyncEnumerator"); - - if (getAsyncEnumeratorMethod is not null) - { - return (functionName, pluginName, result, context) => - { - var asyncEnumerator = getAsyncEnumeratorMethod.Invoke(result, new object[] { default(CancellationToken) }); - - if (asyncEnumerator is not null) - { - return Task.FromResult(new FunctionResult(functionName, pluginName, context, asyncEnumerator)); - } - - return Task.FromResult(new FunctionResult(functionName, pluginName, context)); - }; - } - } - - // Unrecognized return type. - throw GetExceptionForInvalidSignature(method, $"Unknown return type {returnType}"); - - // Throws an exception if a result is found to be null unexpectedly - static object ThrowIfNullResult(object? result) => - result ?? - throw new SKException("Function returned null unexpectedly."); - } - - /// Gets an exception that can be thrown indicating an invalid signature. - [DoesNotReturn] - private static Exception GetExceptionForInvalidSignature(MethodInfo method, string reason) => - throw new SKException($"Function '{method.Name}' is not supported by the kernel. 
{reason}"); - - /// Throws an exception indicating an invalid SKFunction signature if the specified condition is not met. - private static void ThrowForInvalidSignatureIf([DoesNotReturnIf(true)] bool condition, MethodInfo method, string reason) - { - if (condition) - { - throw GetExceptionForInvalidSignature(method, reason); - } - } - - /// Tracks whether a particular kind of parameter has been seen, throwing an exception if it has, and marking it as seen if it hasn't - private static void TrackUniqueParameterType(ref bool hasParameterType, MethodInfo method, string failureMessage) - { - ThrowForInvalidSignatureIf(hasParameterType, method, failureMessage); - hasParameterType = true; - } - - /// - /// Gets a TypeConverter-based parser for parsing a string as the target type. - /// - /// Specifies the target type into which a string should be parsed. - /// The parsing function if the target type is supported; otherwise, null. - /// - /// The parsing function uses whatever TypeConverter is registered for the target type. - /// Parsing is first attempted using the current culture, and if that fails, it tries again - /// with the invariant culture. If both fail, an exception is thrown. - /// - private static Func? GetParser(Type targetType) => - s_parsers.GetOrAdd(targetType, static targetType => - { - // Strings just parse to themselves. - if (targetType == typeof(string)) - { - return (input, cultureInfo) => input; - } - - // For nullables, parse as the inner type. We then just need to be careful to treat null as null, - // as the underlying parser might not be expecting null. - bool wasNullable = false; - if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(Nullable<>)) - { - wasNullable = true; - targetType = Nullable.GetUnderlyingType(targetType); - } - - // For enums, delegate to Enum.Parse, special-casing null if it was actually Nullable. - if (targetType.IsEnum) - { - return (input, cultureInfo) => - { - if (wasNullable && input is null) - { - return null!; - } - - return Enum.Parse(targetType, input, ignoreCase: true); - }; - } - - // Finally, look up and use a type converter. Again, special-case null if it was actually Nullable. - if (GetTypeConverter(targetType) is TypeConverter converter && converter.CanConvertFrom(typeof(string))) - { - return (input, cultureInfo) => - { - if (wasNullable && input is null) - { - return null!; - } - - // First try to parse using the supplied culture (or current if none was supplied). - // If that fails, try with the invariant culture and allow any exception to propagate. - try - { - return converter.ConvertFromString(context: null, cultureInfo, input); - } - catch (Exception e) when (!e.IsCriticalException() && cultureInfo != CultureInfo.InvariantCulture) - { - return converter.ConvertFromInvariantString(input); - } - }; - } - - // Unsupported type. - return null; - }); - - /// - /// Gets a TypeConverter-based formatter for formatting an object as a string. - /// - /// - /// Formatting is performed in the invariant culture whenever possible. - /// - private static Func? GetFormatter(Type targetType) => - s_formatters.GetOrAdd(targetType, static targetType => - { - // For nullables, render as the underlying type. - bool wasNullable = false; - if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(Nullable<>)) - { - wasNullable = true; - targetType = Nullable.GetUnderlyingType(targetType); - } - - // For enums, just ToString() and allow the object override to do the right thing. 
- if (targetType.IsEnum) - { - return (input, cultureInfo) => input?.ToString()!; - } - - // Strings just render as themselves. - if (targetType == typeof(string)) - { - return (input, cultureInfo) => (string)input!; - } - - // Finally, look up and use a type converter. - if (GetTypeConverter(targetType) is TypeConverter converter && converter.CanConvertTo(typeof(string))) - { - return (input, cultureInfo) => - { - if (wasNullable && input is null) - { - return null!; - } - - return converter.ConvertToString(context: null, cultureInfo, input); - }; - } - - return null; - }); - - private static TypeConverter? GetTypeConverter(Type targetType) - { - // In an ideal world, this would use TypeDescriptor.GetConverter. However, that is not friendly to - // any form of ahead-of-time compilation, as it could end up requiring functionality that was trimmed. - // Instead, we just use a hard-coded set of converters for the types we know about and then also support - // types that are explicitly attributed with TypeConverterAttribute. - - if (targetType == typeof(byte)) { return new ByteConverter(); } - if (targetType == typeof(sbyte)) { return new SByteConverter(); } - if (targetType == typeof(bool)) { return new BooleanConverter(); } - if (targetType == typeof(ushort)) { return new UInt16Converter(); } - if (targetType == typeof(short)) { return new Int16Converter(); } - if (targetType == typeof(char)) { return new CharConverter(); } - if (targetType == typeof(uint)) { return new UInt32Converter(); } - if (targetType == typeof(int)) { return new Int32Converter(); } - if (targetType == typeof(ulong)) { return new UInt64Converter(); } - if (targetType == typeof(long)) { return new Int64Converter(); } - if (targetType == typeof(float)) { return new SingleConverter(); } - if (targetType == typeof(double)) { return new DoubleConverter(); } - if (targetType == typeof(decimal)) { return new DecimalConverter(); } - if (targetType == typeof(TimeSpan)) { return new TimeSpanConverter(); } - if (targetType == typeof(DateTime)) { return new DateTimeConverter(); } - if (targetType == typeof(DateTimeOffset)) { return new DateTimeOffsetConverter(); } - if (targetType == typeof(Uri)) { return new UriTypeConverter(); } - if (targetType == typeof(Guid)) { return new GuidConverter(); } - - if (targetType.GetCustomAttribute() is TypeConverterAttribute tca && - Type.GetType(tca.ConverterTypeName, throwOnError: false) is Type converterType && - Activator.CreateInstance(converterType) is TypeConverter converter) - { - return converter; - } - - return null; - } - - [DebuggerBrowsable(DebuggerBrowsableState.Never)] - private string DebuggerDisplay => $"{this.Name} ({this.Description})"; - - /// - /// Remove characters from method name that are valid in metadata but invalid for SK. - /// - private static string SanitizeMetadataName(string methodName) => - s_invalidNameCharsRegex.Replace(methodName, "_"); - - /// Regex that flags any character other than ASCII digits or letters or the underscore. - private static readonly Regex s_invalidNameCharsRegex = new("[^0-9A-Za-z_]"); - - /// Parser functions for converting strings to parameter types. - private static readonly ConcurrentDictionary?> s_parsers = new(); - - /// Formatter functions for converting parameter types to strings. - private static readonly ConcurrentDictionary?> s_formatters = new(); - - private readonly Lazy _view; - - #endregion - - #region Obsolete - - /// - [Obsolete("Use ISKFunction.RequestSettingsFactory instead. 
This will be removed in a future release.")] - public AIRequestSettings? RequestSettings { get; } - - /// - [Obsolete("Use ISKFunction.SetAIServiceFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIService(Func serviceFactory) - { - this.ThrowNotSemantic(); - return this; - } - - /// - [Obsolete("Use ISKFunction.SetAIRequestSettingsFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIConfiguration(AIRequestSettings? requestSettings) - { - this.ThrowNotSemantic(); - return this; - } - - /// - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use ISKFunction.PluginName instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public string SkillName => this.PluginName; - - /// - [Obsolete("Kernel no longer differentiates between Semantic and Native functions. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public bool IsSemantic => true; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultSkillCollection(IReadOnlyFunctionCollection skills) => this; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultFunctionCollection(IReadOnlyFunctionCollection functions) => this; - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/NativeFunctionDelegate.cs b/dotnet/src/SemanticKernel.Core/Functions/NativeFunctionDelegate.cs deleted file mode 100644 index 99cc51ada273..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/NativeFunctionDelegate.cs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -internal delegate Task NativeFunctionDelegate( - ITextCompletion? textCompletion, - AIRequestSettings? requestSettings, - SKContext context, - CancellationToken cancellationToken); diff --git a/dotnet/src/SemanticKernel.Core/Functions/OrderedIAIServiceSelector.cs b/dotnet/src/SemanticKernel.Core/Functions/OrderedIAIServiceSelector.cs deleted file mode 100644 index 0ce1c06f67cb..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/OrderedIAIServiceSelector.cs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Linq; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.Functions; - -/// -/// Implementation of that selects the AI service based on the order of the model settings. -/// Uses the service id to select the preferred service provider and then returns the service and associated model settings. -/// -internal class OrderedIAIServiceSelector : IAIServiceSelector -{ - /// - public (T?, AIRequestSettings?) SelectAIService(string renderedPrompt, IAIServiceProvider serviceProvider, IReadOnlyList? 
modelSettings) where T : IAIService - { - if (modelSettings is null || modelSettings.Count == 0) - { - var service = serviceProvider.GetService(null); - if (service is not null) - { - return (service, null); - } - } - else - { - AIRequestSettings? defaultRequestSettings = null; - foreach (var model in modelSettings) - { - if (!string.IsNullOrEmpty(model.ServiceId)) - { - var service = serviceProvider.GetService(model.ServiceId); - if (service is not null) - { - return (service, model); - } - } - else - { - // First request settings with empty or null service id is the default - defaultRequestSettings ??= model; - } - } - - if (defaultRequestSettings is not null) - { - var service = serviceProvider.GetService(null); - if (service is not null) - { - return (service, defaultRequestSettings); - } - } - } - - var names = string.Join("|", modelSettings.Select(model => model.ServiceId).ToArray()); - throw new SKException($"Service of type {typeof(T)} and name {names ?? ""} not registered."); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/PromptRenderingResult.cs b/dotnet/src/SemanticKernel.Core/Functions/PromptRenderingResult.cs new file mode 100644 index 000000000000..3a3f8f9e61a5 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Functions/PromptRenderingResult.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.Services; + +namespace Microsoft.SemanticKernel; + +/// +/// Contains result after prompt rendering process. +/// +internal sealed class PromptRenderingResult +{ + public IAIService AIService { get; set; } + + public string RenderedPrompt { get; set; } + + public PromptExecutionSettings? ExecutionSettings { get; set; } + +#pragma warning disable CS0618 // Events are deprecated + public PromptRenderedEventArgs? RenderedEventArgs { get; set; } +#pragma warning restore CS0618 // Events are deprecated + + public PromptRenderedContext? RenderedContext { get; set; } + + public PromptRenderingResult(IAIService aiService, string renderedPrompt) + { + this.AIService = aiService; + this.RenderedPrompt = renderedPrompt; + } +} diff --git a/dotnet/src/SemanticKernel.Core/Functions/SKFunction.cs b/dotnet/src/SemanticKernel.Core/Functions/SKFunction.cs deleted file mode 100644 index 3b92a52135b8..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/SKFunction.cs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Reflection; -using Microsoft.Extensions.Logging; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -#pragma warning disable format - -/// -/// Static helpers to create instances. -/// -public static class SKFunction -{ - /// - /// Create a native function instance, wrapping a native object method - /// - /// Signature of the method to invoke - /// Object containing the method to invoke - /// SK plugin name - /// The to use for logging. If null, no logging will be performed. - /// SK function instance - public static ISKFunction FromNativeMethod( - MethodInfo method, - object? target = null, - string? pluginName = null, - ILoggerFactory? 
loggerFactory = null) - => NativeFunction.FromNativeMethod(method, target, pluginName, loggerFactory); - - /// - /// Create a native function instance, wrapping a delegate function - /// - /// Function to invoke - /// SK plugin name - /// SK function name - /// SK function description - /// SK function parameters - /// The to use for logging. If null, no logging will be performed. - /// SK function instance - public static ISKFunction FromNativeFunction( - Delegate nativeFunction, - string? pluginName = null, - string? functionName = null, - string? description = null, - IEnumerable? parameters = null, - ILoggerFactory? loggerFactory = null) - => NativeFunction.FromNativeFunction(nativeFunction, pluginName, functionName, description, parameters, loggerFactory); -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/SKFunctionTextExtensions.cs b/dotnet/src/SemanticKernel.Core/Functions/SKFunctionTextExtensions.cs deleted file mode 100644 index 4c537f8606d6..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/SKFunctionTextExtensions.cs +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Orchestration; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Class with extension methods for semantic functions. -/// -public static class SKFunctionTextExtensions -{ - /// - /// Extension method to aggregate partitioned results of a semantic function. - /// - /// Semantic Kernel function - /// Input to aggregate. - /// Semantic Kernel context. - /// Separator to use between results. - /// Optional request settings. - /// The to monitor for cancellation requests. The default is . - /// Aggregated results. - public static async Task AggregatePartitionedResultsAsync( - this ISKFunction func, - List partitionedInput, - SKContext context, - string resultsSeparator = "\n", - AIRequestSettings? settings = null, - CancellationToken cancellationToken = default) - { - var results = new List(); - foreach (var partition in partitionedInput) - { - context.Variables.Update(partition); - - var result = await func.InvokeAsync(context, settings, cancellationToken).ConfigureAwait(false); - - context = result.Context; - - results.Add(context.Variables.ToString()); - } - - context.Variables.Update(string.Join(resultsSeparator, results)); - return context; - } -} diff --git a/dotnet/src/SemanticKernel.Core/Functions/SemanticFunction.cs b/dotnet/src/SemanticKernel.Core/Functions/SemanticFunction.cs deleted file mode 100644 index 3f28915cb615..000000000000 --- a/dotnet/src/SemanticKernel.Core/Functions/SemanticFunction.cs +++ /dev/null @@ -1,272 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Diagnostics; -using System.Linq; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.TemplateEngine; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using the main namespace -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -#pragma warning disable format - -/// -/// A Semantic Kernel "Semantic" prompt function. -/// -[DebuggerDisplay("{DebuggerDisplay,nq}")] -internal sealed class SemanticFunction : ISKFunction, IDisposable -{ - /// - public string Name { get; } - - /// - public string PluginName { get; } - - /// - public string Description { get; } - - /// - /// List of function parameters - /// - public IReadOnlyList Parameters => this._promptTemplate.Parameters; - - /// - /// Create a semantic function instance, given a semantic function configuration. - /// - /// Name of the plugin to which the function being created belongs. - /// Name of the function to create. - /// Prompt template configuration. - /// Prompt template. - /// The to use for logging. If null, no logging will be performed. - /// The to monitor for cancellation requests. The default is . - /// SK function instance. - public static ISKFunction FromSemanticConfig( - string pluginName, - string functionName, - PromptTemplateConfig promptTemplateConfig, - IPromptTemplate promptTemplate, - ILoggerFactory? loggerFactory = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(promptTemplateConfig); - Verify.NotNull(promptTemplate); - - var func = new SemanticFunction( - template: promptTemplate, - description: promptTemplateConfig.Description, - pluginName: pluginName, - functionName: functionName, - loggerFactory: loggerFactory - ) - { - _modelSettings = promptTemplateConfig.ModelSettings - }; - - return func; - } - - /// - public FunctionView Describe() - { - return new FunctionView(this.Name, this.PluginName, this.Description) { Parameters = this.Parameters }; - } - - /// - public async Task InvokeAsync( - SKContext context, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - this.AddDefaultValues(context.Variables); - - return await this.RunPromptAsync(requestSettings, context, cancellationToken).ConfigureAwait(false); - } - - /// - /// Dispose of resources. - /// - public void Dispose() - { - } - - /// - /// JSON serialized string representation of the function. - /// - public override string ToString() - => this.ToString(false); - - /// - /// JSON serialized string representation of the function. - /// - public string ToString(bool writeIndented) - => JsonSerializer.Serialize(this, options: writeIndented ? s_toStringIndentedSerialization : s_toStringStandardSerialization); - - internal SemanticFunction( - IPromptTemplate template, - string pluginName, - string functionName, - string description, - ILoggerFactory? loggerFactory = null) - { - Verify.NotNull(template); - Verify.ValidPluginName(pluginName); - Verify.ValidFunctionName(functionName); - - this._logger = loggerFactory is not null ? 
loggerFactory.CreateLogger(typeof(SemanticFunction)) : NullLogger.Instance; - - this._promptTemplate = template; - Verify.ParametersUniqueness(this.Parameters); - - this.Name = functionName; - this.PluginName = pluginName; - this.Description = description; - - this._view = new(() => new(functionName, pluginName, description, this.Parameters)); - } - - #region private - - private static readonly JsonSerializerOptions s_toStringStandardSerialization = new(); - private static readonly JsonSerializerOptions s_toStringIndentedSerialization = new() { WriteIndented = true }; - private readonly ILogger _logger; - private IAIServiceSelector? _serviceSelector; - private List? _modelSettings; - private readonly Lazy _view; - private readonly IPromptTemplate _promptTemplate; - - private static async Task GetCompletionsResultContentAsync(IReadOnlyList completions, CancellationToken cancellationToken = default) - { - // To avoid any unexpected behavior we only take the first completion result (when running from the Kernel) - return await completions[0].GetCompletionAsync(cancellationToken).ConfigureAwait(false); - } - - [DebuggerBrowsable(DebuggerBrowsableState.Never)] - private string DebuggerDisplay => $"{this.Name} ({this.Description})"; - - /// Add default values to the context variables if the variable is not defined - private void AddDefaultValues(ContextVariables variables) - { - foreach (var parameter in this.Parameters) - { - if (!variables.ContainsKey(parameter.Name) && parameter.DefaultValue != null) - { - variables[parameter.Name] = parameter.DefaultValue; - } - } - } - - private async Task RunPromptAsync( - AIRequestSettings? requestSettings, - SKContext context, - CancellationToken cancellationToken) - { - FunctionResult result; - - try - { - string renderedPrompt = await this._promptTemplate.RenderAsync(context, cancellationToken).ConfigureAwait(false); - // For backward compatibility, use the service selector from the class if it exists, otherwise use the one from the context - var serviceSelector = this._serviceSelector ?? context.ServiceSelector; - (var textCompletion, var defaultRequestSettings) = serviceSelector.SelectAIService(renderedPrompt, context.ServiceProvider, this._modelSettings); - Verify.NotNull(textCompletion); - IReadOnlyList completionResults = await textCompletion.GetCompletionsAsync(renderedPrompt, requestSettings ?? defaultRequestSettings, cancellationToken).ConfigureAwait(false); - string completion = await GetCompletionsResultContentAsync(completionResults, cancellationToken).ConfigureAwait(false); - - // Update the result with the completion - context.Variables.Update(completion); - - var modelResults = completionResults.Select(c => c.ModelResult).ToArray(); - - result = new FunctionResult(this.Name, this.PluginName, context, completion); - - result.Metadata.Add(AIFunctionResultExtensions.ModelResultsMetadataKey, modelResults); - } - catch (Exception ex) when (!ex.IsCriticalException()) - { - this._logger?.LogError(ex, "Semantic function {Plugin}.{Name} execution failed with error {Error}", this.PluginName, this.Name, ex.Message); - throw; - } - - return result; - } - - #endregion - - #region Obsolete - - /// - [Obsolete("Use ISKFunction.ModelSettings instead. This will be removed in a future release.")] - public AIRequestSettings? RequestSettings => this._modelSettings?.FirstOrDefault(); - - /// - [Obsolete("Use ISKFunction.SetAIServiceFactory instead. 
This will be removed in a future release.")] - public ISKFunction SetAIService(Func serviceFactory) - { - Verify.NotNull(serviceFactory); - - if (this._serviceSelector is DelegatingAIServiceSelector delegatingProvider) - { - delegatingProvider.ServiceFactory = serviceFactory; - } - else - { - var serviceSelector = new DelegatingAIServiceSelector(); - serviceSelector.ServiceFactory = serviceFactory; - this._serviceSelector = serviceSelector; - } - return this; - } - - /// - [Obsolete("Use ISKFunction.SetAIRequestSettingsFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIConfiguration(AIRequestSettings? requestSettings) - { - if (this._serviceSelector is DelegatingAIServiceSelector delegatingProvider) - { - delegatingProvider.RequestSettings = requestSettings; - } - else - { - var configurationProvider = new DelegatingAIServiceSelector(); - configurationProvider.RequestSettings = requestSettings; - this._serviceSelector = configurationProvider; - } - return this; - } - - /// - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use ISKFunction.PluginName instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public string SkillName => this.PluginName; - - /// - [Obsolete("Kernel no longer differentiates between Semantic and Native functions. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public bool IsSemantic => true; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultSkillCollection(IReadOnlyFunctionCollection skills) => this; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultFunctionCollection(IReadOnlyFunctionCollection functions) => this; - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/Kernel.cs b/dotnet/src/SemanticKernel.Core/Kernel.cs deleted file mode 100644 index 5338bb25a804..000000000000 --- a/dotnet/src/SemanticKernel.Core/Kernel.cs +++ /dev/null @@ -1,303 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Globalization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Events; -using Microsoft.SemanticKernel.Functions; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TemplateEngine; - -namespace Microsoft.SemanticKernel; - -/// -/// Semantic kernel class. -/// The kernel provides a function collection to define native and semantic functions, an orchestrator to execute a list of functions. -/// Semantic functions are automatically rendered and executed using an internal prompt template rendering engine. -/// Future versions will allow to: -/// * customize the rendering engine -/// * include branching logic in the functions pipeline -/// * persist execution state for long running pipelines -/// * distribute pipelines over a network -/// * RPC functions and secure environments, e.g. 
sandboxing and credentials management -/// * auto-generate pipelines given a higher level goal -/// -public sealed class Kernel : IKernel, IDisposable -{ - /// - public ILoggerFactory LoggerFactory { get; } - - /// - public IReadOnlyFunctionCollection Functions => this._functionCollection; - - /// - public IPromptTemplateEngine PromptTemplateEngine { get; } - - /// - /// Return a new instance of the kernel builder, used to build and configure kernel instances. - /// - [Obsolete("This field will be removed in a future release. Initialize KernelBuilder through constructor instead (new KernelBuilder()).")] - public static KernelBuilder Builder => new(); - - /// - public IDelegatingHandlerFactory HttpHandlerFactory { get; } - - /// - public event EventHandler? FunctionInvoking; - - /// - public event EventHandler? FunctionInvoked; - - /// - /// Kernel constructor. See KernelBuilder for an easier and less error prone approach to create kernel instances. - /// - /// function collection - /// AI Service Provider - /// Prompt template engine - /// Semantic text Memory - /// HTTP handler factory - /// The to use for logging. If null, no logging will be performed. - /// AI service selector - public Kernel( - IFunctionCollection functionCollection, - IAIServiceProvider aiServiceProvider, - IPromptTemplateEngine promptTemplateEngine, - ISemanticTextMemory memory, - IDelegatingHandlerFactory httpHandlerFactory, - ILoggerFactory? loggerFactory, - IAIServiceSelector? serviceSelector = null - ) - { - loggerFactory ??= NullLoggerFactory.Instance; - - this.LoggerFactory = loggerFactory; - this.HttpHandlerFactory = httpHandlerFactory; - this.PromptTemplateEngine = promptTemplateEngine; - this._memory = memory; - this._aiServiceProvider = aiServiceProvider; - this._promptTemplateEngine = promptTemplateEngine; - this._functionCollection = functionCollection; - this._aiServiceSelector = serviceSelector ?? new OrderedIAIServiceSelector(); - - this._logger = loggerFactory.CreateLogger(typeof(Kernel)); - } - - /// - public ISKFunction RegisterCustomFunction(ISKFunction customFunction) - { - Verify.NotNull(customFunction); - - this._functionCollection.AddFunction(customFunction); - - return customFunction; - } - - /// - public async Task RunAsync(ContextVariables variables, CancellationToken cancellationToken, params ISKFunction[] pipeline) - { - var context = this.CreateNewContext(variables); - - FunctionResult? functionResult = null; - - int pipelineStepCount = 0; - var allFunctionResults = new List(); - - foreach (ISKFunction skFunction in pipeline) - { -repeat: - cancellationToken.ThrowIfCancellationRequested(); - - try - { - var functionDetails = skFunction.Describe(); - - var functionInvokingArgs = this.OnFunctionInvoking(functionDetails, context); - if (functionInvokingArgs?.CancelToken.IsCancellationRequested ?? false) - { - this._logger.LogInformation("Execution was cancelled on function invoking event of pipeline step {StepCount}: {PluginName}.{FunctionName}.", pipelineStepCount, skFunction.PluginName, skFunction.Name); - break; - } - - if (functionInvokingArgs?.IsSkipRequested ?? 
false) - { - this._logger.LogInformation("Execution was skipped on function invoking event of pipeline step {StepCount}: {PluginName}.{FunctionName}.", pipelineStepCount, skFunction.PluginName, skFunction.Name); - continue; - } - - functionResult = await skFunction.InvokeAsync(context, cancellationToken: cancellationToken).ConfigureAwait(false); - - context = functionResult.Context; - - var functionInvokedArgs = this.OnFunctionInvoked(functionDetails, functionResult); - - if (functionInvokedArgs is not null) - { - // All changes to the SKContext by invoked handlers may reflect in the original function result - functionResult = new FunctionResult(functionDetails.Name, functionDetails.PluginName, functionInvokedArgs.SKContext, functionInvokedArgs.SKContext.Result); - } - - allFunctionResults.Add(functionResult); - - if (functionInvokedArgs?.CancelToken.IsCancellationRequested ?? false) - { - this._logger.LogInformation("Execution was cancelled on function invoked event of pipeline step {StepCount}: {PluginName}.{FunctionName}.", pipelineStepCount, skFunction.PluginName, skFunction.Name); - break; - } - - if (functionInvokedArgs?.IsRepeatRequested ?? false) - { - this._logger.LogInformation("Execution repeat request on function invoked event of pipeline step {StepCount}: {PluginName}.{FunctionName}.", pipelineStepCount, skFunction.PluginName, skFunction.Name); - goto repeat; - } - } - catch (Exception ex) - { - this._logger.LogError("Plugin {Plugin} function {Function} call fail during pipeline step {Step} with error {Error}:", skFunction.PluginName, skFunction.Name, pipelineStepCount, ex.Message); - throw; - } - - pipelineStepCount++; - } - - return KernelResult.FromFunctionResults(functionResult?.Value, allFunctionResults); - } - - /// - public SKContext CreateNewContext( - ContextVariables? variables = null, - IReadOnlyFunctionCollection? functions = null, - ILoggerFactory? loggerFactory = null, - CultureInfo? culture = null) - { - return new SKContext( - new FunctionRunner(this), - this._aiServiceProvider, - this._aiServiceSelector, - variables, - functions ?? this.Functions, - loggerFactory ?? this.LoggerFactory, - culture); - } - - /// - public T GetService(string? name = null) where T : IAIService - { - var service = this._aiServiceProvider.GetService(name); - if (service != null) - { - return service; - } - - throw new SKException($"Service of type {typeof(T)} and name {name ?? ""} not registered."); - } - - /// - /// Dispose of resources. - /// - public void Dispose() - { - // ReSharper disable once SuspiciousTypeConversion.Global - if (this._memory is IDisposable mem) { mem.Dispose(); } - - // ReSharper disable once SuspiciousTypeConversion.Global - if (this._functionCollection is IDisposable reg) { reg.Dispose(); } - } - - #region private ================================================================================ - - private readonly IFunctionCollection _functionCollection; - private ISemanticTextMemory _memory; - private readonly IPromptTemplateEngine _promptTemplateEngine; - private readonly IAIServiceProvider _aiServiceProvider; - private readonly IAIServiceSelector _aiServiceSelector; - private readonly ILogger _logger; - - /// - /// Execute the OnFunctionInvoking event handlers. - /// - /// Function view details - /// SKContext before function invocation - /// FunctionInvokingEventArgs if the event was handled, null otherwise - private FunctionInvokingEventArgs? 
OnFunctionInvoking(FunctionView functionView, SKContext context) - { - if (this.FunctionInvoking is not null) - { - var args = new FunctionInvokingEventArgs(functionView, context); - this.FunctionInvoking.Invoke(this, args); - - return args; - } - - return null; - } - - /// - /// Execute the OnFunctionInvoked event handlers. - /// - /// Function view details - /// Function result after invocation - /// FunctionInvokedEventArgs if the event was handled, null otherwise - private FunctionInvokedEventArgs? OnFunctionInvoked(FunctionView functionView, FunctionResult result) - { - if (this.FunctionInvoked is not null) - { - var args = new FunctionInvokedEventArgs(functionView, result); - this.FunctionInvoked.Invoke(this, args); - - return args; - } - - return null; - } - - #endregion - - #region Obsolete =============================================================================== - - /// - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISemanticTextMemory Memory => this._memory; - - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use Kernel.Functions instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - public IReadOnlyFunctionCollection Skills => this._functionCollection; -#pragma warning restore CS1591 - - /// - [Obsolete("Func shorthand no longer no longer supported. Use Kernel.Functions collection instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction Func(string pluginName, string functionName) - { - return this.Functions.GetFunction(pluginName, functionName); - } - - /// - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public void RegisterMemory(ISemanticTextMemory memory) - { - this._memory = memory; - } - - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use Kernel.ImportFunctions instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] -#pragma warning disable CS1591 - public IDictionary ImportSkill(object functionsInstance, string? pluginName = null) - { - return this.ImportFunctions(functionsInstance, pluginName); - } -#pragma warning restore CS1591 - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/KernelBuilder.cs b/dotnet/src/SemanticKernel.Core/KernelBuilder.cs deleted file mode 100644 index 201397cf5915..000000000000 --- a/dotnet/src/SemanticKernel.Core/KernelBuilder.cs +++ /dev/null @@ -1,327 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.ComponentModel; -using System.Linq; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TemplateEngine; - -namespace Microsoft.SemanticKernel; - -/// -/// A builder for Semantic Kernel. -/// -public sealed class KernelBuilder -{ - private Func _memoryFactory = () => NullMemory.Instance; - private ILoggerFactory _loggerFactory = NullLoggerFactory.Instance; - private Func? _memoryStorageFactory = null; - private IDelegatingHandlerFactory _httpHandlerFactory = NullHttpHandlerFactory.Instance; - private IPromptTemplateEngine? _promptTemplateEngine; - private readonly AIServiceCollection _aiServices = new(); - private IAIServiceSelector? _serviceSelector; - - private static bool s_promptTemplateEngineInitialized = false; - private static Type? s_promptTemplateEngineType = null; - - /// - /// Create a new kernel instance - /// - /// New kernel instance - public static IKernel Create() - { - var builder = new KernelBuilder(); - return builder.Build(); - } - - /// - /// Build a new kernel instance using the settings passed so far. - /// - /// Kernel instance - public IKernel Build() - { - var instance = new Kernel( - new FunctionCollection(), - this._aiServices.Build(), - this._promptTemplateEngine ?? this.CreateDefaultPromptTemplateEngine(this._loggerFactory), - this._memoryFactory.Invoke(), - this._httpHandlerFactory, - this._loggerFactory, - this._serviceSelector - ); - - // TODO: decouple this from 'UseMemory' kernel extension - if (this._memoryStorageFactory != null) - { -#pragma warning disable CS0618 // This will be removed in a future release. - instance.UseMemory(this._memoryStorageFactory.Invoke()); -#pragma warning restore CS0618 // This will be removed in a future release. - } - - return instance; - } - - /// - /// Add a logger to the kernel to be built. - /// - /// The to use for logging. If null, no logging will be performed. - /// Updated kernel builder including the logger. - public KernelBuilder WithLoggerFactory(ILoggerFactory loggerFactory) - { - Verify.NotNull(loggerFactory); - this._loggerFactory = loggerFactory; - return this; - } - - /// - /// Add a semantic text memory entity to the kernel to be built. - /// - /// Semantic text memory entity to add. - /// Updated kernel builder including the semantic text memory entity. - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public KernelBuilder WithMemory(ISemanticTextMemory memory) - { - Verify.NotNull(memory); - this._memoryFactory = () => memory; - return this; - } - - /// - /// Add a semantic text memory store factory. - /// - /// The store factory. - /// Updated kernel builder including the semantic text memory entity. - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. 
See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public KernelBuilder WithMemory(Func factory) where TStore : ISemanticTextMemory - { - Verify.NotNull(factory); - this._memoryFactory = () => factory(this._loggerFactory); - return this; - } - - /// - /// Add memory storage to the kernel to be built. - /// - /// Storage to add. - /// Updated kernel builder including the memory storage. - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public KernelBuilder WithMemoryStorage(IMemoryStore storage) - { - Verify.NotNull(storage); - this._memoryStorageFactory = () => storage; - return this; - } - - /// - /// Add memory storage factory to the kernel. - /// - /// The storage factory. - /// Updated kernel builder including the memory storage. - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public KernelBuilder WithMemoryStorage(Func factory) where TStore : IMemoryStore - { - Verify.NotNull(factory); - this._memoryStorageFactory = () => factory(this._loggerFactory); - return this; - } - - /// - /// Add memory storage factory to the kernel. - /// - /// The storage factory. - /// Updated kernel builder including the memory storage. - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public KernelBuilder WithMemoryStorage(Func factory) where TStore : IMemoryStore - { - Verify.NotNull(factory); - this._memoryStorageFactory = () => factory(this._loggerFactory, this._httpHandlerFactory); - return this; - } - - /// - /// Add prompt template engine to the kernel to be built. - /// - /// Prompt template engine to add. - /// Updated kernel builder including the prompt template engine. - public KernelBuilder WithPromptTemplateEngine(IPromptTemplateEngine promptTemplateEngine) - { - Verify.NotNull(promptTemplateEngine); - this._promptTemplateEngine = promptTemplateEngine; - return this; - } - - /// - /// Add a http handler factory to the kernel to be built. - /// - /// Http handler factory to add. - /// Updated kernel builder including the http handler factory. - public KernelBuilder WithHttpHandlerFactory(IDelegatingHandlerFactory httpHandlerFactory) - { - Verify.NotNull(httpHandlerFactory); - this._httpHandlerFactory = httpHandlerFactory; - return this; - } - - /// - /// Add a retry handler factory to the kernel to be built. - /// - /// Retry handler factory to add. - /// Updated kernel builder including the retry handler factory. 
- [Obsolete("This method is deprecated, use WithHttpHandlerFactory instead")] - public KernelBuilder WithRetryHandlerFactory(IDelegatingHandlerFactory httpHandlerFactory) - { - return this.WithHttpHandlerFactory(httpHandlerFactory); - } - - /// - /// Adds a instance to the services collection - /// - /// The instance. - public KernelBuilder WithDefaultAIService(TService instance) where TService : IAIService - { - this._aiServices.SetService(instance); - return this; - } - - /// - /// Adds a factory method to the services collection - /// - /// The factory method that creates the AI service instances of type . - public KernelBuilder WithDefaultAIService(Func factory) where TService : IAIService - { - this._aiServices.SetService(() => factory(this._loggerFactory)); - return this; - } - - /// - /// Adds a instance to the services collection - /// - /// The service ID - /// The instance. - /// Optional: set as the default AI service for type - public KernelBuilder WithAIService( - string? serviceId, - TService instance, - bool setAsDefault = false) where TService : IAIService - { - this._aiServices.SetService(serviceId, instance, setAsDefault); - return this; - } - - /// - /// Adds a factory method to the services collection - /// - /// The service ID - /// The factory method that creates the AI service instances of type . - /// Optional: set as the default AI service for type - public KernelBuilder WithAIService( - string? serviceId, - Func factory, - bool setAsDefault = false) where TService : IAIService - { - this._aiServices.SetService(serviceId, () => factory(this._loggerFactory), setAsDefault); - return this; - } - - /// - /// Adds a factory method to the services collection - /// - /// The service ID - /// The factory method that creates the AI service instances of type . - /// Optional: set as the default AI service for type - public KernelBuilder WithAIService( - string? serviceId, - Func factory, - bool setAsDefault = false) where TService : IAIService - { - this._aiServices.SetService(serviceId, () => factory(this._loggerFactory, this._httpHandlerFactory), setAsDefault); - return this; - } - - /// - /// Adds a to the builder - /// - public KernelBuilder WithAIServiceSelector(IAIServiceSelector serviceSelector) - { - this._serviceSelector = serviceSelector; - return this; - } - - /// - /// Create a default prompt template engine. - /// - /// This is a temporary solution to avoid breaking existing clients. - /// There will be a separate task to add support for registering instances of IPromptTemplateEngine and obsoleting the current approach. - /// - /// - /// Logger factory to be used by the template engine - /// Instance of . - private IPromptTemplateEngine CreateDefaultPromptTemplateEngine(ILoggerFactory? loggerFactory = null) - { - if (!s_promptTemplateEngineInitialized) - { - s_promptTemplateEngineType = this.GetPromptTemplateEngineType(); - s_promptTemplateEngineInitialized = true; - } - - if (s_promptTemplateEngineType is not null) - { - var constructor = s_promptTemplateEngineType.GetConstructor(new Type[] { typeof(ILoggerFactory) }); - if (constructor is not null) - { -#pragma warning disable CS8601 // Null logger factory is OK - return (IPromptTemplateEngine)constructor.Invoke(new object[] { loggerFactory }); -#pragma warning restore CS8601 - } - } - - return new NullPromptTemplateEngine(); - } - - /// - /// Get the prompt template engine type if available - /// - /// The type for the prompt template engine if available - private Type? 
GetPromptTemplateEngineType() - { - try - { - var assembly = Assembly.Load("Microsoft.SemanticKernel.TemplateEngine.Basic"); - - return assembly.ExportedTypes.Single(type => - type.Name.Equals("BasicPromptTemplateEngine", StringComparison.Ordinal) && - type.GetInterface(nameof(IPromptTemplateEngine)) is not null); - } - catch (Exception ex) when (!ex.IsCriticalException()) - { - return null; - } - } -} - -/// -/// No-operation IPromptTemplateEngine which performs no rendering of the template. -/// -/// This is a temporary solution to avoid breaking existing clients. -/// -internal sealed class NullPromptTemplateEngine : IPromptTemplateEngine -{ - public Task RenderAsync(string templateText, SKContext context, CancellationToken cancellationToken = default) - { - return Task.FromResult(templateText); - } -} diff --git a/dotnet/src/SemanticKernel.Core/KernelExtensions.cs b/dotnet/src/SemanticKernel.Core/KernelExtensions.cs index 41545d15a716..f9ba83eaeb89 100644 --- a/dotnet/src/SemanticKernel.Core/KernelExtensions.cs +++ b/dotnet/src/SemanticKernel.Core/KernelExtensions.cs @@ -2,169 +2,873 @@ using System; using System.Collections.Generic; +using System.ComponentModel; +using System.IO; using System.Reflection; +using System.Text.Json; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel; -/// Extension methods for interacting with . +/// Provides extension methods for interacting with and related types. public static class KernelExtensions { + #region CreateFunctionFromMethod /// - /// Import a set of functions as a plugin from the given object instance. Only the functions that have the `SKFunction` attribute will be included in the plugin. - /// Once these functions are imported, the prompt templates can use functions to import content at runtime. + /// Creates a instance for a method, specified via a delegate. /// - /// The kernel. - /// Instance of a class containing functions - /// Name of the plugin for function collection and prompt templates. If the value is empty functions are registered in the global namespace. - /// A list of all the semantic functions found in the directory, indexed by function name. - public static IDictionary ImportFunctions( - this IKernel kernel, - object functionsInstance, - string? pluginName = null) + /// The containing services, plugins, and other state for use throughout the operation. + /// The method to be represented via the created . + /// The name to use for the function. If null, it will default to one derived from the method represented by . + /// The description to use for the function. If null, it will default to one derived from the method represented by , if possible (e.g. via a on the method). + /// Optional parameter descriptions. If null, it will default to one derived from the method represented by . + /// Optional return parameter description. If null, it will default to one derived from the method represented by . + /// The created for invoking . + public static KernelFunction CreateFunctionFromMethod( + this Kernel kernel, + Delegate method, + string? functionName = null, + string? description = null, + IEnumerable? parameters = null, + KernelReturnParameterMetadata? 
returnParameter = null) { Verify.NotNull(kernel); - Verify.NotNull(functionsInstance); + Verify.NotNull(method); - ILogger logger = kernel.LoggerFactory.CreateLogger(kernel.GetType()); - if (string.IsNullOrWhiteSpace(pluginName)) - { - pluginName = FunctionCollection.GlobalFunctionsPluginName; - logger.LogTrace("Importing functions from {0} to the global plugin namespace", functionsInstance.GetType().FullName); - } - else - { - logger.LogTrace("Importing functions from {0} to the {1} namespace", functionsInstance.GetType().FullName, pluginName); - } + return KernelFunctionFactory.CreateFromMethod(method.Method, method.Target, functionName, description, parameters, returnParameter, kernel.LoggerFactory); + } + + /// + /// Creates a instance for a method, specified via an instance + /// and an optional target object if the method is an instance method. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The method to be represented via the created . + /// The target object for the if it represents an instance method. This should be null if and only if is a static method. + /// The name to use for the function. If null, it will default to one derived from the method represented by . + /// The description to use for the function. If null, it will default to one derived from the method represented by , if possible (e.g. via a on the method). + /// Optional parameter descriptions. If null, it will default to one derived from the method represented by . + /// Optional return parameter description. If null, it will default to one derived from the method represented by . + /// The created for invoking . + public static KernelFunction CreateFunctionFromMethod( + this Kernel kernel, + MethodInfo method, + object? target = null, + string? functionName = null, + string? description = null, + IEnumerable? parameters = null, + KernelReturnParameterMetadata? returnParameter = null) + { + Verify.NotNull(kernel); + Verify.NotNull(method); + + return KernelFunctionFactory.CreateFromMethod(method, target, functionName, description, parameters, returnParameter, kernel.LoggerFactory); + } + #endregion + + #region CreateFunctionFromPrompt + + /// + /// Creates a instance for a prompt specified via a prompt template. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Prompt template for the function. + /// Default execution settings to use when invoking this prompt function. + /// The name to use for the function. If null, it will default to a randomly generated name. + /// The description to use for the function. + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The created for invoking the prompt. + public static KernelFunction CreateFunctionFromPrompt( + this Kernel kernel, + string promptTemplate, + PromptExecutionSettings? executionSettings = null, + string? functionName = null, + string? description = null, + string? templateFormat = null, + IPromptTemplateFactory? promptTemplateFactory = null) + { + Verify.NotNull(kernel); + Verify.NotNull(promptTemplate); + + return KernelFunctionFactory.CreateFromPrompt( + promptTemplate, + executionSettings, + functionName, + description, + templateFormat, + promptTemplateFactory, + kernel.LoggerFactory); + } + + /// + /// Creates a instance for a prompt specified via a prompt template configuration. 
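// Illustrative usage sketch (not part of the changeset above): creating a
// function from a .NET method and from an inline prompt with the new
// CreateFunctionFromMethod / CreateFunctionFromPrompt extensions. Assumes
// Kernel.CreateBuilder() is available in this version; the function names and
// prompt text are hypothetical.
using Microsoft.SemanticKernel;

Kernel kernel = Kernel.CreateBuilder().Build();

KernelFunction length = kernel.CreateFunctionFromMethod(
    (string text) => text.Length,
    functionName: "GetLength",
    description: "Returns the number of characters in the input text.");

KernelFunction summarize = kernel.CreateFunctionFromPrompt(
    "Summarize the following text in one sentence: {{$input}}",
    functionName: "Summarize");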
+ /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Configuration information describing the prompt. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The created for invoking the prompt. + public static KernelFunction CreateFunctionFromPrompt( + this Kernel kernel, + PromptTemplateConfig promptConfig, + IPromptTemplateFactory? promptTemplateFactory = null) + { + Verify.NotNull(kernel); + Verify.NotNull(promptConfig); + + return KernelFunctionFactory.CreateFromPrompt(promptConfig, promptTemplateFactory, kernel.LoggerFactory); + } + #endregion + + #region CreatePluginFromType + /// Creates a plugin that wraps a new instance of the specified type . + /// Specifies the type of the object to wrap. + /// The containing services, plugins, and other state for use throughout the operation. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// A containing s for all relevant members of . + /// + /// Public methods that have the attribute will be included in the plugin. + /// + public static KernelPlugin CreatePluginFromType(this Kernel kernel, string? pluginName = null) + { + Verify.NotNull(kernel); + + return KernelPluginFactory.CreateFromType(pluginName, kernel.Services); + } + #endregion + + #region CreatePluginFromObject + /// Creates a plugin that wraps the specified target object. + /// The containing services, plugins, and other state for use throughout the operation. + /// The instance of the class to be wrapped. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// A containing s for all relevant members of . + /// + /// Public methods that have the attribute will be included in the plugin. + /// + public static KernelPlugin CreatePluginFromObject(this Kernel kernel, object target, string? pluginName = null) + { + Verify.NotNull(kernel); + + return KernelPluginFactory.CreateFromObject(target, pluginName, kernel.LoggerFactory); + } + #endregion + + #region CreatePluginFromFunctions + /// Creates a plugin that contains the specified functions. + /// The containing services, plugins, and other state for use throughout the operation. + /// The name for the plugin. + /// The initial functions to be available as part of the plugin. + /// A containing the functions provided in . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + public static KernelPlugin CreatePluginFromFunctions(this Kernel kernel, string pluginName, IEnumerable? functions) => + CreatePluginFromFunctions(kernel, pluginName, description: null, functions); - MethodInfo[] methods = functionsInstance.GetType().GetMethods(BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public); - logger.LogTrace("Importing plugin name: {0}. Potential methods found: {1}", pluginName, methods.Length); + /// Creates a plugin that contains the specified functions. + /// The containing services, plugins, and other state for use throughout the operation. + /// The name for the plugin. + /// A description of the plugin. + /// The initial functions to be available as part of the plugin. + /// A containing the functions provided in . + /// is null. + /// is an invalid plugin name. + /// contains a null function. 
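// Illustrative usage sketch (not part of the changeset above): the
// PromptTemplateConfig overload mirrors what the prompt-directory loader later
// in this file does with skprompt.txt + config.json. Assumes
// Kernel.CreateBuilder() is available; the Name and Template values are
// hypothetical.
using Microsoft.SemanticKernel;

Kernel kernel = Kernel.CreateBuilder().Build();

var jokeConfig = new PromptTemplateConfig
{
    Name = "Joke",
    Template = "Tell a short, family-friendly joke about {{$topic}}.",
};

KernelFunction joke = kernel.CreateFunctionFromPrompt(jokeConfig);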
+ /// contains two functions with the same name. + public static KernelPlugin CreatePluginFromFunctions(this Kernel kernel, string pluginName, string? description = null, IEnumerable? functions = null) + { + Verify.NotNull(kernel); + + return KernelPluginFactory.CreateFromFunctions(pluginName, description, functions); + } + #endregion + + #region ImportPlugin/AddFromType + /// Creates a plugin that wraps a new instance of the specified type and imports it into the 's plugin collection. + /// Specifies the type of the object to wrap. + /// The containing services, plugins, and other state for use throughout the operation. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// A containing s for all relevant members of . + /// + /// Public methods that have the attribute will be included in the plugin. + /// + public static KernelPlugin ImportPluginFromType(this Kernel kernel, string? pluginName = null) + { + KernelPlugin plugin = CreatePluginFromType(kernel, pluginName); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// Creates a plugin that wraps a new instance of the specified type and adds it into the plugin collection. + /// Specifies the type of the object to wrap. + /// The plugin collection to which the new plugin should be added. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// Service provider from which to resolve dependencies, such as . + /// A containing s for all relevant members of . + /// + /// Public methods that have the attribute will be included in the plugin. + /// + public static KernelPlugin AddFromType(this ICollection plugins, string? pluginName = null, IServiceProvider? serviceProvider = null) + { + Verify.NotNull(plugins); + + KernelPlugin plugin = KernelPluginFactory.CreateFromType(pluginName, serviceProvider); + plugins.Add(plugin); + return plugin; + } + + /// Creates a plugin that wraps a new instance of the specified type and adds it into the plugin collection. + /// Specifies the type of the object to wrap. + /// The plugin collection to which the new plugin should be added. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// The same instance as . + /// + /// Public methods that have the attribute will be included in the plugin. + /// + public static IKernelBuilderPlugins AddFromType(this IKernelBuilderPlugins plugins, string? pluginName = null) + { + Verify.NotNull(plugins); + + plugins.Services.AddSingleton(serviceProvider => KernelPluginFactory.CreateFromType(pluginName, serviceProvider)); + + return plugins; + } + + /// Adds the to the . + /// The plugin collection to which the plugin should be added. + /// The plugin to add. + /// The same instance as . + public static IKernelBuilderPlugins Add(this IKernelBuilderPlugins plugins, KernelPlugin plugin) + { + Verify.NotNull(plugins); + Verify.NotNull(plugin); + + plugins.Services.AddSingleton(plugin); + + return plugins; + } + #endregion + + #region ImportPlugin/AddFromObject + /// Creates a plugin that wraps the specified target object and imports it into the 's plugin collection. + /// The containing services, plugins, and other state for use throughout the operation. + /// The instance of the class to be wrapped. 
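// Illustrative usage sketch (not part of the changeset above): two ways to
// expose a class as a plugin, either on the builder's plugin collection
// (AddFromType) or on an existing Kernel (ImportPluginFromObject). TimePlugin
// is a hypothetical class, and builder.Plugins / Kernel.CreateBuilder() are
// assumed to exist in this version; only [KernelFunction] methods are exposed.
using System;
using Microsoft.SemanticKernel;

IKernelBuilder builder = Kernel.CreateBuilder();
builder.Plugins.AddFromType<TimePlugin>("Time");
Kernel kernel = builder.Build();

KernelPlugin timeCopy = kernel.ImportPluginFromObject(new TimePlugin(), "TimeCopy");

public sealed class TimePlugin
{
    [KernelFunction]
    public string Today() => DateTime.UtcNow.ToString("yyyy-MM-dd");
}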
+ /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// A containing s for all relevant members of . + /// + /// Public methods that have the attribute will be included in the plugin. + /// + public static KernelPlugin ImportPluginFromObject(this Kernel kernel, object target, string? pluginName = null) + { + KernelPlugin plugin = CreatePluginFromObject(kernel, target, pluginName); + kernel.Plugins.Add(plugin); + return plugin; + } + + /// Creates a plugin that wraps the specified target object and adds it into the plugin collection. + /// The plugin collection to which the new plugin should be added. + /// The instance of the class to be wrapped. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// Service provider from which to resolve dependencies, such as . + /// A containing s for all relevant members of . + /// + /// Public methods that have the attribute will be included in the plugin. + /// + public static KernelPlugin AddFromObject(this ICollection plugins, object target, string? pluginName = null, IServiceProvider? serviceProvider = null) + { + Verify.NotNull(plugins); + + KernelPlugin plugin = KernelPluginFactory.CreateFromObject(target, pluginName, serviceProvider?.GetService()); + plugins.Add(plugin); + return plugin; + } + + /// Creates a plugin that wraps the specified target object and adds it into the plugin collection. + /// The plugin collection to which the new plugin should be added. + /// The instance of the class to be wrapped. + /// + /// Name of the plugin for function collection and prompt templates. If the value is null, a plugin name is derived from the type of the . + /// + /// The same instance as . + /// + /// Public methods that have the attribute will be included in the plugin. + /// + public static IKernelBuilderPlugins AddFromObject(this IKernelBuilderPlugins plugins, object target, string? pluginName = null) + { + Verify.NotNull(plugins); + + plugins.Services.AddSingleton(serviceProvider => KernelPluginFactory.CreateFromObject(target, pluginName, serviceProvider?.GetService())); + + return plugins; + } + #endregion + + #region ImportPlugin/AddFromFunctions + /// Creates a plugin that contains the specified functions and imports it into the 's plugin collection. + /// The containing services, plugins, and other state for use throughout the operation. + /// The name for the plugin. + /// The initial functions to be available as part of the plugin. + /// A containing the functions provided in . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + public static KernelPlugin ImportPluginFromFunctions(this Kernel kernel, string pluginName, IEnumerable? functions) => + ImportPluginFromFunctions(kernel, pluginName, description: null, functions); + + /// Creates a plugin that contains the specified functions and imports it into the 's plugin collection. + /// The containing services, plugins, and other state for use throughout the operation. + /// The name for the plugin. + /// A description of the plugin. + /// The initial functions to be available as part of the plugin. + /// A containing the functions provided in . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. 
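// Illustrative usage sketch (not part of the changeset above): grouping
// individually created functions into a named plugin with
// ImportPluginFromFunctions. Assumes Kernel.CreateBuilder() exists; the plugin
// name, function names, and prompt text are hypothetical.
using System;
using Microsoft.SemanticKernel;

Kernel kernel = Kernel.CreateBuilder().Build();

KernelPlugin utilities = kernel.ImportPluginFromFunctions(
    "Utilities",
    new[]
    {
        kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("o"), "UtcNow"),
        kernel.CreateFunctionFromPrompt("Translate to French: {{$input}}", functionName: "ToFrench"),
    });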
+ public static KernelPlugin ImportPluginFromFunctions(this Kernel kernel, string pluginName, string? description = null, IEnumerable? functions = null) + { + KernelPlugin plugin = CreatePluginFromFunctions(kernel, pluginName, description, functions); + kernel.Plugins.Add(plugin); + return plugin; + } - // Filter out non-SKFunctions and fail if two functions have the same name - Dictionary result = new(StringComparer.OrdinalIgnoreCase); - foreach (MethodInfo method in methods) + /// Creates a plugin that contains the specified functions and adds it into the plugin collection. + /// The plugin collection to which the new plugin should be added. + /// The name for the plugin. + /// The initial functions to be available as part of the plugin. + /// A containing the functions provided in . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + public static KernelPlugin AddFromFunctions(this ICollection plugins, string pluginName, IEnumerable? functions) => + AddFromFunctions(plugins, pluginName, description: null, functions); + + /// Creates a plugin that contains the specified functions and adds it into the plugin collection. + /// The plugin collection to which the new plugin should be added. + /// The name for the plugin. + /// A description of the plugin. + /// The initial functions to be available as part of the plugin. + /// A containing the functions provided in . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + public static KernelPlugin AddFromFunctions(this ICollection plugins, string pluginName, string? description = null, IEnumerable? functions = null) + { + Verify.NotNull(plugins); + + var plugin = new DefaultKernelPlugin(pluginName, description, functions); + plugins.Add(plugin); + return plugin; + } + + /// Creates a plugin that wraps the specified target object and adds it into the plugin collection. + /// The plugin collection to which the new plugin should be added. + /// The name for the plugin. + /// The initial functions to be available as part of the plugin. + /// The same instance as . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + public static IKernelBuilderPlugins AddFromFunctions(this IKernelBuilderPlugins plugins, string pluginName, IEnumerable? functions) => + AddFromFunctions(plugins, pluginName, description: null, functions); + + /// Creates a plugin that wraps the specified target object and adds it into the plugin collection. + /// The plugin collection to which the new plugin should be added. + /// The name for the plugin. + /// A description of the plugin. + /// The initial functions to be available as part of the plugin. + /// The same instance as . + /// is null. + /// is an invalid plugin name. + /// contains a null function. + /// contains two functions with the same name. + public static IKernelBuilderPlugins AddFromFunctions(this IKernelBuilderPlugins plugins, string pluginName, string? description = null, IEnumerable? functions = null) + { + Verify.NotNull(plugins); + + plugins.Services.AddSingleton(KernelPluginFactory.CreateFromFunctions(pluginName, description, functions)); + + return plugins; + } + #endregion + + #region CreatePluginFromDirectory + /// Creates a plugin containing one function per child directory of the specified . 
+ /// + /// + /// A plugin directory contains a set of subdirectories, one for each function in the form of a prompt. + /// This method accepts the path of the plugin directory. Each subdirectory's name is used as the function name + /// and may contain only alphanumeric chars and underscores. + /// + /// + /// The following directory structure, with pluginDirectory = "D:\plugins\OfficePlugin", + /// will create a plugin with three functions: + /// D:\plugins\ + /// |__ OfficePlugin\ # pluginDirectory + /// |__ ScheduleMeeting # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// |__ SummarizeEmailThread # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// |__ MergeWordAndExcelDocs # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// + /// + /// See https://github.com/microsoft/semantic-kernel/tree/main/samples/plugins for examples in the Semantic Kernel repository. + /// + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Path to the directory containing the plugin. + /// The name of the plugin. If null, the name is derived from the directory name. + /// + /// The to use when interpreting discovered prompts into s. + /// If null, a default factory will be used. + /// + /// A containing prompt functions created from the specified directory. + public static KernelPlugin CreatePluginFromPromptDirectory( + this Kernel kernel, + string pluginDirectory, + string? pluginName = null, + IPromptTemplateFactory? promptTemplateFactory = null) + { + Verify.NotNull(kernel); + + return CreatePluginFromPromptDirectory(pluginDirectory, pluginName, promptTemplateFactory, kernel.Services); + } + + /// Creates a plugin containing one function per child directory of the specified . + private static KernelPlugin CreatePluginFromPromptDirectory( + string pluginDirectory, + string? pluginName = null, + IPromptTemplateFactory? promptTemplateFactory = null, + IServiceProvider? services = null) + { + const string ConfigFile = "config.json"; + const string PromptFile = "skprompt.txt"; + + Verify.DirectoryExists(pluginDirectory); + pluginName ??= new DirectoryInfo(pluginDirectory).Name; + + ILoggerFactory loggerFactory = services?.GetService() ?? NullLoggerFactory.Instance; + + var factory = promptTemplateFactory ?? new KernelPromptTemplateFactory(loggerFactory); + + var functions = new List(); + ILogger logger = loggerFactory.CreateLogger(typeof(Kernel)) ?? NullLogger.Instance; + + foreach (string functionDirectory in Directory.EnumerateDirectories(pluginDirectory)) { - if (method.GetCustomAttribute() is not null) + var functionName = Path.GetFileName(functionDirectory); + + // Continue only if prompt template exists + var promptPath = Path.Combine(functionDirectory, PromptFile); + if (!File.Exists(promptPath)) { - ISKFunction function = SKFunction.FromNativeMethod(method, functionsInstance, pluginName, kernel.LoggerFactory); - if (result.ContainsKey(function.Name)) - { - throw new SKException("Function overloads are not supported, please differentiate function names"); - } + continue; + } - result.Add(function.Name, function); + // Load prompt configuration. Note: the configuration is optional. + var configPath = Path.Combine(functionDirectory, ConfigFile); + var promptConfig = File.Exists(configPath) ? 
+ PromptTemplateConfig.FromJson(File.ReadAllText(configPath)) : + new PromptTemplateConfig(); + promptConfig.Name = functionName; + + if (logger.IsEnabled(LogLevel.Trace)) + { + logger.LogTrace("Config {0}: {1}", functionName, JsonSerializer.Serialize(promptConfig, JsonOptionsCache.WriteIndented)); } - } - logger.LogTrace("Methods imported {0}", result.Count); + // Load prompt template + promptConfig.Template = File.ReadAllText(promptPath); + IPromptTemplate promptTemplateInstance = factory.Create(promptConfig); - foreach (KeyValuePair f in result) - { - kernel.RegisterCustomFunction(f.Value); + if (logger.IsEnabled(LogLevel.Trace)) + { + logger.LogTrace("Registering function {0}.{1} loaded from {2}", pluginName, functionName, functionDirectory); + } + + functions.Add(KernelFunctionFactory.CreateFromPrompt(promptTemplateInstance, promptConfig, loggerFactory)); } - return result; + return KernelPluginFactory.CreateFromFunctions(pluginName, null, functions); } + #endregion + #region ImportPlugin/AddFromPromptDirectory /// - /// Run a single synchronous or asynchronous . + /// Creates a plugin containing one function per child directory of the specified + /// and imports it into the 's plugin collection. /// - /// The kernel. - /// A Semantic Kernel function to run - /// Input to process - /// The to monitor for cancellation requests. The default is . - /// Result of the function - public static Task RunAsync( - this IKernel kernel, - ISKFunction skFunction, - ContextVariables? variables = null, - CancellationToken cancellationToken = default) + /// + /// + /// A plugin directory contains a set of subdirectories, one for each function in the form of a prompt. + /// This method accepts the path of the plugin directory. Each subdirectory's name is used as the function name + /// and may contain only alphanumeric chars and underscores. + /// + /// + /// The following directory structure, with pluginDirectory = "D:\plugins\OfficePlugin", + /// will create a plugin with three functions: + /// D:\plugins\ + /// |__ OfficePlugin\ # pluginDirectory + /// |__ ScheduleMeeting # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// |__ SummarizeEmailThread # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// |__ MergeWordAndExcelDocs # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// + /// + /// See https://github.com/microsoft/semantic-kernel/tree/main/samples/plugins for examples in the Semantic Kernel repository. + /// + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Path to the directory containing the plugin, e.g. "/myAppPlugins/StrategyPlugin" + /// The name of the plugin. If null, the name is derived from the directory name. + /// + /// The to use when interpreting discovered prompts into s. + /// If null, a default factory will be used. + /// + /// A containing prompt functions created from the specified directory. + public static KernelPlugin ImportPluginFromPromptDirectory( + this Kernel kernel, + string pluginDirectory, + string? pluginName = null, + IPromptTemplateFactory? promptTemplateFactory = null) { - Verify.NotNull(kernel); - return kernel.RunAsync(variables ?? 
new(), cancellationToken, skFunction); + KernelPlugin plugin = CreatePluginFromPromptDirectory(kernel, pluginDirectory, pluginName, promptTemplateFactory); + kernel.Plugins.Add(plugin); + return plugin; } /// - /// Run a pipeline composed of synchronous and asynchronous functions. + /// Creates a plugin containing one function per child directory of the specified + /// and adds it into the plugin collection. /// - /// The kernel. - /// List of functions - /// Result of the function composition - public static Task RunAsync( - this IKernel kernel, - params ISKFunction[] pipeline) + /// + /// + /// A plugin directory contains a set of subdirectories, one for each function in the form of a prompt. + /// This method accepts the path of the plugin directory. Each subdirectory's name is used as the function name + /// and may contain only alphanumeric chars and underscores. + /// + /// + /// The following directory structure, with pluginDirectory = "D:\plugins\OfficePlugin", + /// will create a plugin with three functions: + /// D:\plugins\ + /// |__ OfficePlugin\ # pluginDirectory + /// |__ ScheduleMeeting # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// |__ SummarizeEmailThread # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// |__ MergeWordAndExcelDocs # function directory + /// |__ skprompt.txt # prompt template + /// |__ config.json # settings (optional file) + /// + /// + /// See https://github.com/microsoft/semantic-kernel/tree/main/samples/plugins for examples in the Semantic Kernel repository. + /// + /// + /// The plugin collection to which the new plugin should be added. + /// Path to the directory containing the plugin, e.g. "/myAppPlugins/StrategyPlugin" + /// The name of the plugin. If null, the name is derived from the directory name. + /// + /// The to use when interpreting discovered prompts into s. + /// If null, a default factory will be used. + /// + /// The same instance as . + public static IKernelBuilderPlugins AddFromPromptDirectory( + this IKernelBuilderPlugins plugins, + string pluginDirectory, + string? pluginName = null, + IPromptTemplateFactory? promptTemplateFactory = null) { - Verify.NotNull(kernel); - return kernel.RunAsync(new ContextVariables(), pipeline); + Verify.NotNull(plugins); + + plugins.Services.AddSingleton(services => + CreatePluginFromPromptDirectory(pluginDirectory, pluginName, promptTemplateFactory, services)); + + return plugins; } + #endregion + #region InvokePromptAsync /// - /// Run a pipeline composed of synchronous and asynchronous functions. + /// Invokes a prompt specified via a prompt template. /// - /// The kernel. - /// Input to process - /// List of functions - /// Result of the function composition - public static Task RunAsync( - this IKernel kernel, - string input, - params ISKFunction[] pipeline) + /// The containing services, plugins, and other state for use throughout the operation. + /// Prompt template for the function. + /// The arguments to pass to the function's invocation, including any . + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The result of the function's execution. + /// is null. + /// is null. + /// is empty or composed entirely of whitespace. + /// The function failed to invoke successfully. + /// The 's invocation was canceled. 
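// Illustrative usage sketch (not part of the changeset above): importing a
// prompt-directory plugin and invoking prompts directly, including streaming.
// Assumes Kernel.CreateBuilder() exists and that a text/chat completion
// connector has been registered on the kernel (omitted here); the directory
// path, argument values, and prompt text are hypothetical.
using System;
using Microsoft.SemanticKernel;

Kernel kernel = Kernel.CreateBuilder().Build(); // AI connector registration omitted

KernelPlugin office = kernel.ImportPluginFromPromptDirectory(@"D:\plugins\OfficePlugin");

FunctionResult result = await kernel.InvokePromptAsync(
    "Summarize the following text: {{$input}}",
    new KernelArguments { ["input"] = "Prompt helpers are now Kernel extension methods." });
Console.WriteLine(result.GetValue<string>());

await foreach (var chunk in kernel.InvokePromptStreamingAsync("Write a haiku about unified diffs."))
{
    Console.Write(chunk);
}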
+ /// The to monitor for cancellation requests. The default is . + public static Task InvokePromptAsync( + this Kernel kernel, + string promptTemplate, + KernelArguments? arguments = null, + string? templateFormat = null, + IPromptTemplateFactory? promptTemplateFactory = null, + CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - return kernel.RunAsync(new ContextVariables(input), pipeline); + Verify.NotNullOrWhiteSpace(promptTemplate); + + KernelFunction function = KernelFunctionFromPrompt.Create( + promptTemplate, + templateFormat: templateFormat, + promptTemplateFactory: promptTemplateFactory, + loggerFactory: kernel.LoggerFactory); + + return kernel.InvokeAsync(function, arguments, cancellationToken); } /// - /// Run a pipeline composed of synchronous and asynchronous functions. + /// Invokes a prompt specified via a prompt template and returns the results of type . /// - /// The kernel. - /// Input to process - /// List of functions - /// Result of the function composition - public static Task RunAsync( - this IKernel kernel, - ContextVariables variables, - params ISKFunction[] pipeline) + /// The containing services, plugins, and other state for use throughout the operation. + /// Prompt template for the function. + /// The arguments to pass to the function's invocation, including any . + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The to monitor for cancellation requests. The default is . + /// The of the function result value. + /// is null. + /// is null. + /// is empty or composed entirely of whitespace. + /// The function failed to invoke successfully. + /// The 's invocation was canceled. + public static Task InvokePromptAsync( + this Kernel kernel, + string promptTemplate, + KernelArguments? arguments = null, + string? templateFormat = null, + IPromptTemplateFactory? promptTemplateFactory = null, + CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - return kernel.RunAsync(variables, CancellationToken.None, pipeline); + Verify.NotNullOrWhiteSpace(promptTemplate); + + KernelFunction function = KernelFunctionFromPrompt.Create( + promptTemplate, + templateFormat: templateFormat, + promptTemplateFactory: promptTemplateFactory, + loggerFactory: kernel.LoggerFactory); + + return kernel.InvokeAsync(function, arguments, cancellationToken); } /// - /// Run a pipeline composed of synchronous and asynchronous functions. + /// Invokes a prompt specified via a prompt template and returns the results of type . /// - /// The kernel. + /// The containing services, plugins, and other state for use throughout the operation. + /// Prompt template for the function. + /// The arguments to pass to the function's invocation, including any . + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// + /// The of the function result value. + /// is null. + /// is null. + /// is empty or composed entirely of whitespace. + /// The function failed to invoke successfully. + /// The 's invocation was canceled. + [EditorBrowsable(EditorBrowsableState.Never)] + public static Task InvokePromptAsync( + this Kernel kernel, + string promptTemplate, + KernelArguments? arguments, + string? templateFormat, + IPromptTemplateFactory? 
promptTemplateFactory) + { + return InvokePromptAsync( + kernel, + promptTemplate, + arguments, + templateFormat, + promptTemplateFactory, + CancellationToken.None); + } + #endregion + + #region InvokePromptStreamingAsync + /// + /// Invokes a prompt specified via a prompt template and streams its results. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Prompt template for the function. + /// The arguments to pass to the function's invocation, including any . + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// /// The to monitor for cancellation requests. The default is . - /// List of functions - /// Result of the function composition - public static Task RunAsync( - this IKernel kernel, - CancellationToken cancellationToken, - params ISKFunction[] pipeline) + /// An for streaming the results of the function's invocation. + /// is null. + /// is null. + /// is empty or composed entirely of whitespace. + /// + /// The function will not be invoked until an enumerator is retrieved from the returned + /// and its iteration initiated via an initial call to . + /// + public static IAsyncEnumerable InvokePromptStreamingAsync( + this Kernel kernel, + string promptTemplate, + KernelArguments? arguments = null, + string? templateFormat = null, + IPromptTemplateFactory? promptTemplateFactory = null, + CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - return kernel.RunAsync(new ContextVariables(), cancellationToken, pipeline); + Verify.NotNullOrWhiteSpace(promptTemplate); + + KernelFunction function = KernelFunctionFromPrompt.Create( + promptTemplate, + templateFormat: templateFormat, + promptTemplateFactory: promptTemplateFactory, + loggerFactory: kernel.LoggerFactory); + + return function.InvokeStreamingAsync(kernel, arguments, cancellationToken); } /// - /// Run a pipeline composed of synchronous and asynchronous functions. + /// Invokes a prompt specified via a prompt template and streams its results of type . /// - /// The kernel. - /// Input to process + /// The containing services, plugins, and other state for use throughout the operation. + /// Prompt template for the function. + /// The arguments to pass to the function's invocation, including any . + /// The template format of . This must be provided if is not null. + /// + /// The to use when interpreting the into a . + /// If null, a default factory will be used. + /// /// The to monitor for cancellation requests. The default is . - /// List of functions - /// Result of the function composition - public static Task RunAsync( - this IKernel kernel, - string input, - CancellationToken cancellationToken, - params ISKFunction[] pipeline) + /// An for streaming the results of the function's invocation. + /// is null. + /// is null. + /// is empty or composed entirely of whitespace. + /// + /// The function will not be invoked until an enumerator is retrieved from the returned + /// and its iteration initiated via an initial call to . + /// + public static IAsyncEnumerable InvokePromptStreamingAsync( + this Kernel kernel, + string promptTemplate, + KernelArguments? arguments = null, + string? templateFormat = null, + IPromptTemplateFactory? 
promptTemplateFactory = null, + CancellationToken cancellationToken = default) { Verify.NotNull(kernel); - return kernel.RunAsync(new ContextVariables(input), cancellationToken, pipeline); + Verify.NotNullOrWhiteSpace(promptTemplate); + + KernelFunction function = KernelFunctionFromPrompt.Create( + promptTemplate, + templateFormat: templateFormat, + promptTemplateFactory: promptTemplateFactory, + loggerFactory: kernel.LoggerFactory); + + return function.InvokeStreamingAsync(kernel, arguments, cancellationToken); + } + #endregion + + #region Build for IKernelBuilder + /// Constructs a new instance of using all of the settings configured on the builder. + /// The new instance. + /// + /// Every call to produces a new instance. The resulting + /// instances will not share the same plugins collection or services provider (unless there are no services). + /// + public static Kernel Build(this IKernelBuilder builder) + { + Verify.NotNull(builder); + + if (builder is KernelBuilder kb && !kb.AllowBuild) + { + throw new InvalidOperationException( + "Build is not permitted on instances returned from AddKernel. " + + "Resolve the Kernel from the service provider."); + } + + IServiceProvider serviceProvider = EmptyServiceProvider.Instance; + if (builder.Services is { Count: > 0 } services) + { + // This is a workaround for Microsoft.Extensions.DependencyInjection's GetKeyedServices not currently supporting + // enumerating all services for a given type regardless of key. + // https://github.com/dotnet/runtime/issues/91466 + // We need this support to, for example, allow IServiceSelector to pick from multiple named instances of an AI + // service based on their characteristics. Until that is addressed, we work around it by injecting as a service all + // of the keys used for a given type, such that Kernel can then query for this dictionary and enumerate it. This means + // that such functionality will work when KernelBuilder is used to build the kernel but not when the IServiceProvider + // is created via other means, such as if Kernel is directly created by DI. However, it allows us to create the APIs + // the way we want them for the longer term and then subsequently fix the implementation when M.E.DI is fixed. + Dictionary> typeToKeyMappings = new(); + foreach (ServiceDescriptor serviceDescriptor in services) + { + if (!typeToKeyMappings.TryGetValue(serviceDescriptor.ServiceType, out HashSet? keys)) + { + typeToKeyMappings[serviceDescriptor.ServiceType] = keys = new(); + } + + keys.Add(serviceDescriptor.ServiceKey); + } + services.AddKeyedSingleton(Kernel.KernelServiceTypeToKeyMappings, typeToKeyMappings); + + serviceProvider = services.BuildServiceProvider(); + } + + return new Kernel(serviceProvider); } + #endregion } diff --git a/dotnet/src/SemanticKernel.Core/Memory/MemoryBuilder.cs b/dotnet/src/SemanticKernel.Core/Memory/MemoryBuilder.cs new file mode 100644 index 000000000000..05c70271b78a --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/Memory/MemoryBuilder.cs @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Embeddings; + +namespace Microsoft.SemanticKernel.Memory; + +/// +/// A builder for Memory plugin. +/// +[Experimental("SKEXP0003")] +public sealed class MemoryBuilder +{ + private Func? _memoryStoreFactory = null; + private Func? 
_embeddingGenerationFactory = null; + private HttpClient? _httpClient; + private ILoggerFactory _loggerFactory = NullLoggerFactory.Instance; + + /// + /// Build a new instance of using the settings passed so far. + /// + /// Instance of . + public ISemanticTextMemory Build() + { + var memoryStore = this._memoryStoreFactory?.Invoke() ?? + throw new KernelException($"{nameof(IMemoryStore)} dependency was not provided. Use {nameof(WithMemoryStore)} method."); + + var embeddingGeneration = this._embeddingGenerationFactory?.Invoke() ?? + throw new KernelException($"{nameof(ITextEmbeddingGenerationService)} dependency was not provided. Use {nameof(WithTextEmbeddingGeneration)} method."); + + return new SemanticTextMemory(memoryStore, embeddingGeneration); + } + + /// + /// Add a logger factory. + /// + /// The to use for logging. If null, no logging will be performed. + /// Updated Memory builder including the logger factory. + public MemoryBuilder WithLoggerFactory(ILoggerFactory loggerFactory) + { + Verify.NotNull(loggerFactory); + this._loggerFactory = loggerFactory; + return this; + } + + /// + /// Add an HttpClient. + /// + /// to add. + /// Updated Memory builder including the client. + public MemoryBuilder WithHttpClient(HttpClient httpClient) + { + Verify.NotNull(httpClient); + this._httpClient = httpClient; + return this; + } + + /// + /// Add memory store. + /// + /// Store to add. + /// Updated Memory builder including the memory store. + public MemoryBuilder WithMemoryStore(IMemoryStore store) + { + Verify.NotNull(store); + this._memoryStoreFactory = () => store; + return this; + } + + /// + /// Add memory store factory. + /// + /// The store factory. + /// Updated Memory builder including the memory store. + public MemoryBuilder WithMemoryStore(Func factory) where TStore : IMemoryStore + { + Verify.NotNull(factory); + this._memoryStoreFactory = () => factory(this._loggerFactory); + return this; + } + + /// + /// Add memory store factory. + /// + /// The store factory. + /// Updated Memory builder including the memory store. + public MemoryBuilder WithMemoryStore(Func factory) where TStore : IMemoryStore + { + Verify.NotNull(factory); + this._memoryStoreFactory = () => factory(this._loggerFactory, this._httpClient); + return this; + } + + /// + /// Add text embedding generation. + /// + /// The text embedding generation. + /// Updated Memory builder including the text embedding generation. + public MemoryBuilder WithTextEmbeddingGeneration(ITextEmbeddingGenerationService textEmbeddingGeneration) + { + Verify.NotNull(textEmbeddingGeneration); + this._embeddingGenerationFactory = () => textEmbeddingGeneration; + return this; + } + + /// + /// Add text embedding generation. + /// + /// The text embedding generation factory. + /// Updated Memory builder including the text embedding generation. + public MemoryBuilder WithTextEmbeddingGeneration( + Func factory) where TEmbeddingGeneration : ITextEmbeddingGenerationService + { + Verify.NotNull(factory); + this._embeddingGenerationFactory = () => factory(this._loggerFactory, this._httpClient); + return this; + } +} diff --git a/dotnet/src/SemanticKernel.Core/Memory/MemoryConfiguration.cs b/dotnet/src/SemanticKernel.Core/Memory/MemoryConfiguration.cs deleted file mode 100644 index fb08aeb3cb93..000000000000 --- a/dotnet/src/SemanticKernel.Core/Memory/MemoryConfiguration.cs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
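// Illustrative usage sketch (not part of the changeset above): wiring an
// ISemanticTextMemory with the new MemoryBuilder and exercising it. The store
// and embedding-service placeholders must come from other packages (e.g. a
// volatile store and an OpenAI embedding connector); the variable, collection,
// and id names are hypothetical, and these APIs are marked experimental
// (SKEXP0003), so a matching NoWarn/pragma may be required.
using System;
using Microsoft.SemanticKernel.Embeddings;
using Microsoft.SemanticKernel.Memory;

IMemoryStore store = /* e.g. a volatile or vector-database store */ null!;
ITextEmbeddingGenerationService embeddings = /* e.g. an embedding connector */ null!;

ISemanticTextMemory memory = new MemoryBuilder()
    .WithMemoryStore(store)
    .WithTextEmbeddingGeneration(embeddings)
    .Build();

await memory.SaveInformationAsync(collection: "facts", text: "MemoryBuilder pairs a store with an embedding service.", id: "fact-1");

await foreach (var match in memory.SearchAsync("facts", "How is memory assembled?", limit: 1))
{
    Console.WriteLine(match.Metadata.Text);
}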
- -using System; -using System.ComponentModel; -using System.Diagnostics.CodeAnalysis; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Memory; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of IKernel -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Kernel extension to configure the semantic memory with custom settings -/// -public static class MemoryConfiguration -{ - /// - /// Set the semantic memory to use the given memory storage and embeddings service. - /// - /// Kernel instance - /// Memory storage - /// Kernel service id for embedding generation - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static void UseMemory(this IKernel kernel, IMemoryStore storage, string? embeddingsServiceId = null) - { - var embeddingGenerator = kernel.GetService(embeddingsServiceId); - - UseMemory(kernel, embeddingGenerator, storage); - } - - /// - /// Set the semantic memory to use the given memory storage and embedding generator. - /// - /// Kernel instance - /// Embedding generator - /// Memory storage - [SuppressMessage("Reliability", "CA2000:Dispose objects before losing scope", Justification = "The embeddingGenerator object is disposed by the kernel")] - [Obsolete("Memory functionality will be placed in separate Microsoft.SemanticKernel.Plugins.Memory package. This will be removed in a future release. See sample dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs in the semantic-kernel repository.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public static void UseMemory(this IKernel kernel, ITextEmbeddingGeneration embeddingGenerator, IMemoryStore storage) - { - Verify.NotNull(storage); - Verify.NotNull(embeddingGenerator); - - kernel.RegisterMemory(new SemanticTextMemory(storage, embeddingGenerator)); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Memory/SemanticTextMemory.cs b/dotnet/src/SemanticKernel.Core/Memory/SemanticTextMemory.cs index 32fae7dba086..d66523109d63 100644 --- a/dotnet/src/SemanticKernel.Core/Memory/SemanticTextMemory.cs +++ b/dotnet/src/SemanticKernel.Core/Memory/SemanticTextMemory.cs @@ -2,11 +2,12 @@ using System; using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI.Embeddings; +using Microsoft.SemanticKernel.Embeddings; namespace Microsoft.SemanticKernel.Memory; @@ -14,9 +15,10 @@ namespace Microsoft.SemanticKernel.Memory; /// Implementation of . Provides methods to save, retrieve, and search for text information /// in a semantic memory store. /// +[Experimental("SKEXP0003")] public sealed class SemanticTextMemory : ISemanticTextMemory { - private readonly ITextEmbeddingGeneration _embeddingGenerator; + private readonly ITextEmbeddingGenerationService _embeddingGenerator; private readonly IMemoryStore _storage; /// @@ -26,7 +28,7 @@ public sealed class SemanticTextMemory : ISemanticTextMemory /// The text embedding generator to use for generating embeddings. 
public SemanticTextMemory( IMemoryStore storage, - ITextEmbeddingGeneration embeddingGenerator) + ITextEmbeddingGenerationService embeddingGenerator) { this._embeddingGenerator = embeddingGenerator; this._storage = storage; @@ -39,9 +41,10 @@ public async Task SaveInformationAsync( string id, string? description = null, string? additionalMetadata = null, + Kernel? kernel = null, CancellationToken cancellationToken = default) { - var embedding = await this._embeddingGenerator.GenerateEmbeddingAsync(text, cancellationToken).ConfigureAwait(false); + var embedding = await this._embeddingGenerator.GenerateEmbeddingAsync(text, kernel, cancellationToken).ConfigureAwait(false); MemoryRecord data = MemoryRecord.LocalRecord( id: id, text: text, description: description, additionalMetadata: additionalMetadata, embedding: embedding); @@ -61,9 +64,10 @@ public async Task SaveReferenceAsync( string externalSourceName, string? description = null, string? additionalMetadata = null, + Kernel? kernel = null, CancellationToken cancellationToken = default) { - var embedding = await this._embeddingGenerator.GenerateEmbeddingAsync(text, cancellationToken).ConfigureAwait(false); + var embedding = await this._embeddingGenerator.GenerateEmbeddingAsync(text, kernel, cancellationToken).ConfigureAwait(false); var data = MemoryRecord.ReferenceRecord(externalId: externalId, sourceName: externalSourceName, description: description, additionalMetadata: additionalMetadata, embedding: embedding); @@ -80,6 +84,7 @@ public async Task SaveReferenceAsync( string collection, string key, bool withEmbedding = false, + Kernel? kernel = null, CancellationToken cancellationToken = default) { MemoryRecord? record = await this._storage.GetAsync(collection, key, withEmbedding, cancellationToken).ConfigureAwait(false); @@ -93,6 +98,7 @@ public async Task SaveReferenceAsync( public async Task RemoveAsync( string collection, string key, + Kernel? kernel = null, CancellationToken cancellationToken = default) { await this._storage.RemoveAsync(collection, key, cancellationToken).ConfigureAwait(false); @@ -105,9 +111,10 @@ public async IAsyncEnumerable SearchAsync( int limit = 1, double minRelevanceScore = 0.0, bool withEmbeddings = false, + Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { - ReadOnlyMemory queryEmbedding = await this._embeddingGenerator.GenerateEmbeddingAsync(query, cancellationToken).ConfigureAwait(false); + ReadOnlyMemory queryEmbedding = await this._embeddingGenerator.GenerateEmbeddingAsync(query, kernel, cancellationToken).ConfigureAwait(false); IAsyncEnumerable<(MemoryRecord, double)> results = this._storage.GetNearestMatchesAsync( collectionName: collection, @@ -124,7 +131,7 @@ public async IAsyncEnumerable SearchAsync( } /// - public async Task> GetCollectionsAsync(CancellationToken cancellationToken = default) + public async Task> GetCollectionsAsync(Kernel? kernel = null, CancellationToken cancellationToken = default) { return await this._storage.GetCollectionsAsync(cancellationToken).ToListAsync(cancellationToken).ConfigureAwait(false); } diff --git a/dotnet/src/SemanticKernel.Core/Orchestration/ContextVariablesConverter.cs b/dotnet/src/SemanticKernel.Core/Orchestration/ContextVariablesConverter.cs deleted file mode 100644 index 6dfbdaffd5f5..000000000000 --- a/dotnet/src/SemanticKernel.Core/Orchestration/ContextVariablesConverter.cs +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Orchestration; - -/// -/// Converter for to/from JSON. -/// -public class ContextVariablesConverter : JsonConverter -{ - /// - /// Read the JSON and convert to ContextVariables. - /// - /// The JSON reader. - /// The type to convert. - /// The JSON serializer options. - /// The deserialized . - public override ContextVariables Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - var keyValuePairs = JsonSerializer.Deserialize>>(ref reader, options); - var context = new ContextVariables(); - - foreach (var kvp in keyValuePairs!) - { - if (string.IsNullOrWhiteSpace(kvp.Key)) - { - // Json deserialization behaves differently in different versions of .NET. In some cases, the above "Deserialize" call - // throws on a null key, and in others it does not. This check is to ensure that we throw in all cases. - throw new JsonException("'Key' property cannot be null or empty."); - } - - context.Set(kvp.Key, kvp.Value); - } - - return context; - } - - /// - /// Write the ContextVariables to JSON. - /// - /// The JSON writer. - /// The to write. - /// The JSON serializer options. - public override void Write(Utf8JsonWriter writer, ContextVariables value, JsonSerializerOptions options) - { - writer.WriteStartArray(); - - foreach (var kvp in value) - { - writer.WriteStartObject(); - writer.WriteString("Key", kvp.Key); - writer.WriteString("Value", kvp.Value); - writer.WriteEndObject(); - } - - writer.WriteEndArray(); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Orchestration/FunctionRunner.cs b/dotnet/src/SemanticKernel.Core/Orchestration/FunctionRunner.cs deleted file mode 100644 index bb89b166f867..000000000000 --- a/dotnet/src/SemanticKernel.Core/Orchestration/FunctionRunner.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Linq; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Orchestration; - -/// -/// Function runner implementation. -/// -internal class FunctionRunner : IFunctionRunner -{ - private readonly IKernel _kernel; - - /// - /// Initializes a new instance of the class. - /// - /// The kernel instance. - public FunctionRunner(IKernel kernel) - { - this._kernel = kernel; - } - - /// - public async Task RunAsync(ISKFunction skFunction, ContextVariables? variables = null, CancellationToken cancellationToken = default) - { - return (await this._kernel.RunAsync(skFunction, variables, cancellationToken).ConfigureAwait(false)) - .FunctionResults.First(); - } - - /// - public Task RunAsync(string pluginName, string functionName, ContextVariables? variables = null, CancellationToken cancellationToken = default) - { - var function = this._kernel.Functions.GetFunction(pluginName, functionName); - return this.RunAsync(function, variables, cancellationToken); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Planning/IPlan.cs b/dotnet/src/SemanticKernel.Core/Planning/IPlan.cs deleted file mode 100644 index e4437ef1e27d..000000000000 --- a/dotnet/src/SemanticKernel.Core/Planning/IPlan.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Planning; - -/// -/// Interface for standard Semantic Kernel callable plan. 
-/// -[Obsolete("This interface is obsoleted, use ISKFunction interface instead")] -public interface IPlan : ISKFunction -{ -} diff --git a/dotnet/src/SemanticKernel.Core/Planning/InstrumentedPlan.cs b/dotnet/src/SemanticKernel.Core/Planning/InstrumentedPlan.cs deleted file mode 100644 index 83c087f6f7aa..000000000000 --- a/dotnet/src/SemanticKernel.Core/Planning/InstrumentedPlan.cs +++ /dev/null @@ -1,182 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Planning; - -/// -/// Standard Semantic Kernel callable plan with instrumentation. -/// -internal sealed class InstrumentedPlan : ISKFunction -{ - /// - public string Name => this._plan.Name; - - /// - public string PluginName => this._plan.PluginName; - - /// - public string Description => this._plan.Description; - - /// - /// Initialize a new instance of the class. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public InstrumentedPlan( - ISKFunction plan, - ILoggerFactory? loggerFactory = null) - { - this._plan = plan; - this._logger = loggerFactory is not null ? loggerFactory.CreateLogger(typeof(InstrumentedPlan)) : NullLogger.Instance; - } - - /// - public FunctionView Describe() - { - return this._plan.Describe(); - } - - /// - public async Task InvokeAsync( - SKContext context, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - return await this.InvokeWithInstrumentationAsync(() => - this._plan.InvokeAsync(context, requestSettings, cancellationToken)).ConfigureAwait(false); - } - - #region private ================================================================================ - - private readonly ISKFunction _plan; - private readonly ILogger _logger; - - /// - /// Instance of for plan-related metrics. - /// - private static readonly Meter s_meter = new(typeof(Plan).FullName); - - /// - /// Instance of to measure and track the time of plan execution. - /// - private static readonly Histogram s_executionTimeHistogram = - s_meter.CreateHistogram( - name: "SK.Plan.Execution.ExecutionTime", - unit: "ms", - description: "Duration of plan execution"); - - /// - /// Instance of to keep track of the total number of plan executions. - /// - private static readonly Counter s_executionTotalCounter = - s_meter.CreateCounter( - name: "SK.Plan.Execution.ExecutionTotal", - description: "Total number of plan executions"); - - /// - /// Instance of to keep track of the number of successful plan executions. - /// - private static readonly Counter s_executionSuccessCounter = - s_meter.CreateCounter( - name: "SK.Plan.Execution.ExecutionSuccess", - description: "Number of successful plan executions"); - - /// - /// Instance of to keep track of the number of failed plan executions. - /// - private static readonly Counter s_executionFailureCounter = - s_meter.CreateCounter( - name: "SK.Plan.Execution.ExecutionFailure", - description: "Number of failed plan executions"); - - /// - /// Wrapper for instrumentation to be used in multiple invocation places. - /// - /// Delegate to instrument. 
- private async Task InvokeWithInstrumentationAsync(Func> func) - { - this._logger.LogInformation("Plan execution started."); - - var stopwatch = new Stopwatch(); - stopwatch.Start(); - - FunctionResult result; - - try - { - result = await func().ConfigureAwait(false); - } - catch (Exception ex) - { - this._logger.LogWarning("Plan execution status: {Status}", "Failed"); - this._logger.LogError(ex, "Plan execution exception details: {Message}", ex.Message); - - s_executionFailureCounter.Add(1); - throw; - } - finally - { - stopwatch.Stop(); - s_executionTotalCounter.Add(1); - s_executionTimeHistogram.Record(stopwatch.ElapsedMilliseconds); - } - - this._logger.LogInformation("Plan execution status: {Status}", "Success"); - this._logger.LogInformation("Plan execution finished in {ExecutionTime}ms", stopwatch.ElapsedMilliseconds); - - s_executionSuccessCounter.Add(1); - - return result; - } - - #endregion - - #region Obsolete ======================================================================= - - /// - [Obsolete("Use ISKFunction.RequestSettingsFactory instead. This will be removed in a future release.")] - public AIRequestSettings? RequestSettings => this._plan.RequestSettings; - - /// - [Obsolete("Use ISKFunction.SetAIRequestSettingsFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIConfiguration(AIRequestSettings? requestSettings) => - this._plan.SetAIConfiguration(requestSettings); - - /// - [Obsolete("Use ISKFunction.SetAIServiceFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIService(Func serviceFactory) => - this._plan.SetAIService(serviceFactory); - - /// - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use ISKFunction.PluginName instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public string SkillName => this._plan.PluginName; - - /// - [Obsolete("Kernel no longer differentiates between Semantic and Native functions. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public bool IsSemantic => this._plan.IsSemantic; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultSkillCollection(IReadOnlyFunctionCollection skills) => this; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultFunctionCollection(IReadOnlyFunctionCollection functions) => this; - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/Planning/KernelPlanExtensions.cs b/dotnet/src/SemanticKernel.Core/Planning/KernelPlanExtensions.cs deleted file mode 100644 index 6918130cfed9..000000000000 --- a/dotnet/src/SemanticKernel.Core/Planning/KernelPlanExtensions.cs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Planning; - -#pragma warning disable IDE0130 -// ReSharper disable once CheckNamespace - Using NS of IKernel -namespace Microsoft.SemanticKernel; -#pragma warning restore IDE0130 - -/// -/// Extension methods for running plans using a kernel -/// -public static class KernelPlanExtensions -{ - /// - /// Import a plan into the kernel - /// - /// Kernel instance to use - /// Plan to import - /// Function definition for the plan - public static ISKFunction ImportPlan(this IKernel kernel, Plan plan) - { - return kernel.RegisterCustomFunction(plan); - } - - /// - /// Import a plan into the kernel - /// - /// Kernel instance to use - /// Json representation of the plan - /// Function definition for the plan - public static ISKFunction ImportPlanFromJson(this IKernel kernel, string json) - { - return kernel.RegisterCustomFunction(Plan.FromJson(json, kernel.Functions)); - } - - /// - /// Run the next step in a plan asynchronously - /// - /// Kernel instance to use - /// Plan to run - /// The to monitor for cancellation requests. The default is . - /// Result of the plan execution - public static Task StepAsync(this IKernel kernel, Plan plan, CancellationToken cancellationToken = default) - { - return kernel.StepAsync(plan.State, plan, cancellationToken); - } - - /// - /// Run the next step in a plan asynchronously - /// - /// Kernel instance to use - /// Input to use - /// Plan to run - /// The to monitor for cancellation requests. The default is . - public static Task StepAsync(this IKernel kernel, string input, Plan plan, CancellationToken cancellationToken = default) - { - return kernel.StepAsync(new ContextVariables(input), plan, cancellationToken); - } - - /// - /// Run the next step in a plan asynchronously - /// - /// Kernel instance to use - /// Input to process - /// Plan to run - /// The to monitor for cancellation requests. The default is . - /// Result of the plan execution - public static Task StepAsync(this IKernel kernel, ContextVariables variables, Plan plan, CancellationToken cancellationToken = default) - { - return plan.RunNextStepAsync(kernel, variables, cancellationToken); - } -} diff --git a/dotnet/src/SemanticKernel.Core/Planning/Plan.cs b/dotnet/src/SemanticKernel.Core/Planning/Plan.cs deleted file mode 100644 index ba64d6aaa3bc..000000000000 --- a/dotnet/src/SemanticKernel.Core/Planning/Plan.cs +++ /dev/null @@ -1,669 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Diagnostics; -using System.Linq; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Text.RegularExpressions; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.Planning; - -/// -/// Standard Semantic Kernel callable plan. -/// Plan is used to create trees of s. 
-/// -[DebuggerDisplay("{DebuggerDisplay,nq}")] -public sealed class Plan : ISKFunction -{ - /// - /// State of the plan - /// - [JsonPropertyName("state")] - [JsonConverter(typeof(ContextVariablesConverter))] - public ContextVariables State { get; } = new(); - - /// - /// Steps of the plan - /// - [JsonPropertyName("steps")] - public IReadOnlyList Steps => this._steps.AsReadOnly(); - - /// - /// Parameters for the plan, used to pass information to the next step - /// - [JsonPropertyName("parameters")] - [JsonConverter(typeof(ContextVariablesConverter))] - public ContextVariables Parameters { get; set; } = new(); - - /// - /// Outputs for the plan, used to pass information to the caller - /// - [JsonPropertyName("outputs")] - public IList Outputs { get; set; } = new List(); - - /// - /// Gets whether the plan has a next step. - /// - [JsonIgnore] - public bool HasNextStep => this.NextStepIndex < this.Steps.Count; - - /// - /// Gets the next step index. - /// - [JsonPropertyName("next_step_index")] - public int NextStepIndex { get; private set; } - - #region ISKFunction implementation - - /// - [JsonPropertyName("name")] - public string Name { get; set; } = string.Empty; - - /// - [JsonPropertyName("plugin_name")] - public string PluginName { get; set; } = string.Empty; - - /// - [JsonPropertyName("description")] - public string Description { get; set; } = string.Empty; - - /// - [JsonPropertyName("model_settings")] - public List? ModelSettings { get; private set; } - - #endregion ISKFunction implementation - - /// - /// Initializes a new instance of the class with a goal description. - /// - /// The goal of the plan used as description. - public Plan(string goal) - { - this.Name = GetRandomPlanName(); - this.Description = goal; - this.PluginName = nameof(Plan); - } - - /// - /// Initializes a new instance of the class with a goal description and steps. - /// - /// The goal of the plan used as description. - /// The steps to add. - public Plan(string goal, params ISKFunction[] steps) : this(goal) - { - this.AddSteps(steps); - } - - /// - /// Initializes a new instance of the class with a goal description and steps. - /// - /// The goal of the plan used as description. - /// The steps to add. - public Plan(string goal, params Plan[] steps) : this(goal) - { - this.AddSteps(steps); - } - - /// - /// Initializes a new instance of the class with a function. - /// - /// The function to execute. - public Plan(ISKFunction function) - { - this.SetFunction(function); - } - - /// - /// Initializes a new instance of the class with a function and steps. - /// - /// The name of the plan. - /// The name of the plugin. - /// The description of the plan. - /// The index of the next step. - /// The state of the plan. - /// The parameters of the plan. - /// The outputs of the plan. - /// The steps of the plan. - [JsonConstructor] - public Plan( - string name, - string pluginName, - string description, - int nextStepIndex, - ContextVariables state, - ContextVariables parameters, - IList outputs, - IReadOnlyList steps) - { - this.Name = name; - this.PluginName = pluginName; - this.Description = description; - this.NextStepIndex = nextStepIndex; - this.State = state; - this.Parameters = parameters; - this.Outputs = outputs; - this._steps.Clear(); - this.AddSteps(steps.ToArray()); - } - - /// - /// Deserialize a JSON string into a Plan object. - /// TODO: the context should never be null, it's required internally - /// - /// JSON string representation of a Plan - /// The collection of available functions.. 
- /// Whether to require functions to be registered. Only used when context is not null. - /// An instance of a Plan object. - /// If Context is not supplied, plan will not be able to execute. - public static Plan FromJson(string json, IReadOnlyFunctionCollection? functions = null, bool requireFunctions = true) - { - var plan = JsonSerializer.Deserialize(json, new JsonSerializerOptions { IncludeFields = true }) ?? new Plan(string.Empty); - - if (functions != null) - { - plan = SetAvailableFunctions(plan, functions, requireFunctions); - } - - return plan; - } - - /// - /// Get JSON representation of the plan. - /// - /// Whether to emit indented JSON - /// Plan serialized using JSON format - public string ToJson(bool indented = false) - { - return JsonSerializer.Serialize(this, new JsonSerializerOptions { WriteIndented = indented }); - } - - /// - /// Adds one or more existing plans to the end of the current plan as steps. - /// - /// The plans to add as steps to the current plan. - /// - /// When you add a plan as a step to the current plan, the steps of the added plan are executed after the steps of the current plan have completed. - /// - public void AddSteps(params Plan[] steps) - { - this._steps.AddRange(steps); - } - - /// - /// Adds one or more new steps to the end of the current plan. - /// - /// The steps to add to the current plan. - /// - /// When you add a new step to the current plan, it is executed after the previous step in the plan has completed. Each step can be a function call or another plan. - /// - public void AddSteps(params ISKFunction[] steps) - { - this._steps.AddRange(steps.Select(step => step is Plan plan ? plan : new Plan(step))); - } - - /// - /// Runs the next step in the plan using the provided kernel instance and variables. - /// - /// The kernel instance to use for executing the plan. - /// The variables to use for the execution of the plan. - /// The to monitor for cancellation requests. The default is . - /// A task representing the asynchronous execution of the plan's next step. - /// - /// This method executes the next step in the plan using the specified kernel instance and context variables. - /// The context variables contain the necessary information for executing the plan, such as the functions and logger. - /// The method returns a task representing the asynchronous execution of the plan's next step. - /// - public Task RunNextStepAsync(IKernel kernel, ContextVariables variables, CancellationToken cancellationToken = default) - { - var context = kernel.CreateNewContext(variables); - - return this.InvokeNextStepAsync(context, cancellationToken); - } - - /// - /// Invoke the next step of the plan - /// - /// Context to use - /// The to monitor for cancellation requests. The default is . 
- /// The updated plan - /// If an error occurs while running the plan - public async Task InvokeNextStepAsync(SKContext context, CancellationToken cancellationToken = default) - { - if (this.HasNextStep) - { - var step = this.Steps[this.NextStepIndex]; - - // Merge the state with the current context variables for step execution - var functionVariables = this.GetNextStepVariables(context.Variables, step); - - // Execute the step - var result = await context.Runner.RunAsync(step, functionVariables, cancellationToken).ConfigureAwait(false); - - var resultValue = result.Context.Result.Trim(); - - #region Update State - - // Update state with result - this.State.Update(resultValue); - - // Update Plan Result in State with matching outputs (if any) - if (this.Outputs.Intersect(step.Outputs).Any()) - { - if (this.State.TryGetValue(DefaultResultKey, out string? currentPlanResult)) - { - this.State.Set(DefaultResultKey, $"{currentPlanResult}\n{resultValue}"); - } - else - { - this.State.Set(DefaultResultKey, resultValue); - } - } - - // Update state with outputs (if any) - foreach (var item in step.Outputs) - { - if (result.Context.Variables.TryGetValue(item, out string? val)) - { - this.State.Set(item, val); - } - else - { - this.State.Set(item, resultValue); - } - } - - #endregion Update State - - this.NextStepIndex++; - } - - return this; - } - - #region ISKFunction implementation - - /// - public FunctionView Describe() - { - if (this.Function is not null) - { - return this.Function.Describe(); - } - - // The parameter mapping definitions from Plan -> Function - var stepParameters = this.Steps.SelectMany(s => s.Parameters); - - // The parameter descriptions from the Function - var stepDescriptions = this.Steps.SelectMany(s => s.Describe().Parameters); - - // The parameters for the Plan - var parameters = this.Parameters.Select(p => - { - var matchingParameter = stepParameters.FirstOrDefault(sp => sp.Value.Equals($"${p.Key}", StringComparison.OrdinalIgnoreCase)); - var stepDescription = stepDescriptions.FirstOrDefault(sd => sd.Name.Equals(matchingParameter.Key, StringComparison.OrdinalIgnoreCase)); - - return new ParameterView(p.Key, stepDescription?.Description, stepDescription?.DefaultValue, stepDescription?.Type, stepDescription?.IsRequired); - } - ).ToList(); - - return new(this.Name, this.PluginName, this.Description, parameters); - } - - /// - public async Task InvokeAsync( - SKContext context, - AIRequestSettings? requestSettings = null, - CancellationToken cancellationToken = default) - { - var result = new FunctionResult(this.Name, this.PluginName, context); - - if (this.Function is not null) - { - // Merge state with the current context variables. - // Then filter the variables to only those needed for the next step. - // This is done to prevent the function from having access to variables that it shouldn't. 
- AddVariablesToContext(this.State, context); - var functionVariables = this.GetNextStepVariables(context.Variables, this); - var functionContext = context.Clone(functionVariables, context.Functions); - - // Execute the step - result = await this.Function - .WithInstrumentation(context.LoggerFactory) - .InvokeAsync(functionContext, requestSettings, cancellationToken) - .ConfigureAwait(false); - this.UpdateFunctionResultWithOutputs(result); - } - else - { - // loop through steps and execute until completion - while (this.HasNextStep) - { - AddVariablesToContext(this.State, context); - await this.InvokeNextStepAsync(context, cancellationToken).ConfigureAwait(false); - this.UpdateContextWithOutputs(context); - - result = new FunctionResult(this.Name, this.PluginName, context, context.Result); - this.UpdateFunctionResultWithOutputs(result); - } - } - - return result; - } - - #endregion ISKFunction implementation - - /// - /// Expand variables in the input string. - /// - /// Variables to use for expansion. - /// Input string to expand. - /// Expanded string. - internal string ExpandFromVariables(ContextVariables variables, string input) - { - var result = input; - var matches = s_variablesRegex.Matches(input); - var orderedMatches = matches.Cast().Select(m => m.Groups["var"].Value).Distinct().OrderByDescending(m => m.Length); - - foreach (var varName in orderedMatches) - { - if (variables.TryGetValue(varName, out string? value) || this.State.TryGetValue(varName, out value)) - { - result = result.Replace($"${varName}", value); - } - } - - return result; - } - - /// - /// Set functions for a plan and its steps. - /// - /// Plan to set functions for. - /// The collection of available functions. - /// Whether to throw an exception if a function is not found. - /// The plan with functions set. - private static Plan SetAvailableFunctions(Plan plan, IReadOnlyFunctionCollection functions, bool requireFunctions = true) - { - if (plan.Steps.Count == 0) - { - Verify.NotNull(functions); - - if (functions.TryGetFunction(plan.PluginName, plan.Name, out var planFunction)) - { - plan.SetFunction(planFunction); - } - else if (requireFunctions) - { - throw new SKException($"Function '{plan.PluginName}.{plan.Name}' not found in function collection"); - } - } - else - { - foreach (var step in plan.Steps) - { - SetAvailableFunctions(step, functions, requireFunctions); - } - } - - return plan; - } - - /// - /// Add any missing variables from a plan state variables to the context. - /// - private static void AddVariablesToContext(ContextVariables vars, SKContext context) - { - // Loop through vars and add anything missing to context - foreach (var item in vars) - { - if (!context.Variables.TryGetValue(item.Key, out string? value) || string.IsNullOrEmpty(value)) - { - context.Variables.Set(item.Key, item.Value); - } - } - } - - /// - /// Update the context with the outputs from the current step. - /// - /// The context to update. - /// The updated context. - private SKContext UpdateContextWithOutputs(SKContext context) - { - var resultString = this.State.TryGetValue(DefaultResultKey, out string? result) ? result : this.State.ToString(); - context.Variables.Update(resultString); - - // copy previous step's variables to the next step - foreach (var item in this._steps[this.NextStepIndex - 1].Outputs) - { - if (this.State.TryGetValue(item, out string? 
val)) - { - context.Variables.Set(item, val); - } - else - { - context.Variables.Set(item, resultString); - } - } - - return context; - } - - /// - /// Update the function result with the outputs from the current state. - /// - /// The function result to update. - /// The updated function result. - private FunctionResult UpdateFunctionResultWithOutputs(FunctionResult functionResult) - { - foreach (var output in this.Outputs) - { - if (this.State.TryGetValue(output, out var value)) - { - functionResult.Metadata[output] = value; - } - else if (functionResult.Context.Variables.TryGetValue(output, out var val)) - { - functionResult.Metadata[output] = val; - } - } - - return functionResult; - } - - /// - /// Get the variables for the next step in the plan. - /// - /// The current context variables. - /// The next step in the plan. - /// The context variables for the next step in the plan. - private ContextVariables GetNextStepVariables(ContextVariables variables, Plan step) - { - // Priority for Input - // - Parameters (expand from variables if needed) - // - SKContext.Variables - // - Plan.State - // - Empty if sending to another plan - // - Plan.Description - - var input = string.Empty; - if (!string.IsNullOrEmpty(step.Parameters.Input)) - { - input = this.ExpandFromVariables(variables, step.Parameters.Input!); - } - else if (!string.IsNullOrEmpty(variables.Input)) - { - input = variables.Input; - } - else if (!string.IsNullOrEmpty(this.State.Input)) - { - input = this.State.Input; - } - else if (step.Steps.Count > 0) - { - input = string.Empty; - } - else if (!string.IsNullOrEmpty(this.Description)) - { - input = this.Description; - } - - var stepVariables = new ContextVariables(input); - - // Priority for remaining stepVariables is: - // - Function Parameters (pull from variables or state by a key value) - // - Step Parameters (pull from variables or state by a key value) - // - All other variables. These are carried over in case the function wants access to the ambient content. - var functionParameters = step.Describe(); - foreach (var param in functionParameters.Parameters) - { - if (param.Name.Equals(ContextVariables.MainKey, StringComparison.OrdinalIgnoreCase)) - { - continue; - } - - if (variables.TryGetValue(param.Name, out string? value)) - { - stepVariables.Set(param.Name, value); - } - else if (this.State.TryGetValue(param.Name, out value) && !string.IsNullOrEmpty(value)) - { - stepVariables.Set(param.Name, value); - } - } - - foreach (var item in step.Parameters) - { - // Don't overwrite variable values that are already set - if (stepVariables.ContainsKey(item.Key)) - { - continue; - } - - var expandedValue = this.ExpandFromVariables(variables, item.Value); - if (!expandedValue.Equals(item.Value, StringComparison.OrdinalIgnoreCase)) - { - stepVariables.Set(item.Key, expandedValue); - } - else if (variables.TryGetValue(item.Key, out string? 
value)) - { - stepVariables.Set(item.Key, value); - } - else if (this.State.TryGetValue(item.Key, out value)) - { - stepVariables.Set(item.Key, value); - } - else - { - stepVariables.Set(item.Key, expandedValue); - } - } - - foreach (KeyValuePair item in variables) - { - if (!stepVariables.ContainsKey(item.Key)) - { - stepVariables.Set(item.Key, item.Value); - } - } - - return stepVariables; - } - - private void SetFunction(ISKFunction function) - { - this.Function = function; - this.Name = function.Name; - this.PluginName = function.PluginName; - this.Description = function.Description; - -#pragma warning disable CS0618 // Type or member is obsolete - this.RequestSettings = function.RequestSettings; - this.IsSemantic = function.IsSemantic; -#pragma warning restore CS0618 // Type or member is obsolete - } - - private static string GetRandomPlanName() => "plan" + Guid.NewGuid().ToString("N"); - - private ISKFunction? Function { get; set; } - - private readonly List _steps = new(); - - private static readonly Regex s_variablesRegex = new(@"\$(?\w+)"); - - private const string DefaultResultKey = "PLAN.RESULT"; - - [DebuggerBrowsable(DebuggerBrowsableState.Never)] - private string DebuggerDisplay - { - get - { - string display = this.Description; - - if (!string.IsNullOrWhiteSpace(this.Name)) - { - display = $"{this.Name} ({display})"; - } - - if (this._steps.Count > 0) - { - display += $", Steps = {this._steps.Count}, NextStep = {this.NextStepIndex}"; - } - - return display; - } - } - - #region Obsolete - - /// - [Obsolete("Use ISKFunction.ModelSettings instead. This will be removed in a future release.")] - public AIRequestSettings? RequestSettings { get; private set; } - - /// - [Obsolete("Use ISKFunction.SetAIServiceFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIService(Func serviceFactory) - { - return this.Function is not null ? this.Function.SetAIService(serviceFactory) : this; - } - - /// - [Obsolete("Use ISKFunction.SetAIRequestSettingsFactory instead. This will be removed in a future release.")] - public ISKFunction SetAIConfiguration(AIRequestSettings? requestSettings) - { - return this.Function is not null ? this.Function.SetAIConfiguration(requestSettings) : this; - } - - /// - [JsonIgnore] - [Obsolete("Methods, properties and classes which include Skill in the name have been renamed. Use ISKFunction.PluginName instead. This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public string SkillName => this.PluginName; - - /// - [JsonIgnore] - [Obsolete("Kernel no longer differentiates between Semantic and Native functions. 
This will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public bool IsSemantic { get; private set; } - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultSkillCollection(IReadOnlyFunctionCollection skills) => this; - - /// - [Obsolete("This method is a nop and will be removed in a future release.")] - [EditorBrowsable(EditorBrowsableState.Never)] - public ISKFunction SetDefaultFunctionCollection(IReadOnlyFunctionCollection functions) => this; - - #endregion -} diff --git a/dotnet/src/SemanticKernel.Core/Planning/PlanExtensions.cs b/dotnet/src/SemanticKernel.Core/Planning/PlanExtensions.cs deleted file mode 100644 index 456cf87a0799..000000000000 --- a/dotnet/src/SemanticKernel.Core/Planning/PlanExtensions.cs +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Linq; -using Microsoft.Extensions.Logging; - -namespace Microsoft.SemanticKernel.Planning; - -/// -/// Extension methods for type. -/// -public static class PlanExtensions -{ - /// - /// Constructs string representation of without sensitive data. - /// - /// Instance of for string construction. - /// Optional indentation. - public static string ToSafePlanString(this Plan plan, string indent = " ") - { - string planString = string.Join("\n", plan.Steps.Select(step => - { - if (step.Steps.Count == 0) - { - string pluginName = step.PluginName; - string stepName = step.Name; - - return $"{indent}{indent}- {string.Join(".", pluginName, stepName)}"; - } - - return step.ToSafePlanString(indent + indent); - })); - - return planString; - } - - /// - /// Constructs string representation of . - /// - /// Instance of for string construction. - /// Optional indentation. - public static string ToPlanString(this Plan plan, string indent = " ") - { - string planString = string.Join("\n", plan.Steps.Select(step => - { - if (step.Steps.Count == 0) - { - string pluginName = step.PluginName; - string stepName = step.Name; - - string parameters = string.Join(" ", step.Parameters.Select(param => $"{param.Key}='{param.Value}'")); - if (!string.IsNullOrEmpty(parameters)) - { - parameters = $" {parameters}"; - } - - string? outputs = step.Outputs.FirstOrDefault(); - if (!string.IsNullOrEmpty(outputs)) - { - outputs = $" => {outputs}"; - } - - return $"{indent}{indent}- {string.Join(".", pluginName, stepName)}{parameters}{outputs}"; - } - - return step.ToPlanString(indent + indent); - })); - - return planString; - } - - /// - /// Returns decorated instance of with enabled instrumentation. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - [Obsolete("Use concrete class Plan WithInstrumentation instead")] - public static IPlan WithInstrumentation(this IPlan plan, ILoggerFactory? loggerFactory = null) - { - throw new NotSupportedException("This method is obsolete, use concrete class Plan WithInstrumentation instead"); - } - - /// - /// Returns decorated instance of with plan enabled instrumentation. - /// - /// Instance of to decorate. - /// The to use for logging. If null, no logging will be performed. - public static ISKFunction WithInstrumentation(this Plan plan, ILoggerFactory? 
loggerFactory = null) - { - return new InstrumentedPlan(plan, loggerFactory); - } -} diff --git a/dotnet/src/SemanticKernel.Core/PromptTemplate/AggregatorPromptTemplateFactory.cs b/dotnet/src/SemanticKernel.Core/PromptTemplate/AggregatorPromptTemplateFactory.cs new file mode 100644 index 000000000000..f088e356d25f --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/PromptTemplate/AggregatorPromptTemplateFactory.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides a which aggregates multiple prompt template factories. +/// +/// +/// Attempts via to create an from a +/// will iterate through the aggregated factories, using +/// the result from the first to successfully handle the supplied configuration. +/// /// +public sealed class AggregatorPromptTemplateFactory : IPromptTemplateFactory +{ + private readonly IPromptTemplateFactory?[] _promptTemplateFactories; + + /// Initializes the instance. + /// Ordered instances to aggregate. + public AggregatorPromptTemplateFactory(params IPromptTemplateFactory[] promptTemplateFactories) + { + Verify.NotNullOrEmpty(promptTemplateFactories); + foreach (IPromptTemplateFactory promptTemplateFactory in promptTemplateFactories) + { + Verify.NotNull(promptTemplateFactory, nameof(promptTemplateFactories)); + } + + this._promptTemplateFactories = promptTemplateFactories; + } + + /// + public bool TryCreate(PromptTemplateConfig templateConfig, [NotNullWhen(true)] out IPromptTemplate? result) + { + Verify.NotNull(templateConfig); + + foreach (var promptTemplateFactory in this._promptTemplateFactories) + { + if (promptTemplateFactory?.TryCreate(templateConfig, out result) is true && result is not null) + { + return true; + } + } + + result = null; + return false; + } +} diff --git a/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplate.cs b/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplate.cs new file mode 100644 index 000000000000..806f7c4d5ac1 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplate.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.TemplateEngine; + +namespace Microsoft.SemanticKernel; + +/// +/// Given a prompt, that might contain references to variables and functions: +/// - Get the list of references +/// - Resolve each reference +/// - Variable references are resolved using the context variables +/// - Function references are resolved invoking those functions +/// - Functions can be invoked passing in variables +/// - Functions do not receive the context variables, unless specified using a special variable +/// - Functions can be invoked in order and in parallel so the context variables must be immutable when invoked within the template +/// +internal sealed class KernelPromptTemplate : IPromptTemplate +{ + /// + /// Constructor for PromptTemplate. + /// + /// Prompt template configuration + /// Logger factory + public KernelPromptTemplate(PromptTemplateConfig promptConfig, ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNull(promptConfig, nameof(promptConfig)); + Verify.NotNull(promptConfig.Template, nameof(promptConfig.Template)); + + loggerFactory ??= NullLoggerFactory.Instance; + this._logger = loggerFactory.CreateLogger(typeof(KernelPromptTemplate)) ?? NullLogger.Instance; + + this._blocks = this.ExtractBlocks(promptConfig, loggerFactory); + AddMissingInputVariables(this._blocks, promptConfig); + } + + /// + public Task RenderAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(kernel); + + return this.RenderAsync(this._blocks, kernel, arguments, cancellationToken); + } + + #region private + private readonly ILogger _logger; + private readonly List _blocks; + + /// + /// Given a prompt template string, extract all the blocks (text, variables, function calls) + /// + /// A list of all the blocks, ie the template tokenized in text, variables and function calls + private List ExtractBlocks(PromptTemplateConfig config, ILoggerFactory loggerFactory) + { + string templateText = config.Template; + + if (this._logger.IsEnabled(LogLevel.Trace)) + { + this._logger.LogTrace("Extracting blocks from template: {0}", templateText); + } + + var blocks = new TemplateTokenizer(loggerFactory).Tokenize(templateText); + + foreach (var block in blocks) + { + if (!block.IsValid(out var error)) + { + throw new KernelException(error); + } + } + + return blocks; + } + + /// + /// Given a list of blocks render each block and compose the final result. + /// + /// Template blocks generated by ExtractBlocks. + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments. + /// The to monitor for cancellation requests. The default is . + /// The prompt template ready to be used for an AI request. + private async Task RenderAsync(List blocks, Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken = default) + { + var result = new StringBuilder(); + foreach (var block in blocks) + { + switch (block) + { + case ITextRendering staticBlock: + result.Append(InternalTypeConverter.ConvertToString(staticBlock.Render(arguments), kernel.Culture)); + break; + + case ICodeRendering dynamicBlock: + result.Append(InternalTypeConverter.ConvertToString(await dynamicBlock.RenderCodeAsync(kernel, arguments, cancellationToken).ConfigureAwait(false), kernel.Culture)); + break; + + default: + Debug.Fail($"Unexpected block type {block?.GetType()}, the block doesn't have a rendering method"); + break; + } + } + + return result.ToString(); + } + + /// + /// Augments 's with any variables + /// not already contained there but that are referenced in the prompt template. + /// + private static void AddMissingInputVariables(List blocks, PromptTemplateConfig config) + { + // Add all of the existing input variables to our known set. We'll avoid adding any + // dynamically discovered input variables with the same name. + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (InputVariable iv in config.InputVariables) + { + seen.Add(iv.Name); + } + + // Enumerate every block in the template, adding any variables that are referenced. + foreach (Block block in blocks) + { + switch (block.Type) + { + case BlockTypes.Variable: + // Add all variables from variable blocks, e.g. "{{$a}}". 
+ AddIfMissing(((VarBlock)block).Name); + break; + + case BlockTypes.Code: + foreach (Block codeBlock in ((CodeBlock)block).Blocks) + { + switch (codeBlock.Type) + { + case BlockTypes.Variable: + // Add all variables from code blocks, e.g. "{{p.bar $b}}". + AddIfMissing(((VarBlock)codeBlock).Name); + break; + + case BlockTypes.NamedArg when ((NamedArgBlock)codeBlock).VarBlock is { } varBlock: + // Add all variables from named arguments, e.g. "{{p.bar b = $b}}". + AddIfMissing(varBlock.Name); + break; + } + } + break; + } + } + + void AddIfMissing(string variableName) + { + if (!string.IsNullOrEmpty(variableName) && seen.Add(variableName)) + { + config.InputVariables.Add(new InputVariable { Name = variableName }); + } + } + } + #endregion +} diff --git a/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplateFactory.cs b/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplateFactory.cs new file mode 100644 index 000000000000..47f9dd4ff4c1 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplateFactory.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides an implementation of for the template format. +/// +/// +/// This is used as the default when no other factory is provided. +/// +public sealed class KernelPromptTemplateFactory : IPromptTemplateFactory +{ + private readonly ILoggerFactory _loggerFactory; + + /// + /// Initializes a new instance of the class. + /// + /// The to use for logging. If null, no logging will be performed. + public KernelPromptTemplateFactory(ILoggerFactory? loggerFactory = null) + { + this._loggerFactory = loggerFactory ?? NullLoggerFactory.Instance; + } + + /// + public bool TryCreate(PromptTemplateConfig templateConfig, [NotNullWhen(true)] out IPromptTemplate? result) + { + Verify.NotNull(templateConfig); + + if (templateConfig.TemplateFormat.Equals(PromptTemplateConfig.SemanticKernelTemplateFormat, System.StringComparison.Ordinal)) + { + result = new KernelPromptTemplate(templateConfig, this._loggerFactory); + return true; + } + + result = null; + return false; + } +} diff --git a/dotnet/src/SemanticKernel.Core/Reliability/NullHttpRetryHandler.cs b/dotnet/src/SemanticKernel.Core/Reliability/NullHttpRetryHandler.cs deleted file mode 100644 index a12528df0526..000000000000 --- a/dotnet/src/SemanticKernel.Core/Reliability/NullHttpRetryHandler.cs +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Reliability; - -/// -/// A factory for creating instances of . -/// -public class NullHttpRetryHandlerFactory : IDelegatingHandlerFactory -{ - /// - /// Creates a new instance of . - /// - /// The to use for logging. If null, no logging will be performed. - /// A new instance of . - public DelegatingHandler Create(ILoggerFactory? loggerFactory) - { - return new NullHttpRetryHandler(); - } -} - -/// -/// A HTTP retry handler that does not retry. -/// -/// -/// This handler is useful when you want to disable retry functionality in your HTTP requests. 
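To make the intent of the new prompt template types concrete, here is a small usage sketch that is not part of this diff: KernelPromptTemplateFactory handles the default semantic-kernel format, AggregatorPromptTemplateFactory lets additional factories (for example a Handlebars one from a separate package) be tried in order, and rendering needs only a Kernel plus KernelArguments. The bare new Kernel() is an assumption that suffices here because the template references only variables, no functions.

using System;
using Microsoft.SemanticKernel;

// Aggregate one or more factories; the first one whose TryCreate succeeds wins
IPromptTemplateFactory factory = new AggregatorPromptTemplateFactory(new KernelPromptTemplateFactory());

var config = new PromptTemplateConfig { Template = "Hello {{$name}}!" };

if (factory.TryCreate(config, out IPromptTemplate? template))
{
    // Constructing the template also back-fills config.InputVariables with "name",
    // courtesy of AddMissingInputVariables in KernelPromptTemplate.
    var kernel = new Kernel();
    string prompt = await template.RenderAsync(kernel, new KernelArguments { ["name"] = "Ada" });
    Console.WriteLine(prompt); // "Hello Ada!"
}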
-/// -public class NullHttpRetryHandler : DelegatingHandler -{ -} diff --git a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj index 116f47301d39..d9850f4bf796 100644 --- a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj +++ b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj @@ -7,6 +7,8 @@ netstandard2.0 true true + $(NoWarn);SKEXP0004 + true @@ -27,11 +29,16 @@ - + + + + + + \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.Core/Services/AIServiceCollection.cs b/dotnet/src/SemanticKernel.Core/Services/AIServiceCollection.cs deleted file mode 100644 index abb861239cb3..000000000000 --- a/dotnet/src/SemanticKernel.Core/Services/AIServiceCollection.cs +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; - -namespace Microsoft.SemanticKernel.Services; - -/// -/// A collection of AI services that can be registered and built into an . -/// -[System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1711:Identifiers should not have incorrect suffix")] -public class AIServiceCollection -{ - // A constant key for the default service - private const string DefaultKey = "__DEFAULT__"; - - // A dictionary that maps a service type to a nested dictionary of names and service instances or factories - private readonly Dictionary>> _services = new(); - - // A dictionary that maps a service type to the name of the default service - private readonly Dictionary _defaultIds = new(); - - /// - /// Registers a singleton service instance with the default name. - /// - /// The type of the service. - /// The service instance. - /// The service instance is null. - public void SetService(T service) where T : IAIService - => this.SetService(DefaultKey, service, true); - - /// - /// Registers a singleton service instance with an optional name and default flag. - /// - /// The type of the service. - /// The name of the service, or null for the default service. - /// The service instance. - /// Whether the service should be the default for its type. - /// The service instance is null. - /// The name is empty or whitespace. - public void SetService(string? name, T service, bool setAsDefault = false) where T : IAIService - => this.SetService(name, (() => service), setAsDefault); - - /// - /// Registers a transient service factory with the default name. - /// - /// The type of the service. - /// The factory function to create the service instance. - /// The factory function is null. - public void SetService(Func factory) where T : IAIService - => this.SetService(DefaultKey, factory, true); - - /// - /// Registers a transient service factory with an optional name and default flag. - /// - /// The type of the service. - /// The name of the service, or null for the default service. - /// The factory function to create the service instance. - /// Whether the service should be the default for its type. - /// The factory function is null. - /// The name is empty or whitespace. - public void SetService(string? 
name, Func factory, bool setAsDefault = false) where T : IAIService - { - // Validate the factory function - if (factory == null) - { - throw new ArgumentNullException(nameof(factory)); - } - - // Get or create the nested dictionary for the service type - var type = typeof(T); - if (!this._services.TryGetValue(type, out var namedServices)) - { - namedServices = new(); - this._services[type] = namedServices; - } - - // Set as the default if the name is empty, or the default flag is true, - // or there is no default name for the service type. - if (name == null || setAsDefault || !this.HasDefault()) - { - // Update the default name for the service type - this._defaultIds[type] = name ?? DefaultKey; - } - - var objectFactory = factory as Func; - - // Register the factory with the given name - namedServices[name ?? DefaultKey] = objectFactory - ?? throw new InvalidOperationException("Service factory is an invalid format"); - } - - /// - /// Builds an from the registered services and default names. - /// - /// An containing the registered services. - public IAIServiceProvider Build() - { - // Create a clone of the services and defaults Dictionaries to prevent further changes - // by the services provider. - var servicesClone = this._services.ToDictionary( - typeCollection => typeCollection.Key, - typeCollection => typeCollection.Value.ToDictionary( - service => service.Key, - service => service.Value)); - - var defaultsClone = this._defaultIds.ToDictionary( - typeDefault => typeDefault.Key, - typeDefault => typeDefault.Value); - - return new AIServiceProvider(servicesClone, defaultsClone); - } - - private bool HasDefault() where T : IAIService - => this._defaultIds.TryGetValue(typeof(T), out var defaultName) - && !string.IsNullOrEmpty(defaultName); -} diff --git a/dotnet/src/SemanticKernel.Core/Services/AIServiceProvider.cs b/dotnet/src/SemanticKernel.Core/Services/AIServiceProvider.cs deleted file mode 100644 index d3cec01a3031..000000000000 --- a/dotnet/src/SemanticKernel.Core/Services/AIServiceProvider.cs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; - -namespace Microsoft.SemanticKernel.Services; -/// -/// Provides AI services by managing a collection of named service instances. -/// -public class AIServiceProvider : NamedServiceProvider, IAIServiceProvider -{ - /// - /// Initializes a new instance of the class. - /// - /// A dictionary of service types and their corresponding named instances. - /// A dictionary of service types and their default instance names. - public AIServiceProvider(Dictionary>> services, Dictionary defaultIds) - : base(services, defaultIds) - { - } -} diff --git a/dotnet/src/SemanticKernel.Core/Services/NamedServiceProvider.cs b/dotnet/src/SemanticKernel.Core/Services/NamedServiceProvider.cs deleted file mode 100644 index ba5c903e4cd2..000000000000 --- a/dotnet/src/SemanticKernel.Core/Services/NamedServiceProvider.cs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; - -namespace Microsoft.SemanticKernel.Services; - -/// -/// Provides named services of type . Allows for the registration and retrieval of services by name. -/// -/// The type of service provided by this provider. 
-public class NamedServiceProvider : INamedServiceProvider -{ - // A dictionary that maps a service type to a nested dictionary of names and service instances or factories - private readonly Dictionary>> _services; - - // A dictionary that maps a service type to the name of the default service - private readonly Dictionary _defaultIds; - - /// - /// Initializes a new instance of the class. - /// - /// A dictionary that maps a service type to a nested dictionary of names and service instances or factories. - /// A dictionary that maps a service type to the name of the default service. - public NamedServiceProvider( - Dictionary>> services, - Dictionary defaultIds) - { - this._services = services; - this._defaultIds = defaultIds; - } - - /// - public T? GetService(string? name = null) where T : TService - { - // Return the service, casting or invoking the factory if needed - var factory = this.GetServiceFactory(name); - if (factory is Func) - { - return factory.Invoke(); - } - - return default; - } - - /// - private string? GetDefaultServiceName() where T : TService - { - // Returns the name of the default service for the given type, or null if none - var type = typeof(T); - if (this._defaultIds.TryGetValue(type, out var name)) - { - return name; - } - - return null; - } - - private Func? GetServiceFactory(string? name = null) where T : TService - { - // Get the nested dictionary for the service type - if (this._services.TryGetValue(typeof(T), out var namedServices)) - { - Func? serviceFactory = null; - - // If the name is not specified, try to load the default factory - name ??= this.GetDefaultServiceName(); - if (name != null) - { - // Check if there is a service registered with the given name - namedServices.TryGetValue(name, out serviceFactory); - } - - return serviceFactory as Func; - } - - return null; - } -} diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/Block.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/Block.cs similarity index 85% rename from dotnet/src/Extensions/TemplateEngine.Basic/Blocks/Block.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/Block.cs index 3e5205281926..e6659cd11d99 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/Block.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/Block.cs @@ -3,12 +3,12 @@ using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +namespace Microsoft.SemanticKernel.TemplateEngine; /// /// Base class for blocks parsed from a prompt template /// -public abstract class Block +internal abstract class Block { internal virtual BlockTypes Type => BlockTypes.Undefined; @@ -32,7 +32,7 @@ public abstract class Block private protected Block(string? content, ILoggerFactory? loggerFactory) { this.Content = content ?? string.Empty; - this.Logger = loggerFactory is not null ? loggerFactory.CreateLogger(this.GetType()) : NullLogger.Instance; + this.Logger = loggerFactory?.CreateLogger(this.GetType()) ?? 
NullLogger.Instance; } /// diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/BlockTypes.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/BlockTypes.cs similarity index 75% rename from dotnet/src/Extensions/TemplateEngine.Basic/Blocks/BlockTypes.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/BlockTypes.cs index 74e5833a4ad9..c4823d250812 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/BlockTypes.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/BlockTypes.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +namespace Microsoft.SemanticKernel.TemplateEngine; internal enum BlockTypes { diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/CodeBlock.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/CodeBlock.cs new file mode 100644 index 000000000000..f0f438a3b459 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/CodeBlock.cs @@ -0,0 +1,231 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel.TemplateEngine; + +#pragma warning disable CA2254 // error strings are used also internally, not just for logging +#pragma warning disable CA1031 // IsCriticalException is an internal utility and should not be used by extensions + +// ReSharper disable TemplateIsNotCompileTimeConstantProblem +internal sealed class CodeBlock : Block, ICodeRendering +{ + internal override BlockTypes Type => BlockTypes.Code; + + /// + /// Initializes a new instance of the class. + /// + /// Block content + /// The to use for logging. If null, no logging will be performed. + public CodeBlock(string? content, ILoggerFactory? loggerFactory = null) + : this(new CodeTokenizer(loggerFactory).Tokenize(content), content?.Trim(), loggerFactory) + { + } + + /// + /// Initializes a new instance of the class. + /// + /// A list of blocks + /// Block content + /// The to use for logging. If null, no logging will be performed. + public CodeBlock(List tokens, string? content, ILoggerFactory? loggerFactory = null) + : base(content?.Trim(), loggerFactory) + { + this._tokens = tokens; + } + + /// + /// Gets the list of blocks. + /// + public List Blocks => this._tokens; + + /// + public override bool IsValid(out string errorMsg) + { + errorMsg = ""; + + foreach (Block token in this._tokens) + { + if (!token.IsValid(out errorMsg)) + { + this.Logger.LogError(errorMsg); + return false; + } + } + + if (this._tokens.Count > 0 && this._tokens[0].Type == BlockTypes.NamedArg) + { + errorMsg = "Unexpected named argument found. Expected function name first."; + this.Logger.LogError(errorMsg); + return false; + } + + if (this._tokens.Count > 1 && !this.IsValidFunctionCall(out errorMsg)) + { + return false; + } + + this._validated = true; + + return true; + } + + /// + public ValueTask RenderCodeAsync(Kernel kernel, KernelArguments? 
arguments = null, CancellationToken cancellationToken = default) + { + if (!this._validated && !this.IsValid(out var error)) + { + throw new KernelException(error); + } + + if (this.Logger.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("Rendering code: `{Content}`", this.Content); + } + + return this._tokens[0].Type switch + { + BlockTypes.Value or BlockTypes.Variable => new ValueTask(((ITextRendering)this._tokens[0]).Render(arguments)), + BlockTypes.FunctionId => this.RenderFunctionCallAsync((FunctionIdBlock)this._tokens[0], kernel, arguments, cancellationToken), + _ => throw new KernelException($"Unexpected first token type: {this._tokens[0].Type:G}"), + }; + } + + #region private ================================================================================ + + private bool _validated; + private readonly List _tokens; + + private async ValueTask RenderFunctionCallAsync(FunctionIdBlock fBlock, Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) + { + // If the code syntax is {{functionName $varName}} use $varName instead of $input + // If the code syntax is {{functionName 'value'}} use "value" instead of $input + if (this._tokens.Count > 1) + { + //Cloning the original arguments to avoid side effects - arguments added to the original arguments collection as a result of rendering template variables. + arguments = this.EnrichFunctionArguments(kernel, fBlock, arguments is null ? new KernelArguments() : new KernelArguments(arguments)); + } + try + { + var result = await kernel.InvokeAsync(fBlock.PluginName, fBlock.FunctionName, arguments, cancellationToken).ConfigureAwait(false); + + return result.Value; + } + catch (Exception ex) + { + this.Logger.LogError(ex, "Function {Plugin}.{Function} execution failed with error {Error}", fBlock.PluginName, fBlock.FunctionName, ex.Message); + throw; + } + } + + private bool IsValidFunctionCall(out string errorMsg) + { + errorMsg = ""; + if (this._tokens[0].Type != BlockTypes.FunctionId) + { + errorMsg = $"Unexpected second token found: {this._tokens[1].Content}"; + this.Logger.LogError(errorMsg); + return false; + } + + if (this._tokens[1].Type is not BlockTypes.Value and not BlockTypes.Variable and not BlockTypes.NamedArg) + { + errorMsg = "The first arg of a function must be a quoted string, variable or named argument"; + this.Logger.LogError(errorMsg); + return false; + } + + for (int i = 2; i < this._tokens.Count; i++) + { + if (this._tokens[i].Type is not BlockTypes.NamedArg) + { + errorMsg = $"Functions only support named arguments after the first argument. Argument {i} is not named."; + this.Logger.LogError(errorMsg); + return false; + } + } + + return true; + } + + /// + /// Adds function arguments. If the first argument is not a named argument, it is added to the arguments collection as the 'input' argument. + /// Additionally, for the prompt expression - {{MyPlugin.MyFunction p1=$v1}}, the value of the v1 variable will be resolved from the original arguments collection. + /// Then, the new argument, p1, will be added to the arguments. + /// + /// Kernel instance. + /// Function block. + /// The prompt rendering arguments. + /// The function arguments. + /// Occurs when any argument other than the first is not a named argument. 
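Reviewer note: a minimal sketch of the argument-enrichment behaviour documented above. MyPlugin, MyFunction, p1, p2 and v1 are illustrative names, not part of this change; only KernelArguments and the rules described in the XML docs are taken from the patch.

    // Prompt expression: {{MyPlugin.MyFunction "hello" p2=$v1}}
    // Assume MyFunction declares parameters (string p1, string p2).
    var arguments = new KernelArguments { ["v1"] = "world" };

    // EnrichFunctionArguments clones the collection and then:
    //   - binds the positional value "hello" to the first parameter name, p1
    //   - resolves $v1 from the original arguments and adds it as p2
    // so the function is invoked with p1 = "hello" and p2 = "world".
    // Passing p1="x" alongside the positional value would instead throw the
    // ArgumentException for ambiguity implemented just below.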
+ private KernelArguments EnrichFunctionArguments(Kernel kernel, FunctionIdBlock fBlock, KernelArguments arguments) + { + var firstArg = this._tokens[1]; + + // Sensitive data, logging as trace, disabled by default + if (this.Logger.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("Passing variable/value: `{Content}`", firstArg.Content); + } + + // Get the function metadata + var functionMetadata = kernel.Plugins.GetFunction(fBlock.PluginName, fBlock.FunctionName).Metadata; + + // Check if the function has parameters to be set + if (functionMetadata.Parameters.Count == 0) + { + throw new ArgumentException($"Function {fBlock.PluginName}.{fBlock.FunctionName} does not take any arguments but it is being called in the template with {this._tokens.Count - 1} arguments."); + } + + string? firstPositionalParameterName = null; + object? firstPositionalInputValue = null; + var namedArgsStartIndex = 1; + + if (firstArg.Type is not BlockTypes.NamedArg) + { + // Gets the function first parameter name + firstPositionalParameterName = functionMetadata.Parameters[0].Name; + + firstPositionalInputValue = ((ITextRendering)this._tokens[1]).Render(arguments); + // Type check is avoided and marshalling is done by the function itself + + // Keep previous trust information when updating the input + arguments[firstPositionalParameterName] = firstPositionalInputValue; + namedArgsStartIndex++; + } + + for (int i = namedArgsStartIndex; i < this._tokens.Count; i++) + { + // When casting fails because the block isn't a NamedArg, arg is null + if (this._tokens[i] is not NamedArgBlock arg) + { + var errorMsg = "Functions support up to one positional argument"; + this.Logger.LogError(errorMsg); + throw new KernelException($"Unexpected first token type: {this._tokens[i].Type:G}"); + } + + // Sensitive data, logging as trace, disabled by default + if (this.Logger.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("Passing variable/value: `{Content}`", arg.Content); + } + + // Check if the positional parameter clashes with a named parameter + if (firstPositionalParameterName is not null && string.Equals(firstPositionalParameterName, arg.Name, StringComparison.OrdinalIgnoreCase)) + { + throw new ArgumentException($"Ambiguity found as a named parameter '{arg.Name}' cannot be set for the first parameter when there is also a positional value: '{firstPositionalInputValue}' provided. 
Function: {fBlock.PluginName}.{fBlock.FunctionName}"); + } + + arguments[arg.Name] = arg.GetValue(arguments); + } + + return arguments; + } + #endregion +} +// ReSharper restore TemplateIsNotCompileTimeConstantProblem +#pragma warning restore CA2254 diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/FunctionIdBlock.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/FunctionIdBlock.cs similarity index 82% rename from dotnet/src/Extensions/TemplateEngine.Basic/Blocks/FunctionIdBlock.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/FunctionIdBlock.cs index 216da326c688..8a416174ea60 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/FunctionIdBlock.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/FunctionIdBlock.cs @@ -3,10 +3,8 @@ using System.Linq; using System.Text.RegularExpressions; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +namespace Microsoft.SemanticKernel.TemplateEngine; internal sealed class FunctionIdBlock : Block, ITextRendering { @@ -23,7 +21,7 @@ public FunctionIdBlock(string? text, ILoggerFactory? loggerFactory = null) if (functionNameParts.Length > 2) { this.Logger.LogError("Invalid function name `{FunctionName}`.", this.Content); - throw new SKException($"Invalid function name `{this.Content}`. A function name can contain at most one dot separating the plugin name from the function name"); + throw new KernelException($"Invalid function name `{this.Content}`. A function name can contain at most one dot separating the plugin name from the function name"); } if (functionNameParts.Length == 2) @@ -54,7 +52,8 @@ public override bool IsValid(out string errorMsg) return true; } - public string Render(ContextVariables? variables) + /// + public object? Render(KernelArguments? arguments) { return this.Content; } diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs new file mode 100644 index 000000000000..d1dcea92bf50 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ICodeRendering.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.TemplateEngine; + +/// +/// Interface of dynamic blocks that need async IO to be rendered. +/// +internal interface ICodeRendering +{ + /// + /// Render the block using the given context, potentially using external I/O. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The arguments + /// The to monitor for cancellation requests. The default is . + /// Rendered content + public ValueTask RenderCodeAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs new file mode 100644 index 000000000000..87044226e4d1 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ITextRendering.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft. All rights reserved. +namespace Microsoft.SemanticKernel.TemplateEngine; + +/// +/// Interface of static blocks that don't need async IO to be rendered. 
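Reviewer note: a rough summary of how the two new internal rendering contracts divide the block types, based only on the code in this change:

    // ITextRendering (synchronous, no I/O):
    //   TextBlock / ValBlock -> return their literal content or quoted value
    //   VarBlock             -> look the variable up in KernelArguments, null when missing
    //   FunctionIdBlock      -> return its content (the plugin.function identifier)
    //
    // ICodeRendering (asynchronous, may call into the kernel):
    //   CodeBlock.RenderCodeAsync(kernel, arguments, cancellationToken)
    //     -> renders a value/variable directly, or invokes the referenced
    //        function and returns its result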
+/// +internal interface ITextRendering +{ + /// + /// Render the block using only the given arguments. + /// + /// Optional arguments the block rendering + /// Rendered content + public object? Render(KernelArguments? arguments); +} diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/NamedArgBlock.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/NamedArgBlock.cs new file mode 100644 index 000000000000..2da0df2dd1b2 --- /dev/null +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/NamedArgBlock.cs @@ -0,0 +1,206 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.RegularExpressions; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel.TemplateEngine; + +/// +/// A that represents a named argument for a function call. +/// For example, in the template {{ MyPlugin.MyFunction var1="foo" }}, var1="foo" is a named arg block. +/// +internal sealed class NamedArgBlock : Block, ITextRendering +{ + /// + /// Returns the . + /// + internal override BlockTypes Type => BlockTypes.NamedArg; + + /// + /// Gets the name of the function argument. + /// + internal string Name { get; } = string.Empty; + + /// + /// VarBlock associated with this named argument. + /// + internal VarBlock? VarBlock { get; } + + /// + /// Initializes a new instance of the class. + /// + /// Raw text parsed from the prompt template. + /// The to use for logging. If null, no logging will be performed. + /// + public NamedArgBlock(string? text, ILoggerFactory? logger = null) + : base(NamedArgBlock.TrimWhitespace(text), logger) + { + if (!TryGetNameAndValue(this.Content, out string argName, out string argValue)) + { + this.Logger.LogError("Invalid named argument `{Text}`", text); + throw new KernelException($"A function named argument must contain a name and value separated by a '{Symbols.NamedArgBlockSeparator}' character."); + } + + this.Name = argName; + this._argNameAsVarBlock = new VarBlock($"{Symbols.VarPrefix}{argName}"); + + if (argValue[0] == Symbols.VarPrefix) + { + this.VarBlock = new VarBlock(argValue); + } + else + { + this._valBlock = new ValBlock(argValue); + } + } + + /// + /// Attempts to extract the name and value of a named argument block from a string + /// + /// String from which to extract a name and value + /// Name extracted from argument block, when successful. Empty string otherwise. + /// Value extracted from argument block, when successful. Empty string otherwise. + /// true when a name and value are successfully extracted from the given text, false otherwise + internal static bool TryGetNameAndValue(string? text, out string name, out string value) + { + name = string.Empty; + value = string.Empty; + + if (!string.IsNullOrEmpty(text)) + { + string[] argBlockParts = text!.Split(new char[] { Symbols.NamedArgBlockSeparator }, StringSplitOptions.RemoveEmptyEntries); + + if (argBlockParts.Length == 2) + { + name = argBlockParts[0]; + value = argBlockParts[1]; + + return true; + } + } + + return false; + } + + /// + /// Gets the rendered value of the function argument. If the value is a , the value stays the same. + /// If the value is a , the value of the variable is determined by the arguments passed in. + /// + /// Arguments to use for rendering the named argument value when the value is a . + /// + internal object? GetValue(KernelArguments? 
arguments) + { + var valueIsValidValBlock = this._valBlock != null && this._valBlock.IsValid(out var errorMessage); + if (valueIsValidValBlock) + { + return this._valBlock!.Render(arguments); + } + + var valueIsValidVarBlock = this.VarBlock != null && this.VarBlock.IsValid(out var errorMessage2); + if (valueIsValidVarBlock) + { + return this.VarBlock!.Render(arguments); + } + + return string.Empty; + } + + /// + public object? Render(KernelArguments? arguments) + { + return this.Content; + } + + /// + /// Returns whether the named arg block has valid syntax. + /// + /// An error message that gets set when the named arg block is not valid. + /// +#pragma warning disable CA2254 // error strings are used also internally, not just for logging + public override bool IsValid(out string errorMsg) + { + errorMsg = string.Empty; + if (string.IsNullOrEmpty(this.Name)) + { + errorMsg = "A named argument must have a name"; + this.Logger.LogError(errorMsg); + return false; + } + + if (this._valBlock != null && !this._valBlock.IsValid(out var valErrorMsg)) + { + errorMsg = $"There was an issue with the named argument value for '{this.Name}': {valErrorMsg}"; + this.Logger.LogError(errorMsg); + return false; + } + else if (this.VarBlock != null && !this.VarBlock.IsValid(out var variableErrorMsg)) + { + errorMsg = $"There was an issue with the named argument value for '{this.Name}': {variableErrorMsg}"; + this.Logger.LogError(errorMsg); + return false; + } + else if (this._valBlock == null && this.VarBlock == null) + { + errorMsg = "A named argument must have a value"; + this.Logger.LogError(errorMsg); + return false; + } + + // Argument names share the same validation as variables + if (!this._argNameAsVarBlock.IsValid(out var argNameErrorMsg)) + { + errorMsg = Regex.Replace(argNameErrorMsg, "a variable", "An argument", RegexOptions.IgnoreCase); + errorMsg = Regex.Replace(errorMsg, "the variable", "The argument", RegexOptions.IgnoreCase); + return false; + } + + return true; + } +#pragma warning restore CA2254 + + #region private ================================================================================ + + private readonly VarBlock _argNameAsVarBlock; + private readonly ValBlock? _valBlock; + + private static string? TrimWhitespace(string? text) + { + if (text == null) + { + return text; + } + + string[] trimmedParts = NamedArgBlock.GetTrimmedParts(text); + return (trimmedParts?.Length) switch + { + 1 => trimmedParts[0], + 2 => $"{trimmedParts[0]}{Symbols.NamedArgBlockSeparator}{trimmedParts[1]}", + _ => null, + }; + } + + private static string[] GetTrimmedParts(string? text) + { + if (text == null) + { + return System.Array.Empty(); + } + + string[] parts = text.Split(new char[] { Symbols.NamedArgBlockSeparator }, 2); + string[] result = new string[parts.Length]; + if (parts.Length > 0) + { + result[0] = parts[0].Trim(); + } + + if (parts.Length > 1) + { + result[1] = parts[1].Trim(); + } + + return result; + } + + #endregion +} diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/Symbols.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/Symbols.cs similarity index 89% rename from dotnet/src/Extensions/TemplateEngine.Basic/Blocks/Symbols.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/Symbols.cs index c0beefb7ba69..ac69be06288f 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/Symbols.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/Symbols.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
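Reviewer note: a hedged unit-test sketch for the NamedArgBlock parsing added above, assuming the test project can see internal types (as with the other block tests). The test class, method names and data are mine; TryGetNameAndValue and its semantics come from the patch.

    using Microsoft.SemanticKernel.TemplateEngine;
    using Xunit;

    public class NamedArgBlockParsingTests
    {
        [Theory]
        [InlineData("var1=\"foo\"", "var1", "\"foo\"")] // quoted value, backed by a ValBlock
        [InlineData("p1=$v1", "p1", "$v1")]             // variable value, backed by a VarBlock
        public void ItParsesNameAndValue(string text, string expectedName, string expectedValue)
        {
            Assert.True(NamedArgBlock.TryGetNameAndValue(text, out string name, out string value));
            Assert.Equal(expectedName, name);
            Assert.Equal(expectedValue, value);
        }

        [Theory]
        [InlineData("var1")]   // no separator, so no value
        [InlineData("a=b=c")]  // splitting yields three parts; only exactly two are accepted
        public void ItRejectsMalformedInput(string text)
        {
            Assert.False(NamedArgBlock.TryGetNameAndValue(text, out _, out _));
        }
    }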
-namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +namespace Microsoft.SemanticKernel.TemplateEngine; internal static class Symbols { diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/TextBlock.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/TextBlock.cs similarity index 80% rename from dotnet/src/Extensions/TemplateEngine.Basic/Blocks/TextBlock.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/TextBlock.cs index 99d7c2e5174a..9ead97f0e4c3 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/TextBlock.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/TextBlock.cs @@ -1,9 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Orchestration; -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +namespace Microsoft.SemanticKernel.TemplateEngine; internal sealed class TextBlock : Block, ITextRendering { @@ -25,7 +24,8 @@ public override bool IsValid(out string errorMsg) return true; } - public string Render(ContextVariables? variables) + /// + public object? Render(KernelArguments? arguments) { return this.Content; } diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ValBlock.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ValBlock.cs similarity index 93% rename from dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ValBlock.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ValBlock.cs index 1ae186faa6f5..0493586411b7 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/ValBlock.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/ValBlock.cs @@ -1,9 +1,8 @@ // Copyright (c) Microsoft. All rights reserved. using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Orchestration; -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +namespace Microsoft.SemanticKernel.TemplateEngine; internal sealed class ValBlock : Block, ITextRendering { @@ -61,7 +60,8 @@ public override bool IsValid(out string errorMsg) } #pragma warning restore CA2254 - public string Render(ContextVariables? variables) + /// + public object? Render(KernelArguments? arguments) { return this._value; } diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/VarBlock.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/VarBlock.cs similarity index 85% rename from dotnet/src/Extensions/TemplateEngine.Basic/Blocks/VarBlock.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/VarBlock.cs index d348d63f6276..d0b3f92405f2 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/Blocks/VarBlock.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/Blocks/VarBlock.cs @@ -2,10 +2,8 @@ using System.Text.RegularExpressions; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -namespace Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +namespace Microsoft.SemanticKernel.TemplateEngine; internal sealed class VarBlock : Block, ITextRendering { @@ -63,25 +61,26 @@ public override bool IsValid(out string errorMsg) } #pragma warning restore CA2254 - public string Render(ContextVariables? variables) + /// + public object? Render(KernelArguments? 
arguments) { - if (variables == null) { return string.Empty; } + if (arguments == null) { return null; } if (string.IsNullOrEmpty(this.Name)) { const string ErrMsg = "Variable rendering failed, the variable name is empty"; this.Logger.LogError(ErrMsg); - throw new SKException(ErrMsg); + throw new KernelException(ErrMsg); } - if (variables.TryGetValue(this.Name, out string? value)) + if (arguments.TryGetValue(this.Name, out object? value)) { return value; } this.Logger.LogWarning("Variable `{0}{1}` not found", Symbols.VarPrefix, this.Name); - return string.Empty; + return null; } private static readonly Regex s_validNameRegex = new("^[a-zA-Z0-9_]*$"); diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/CodeTokenizer.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/CodeTokenizer.cs similarity index 91% rename from dotnet/src/Extensions/TemplateEngine.Basic/CodeTokenizer.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/CodeTokenizer.cs index 954fd0083237..44206060aaf0 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/CodeTokenizer.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/CodeTokenizer.cs @@ -5,10 +5,8 @@ using System.Text; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; -namespace Microsoft.SemanticKernel.TemplateEngine.Basic; +namespace Microsoft.SemanticKernel.TemplateEngine; /// /// Simple tokenizer used for default SK template code language. @@ -87,21 +85,12 @@ public List Tokenize(string? text) // 1 char only edge case if (text.Length == 1) { - switch (nextChar) + blocks.Add(nextChar switch { - case Symbols.VarPrefix: - blocks.Add(new VarBlock(text, this._loggerFactory)); - break; - - case Symbols.DblQuote: - case Symbols.SglQuote: - blocks.Add(new ValBlock(text, this._loggerFactory)); - break; - - default: - blocks.Add(new FunctionIdBlock(text, this._loggerFactory)); - break; - } + Symbols.VarPrefix => new VarBlock(text, this._loggerFactory), + Symbols.DblQuote or Symbols.SglQuote => new ValBlock(text, this._loggerFactory), + _ => new FunctionIdBlock(text, this._loggerFactory), + }); return blocks; } @@ -231,7 +220,7 @@ public List Tokenize(string? text) namedArgValuePrefix = currentChar; if (!IsQuote((char)namedArgValuePrefix) && namedArgValuePrefix != Symbols.VarPrefix) { - throw new SKException($"Named argument values need to be prefixed with a quote or {Symbols.VarPrefix}."); + throw new KernelException($"Named argument values need to be prefixed with a quote or {Symbols.VarPrefix}."); } } currentTokenContent.Append(currentChar); @@ -245,7 +234,7 @@ public List Tokenize(string? text) { if (!spaceSeparatorFound) { - throw new SKException("Tokens must be separated by one space least"); + throw new KernelException("Tokens must be separated by one space least"); } if (IsQuote(currentChar)) @@ -303,7 +292,7 @@ public List Tokenize(string? 
text) break; case TokenTypes.None: - throw new SKException("Tokens must be separated by one space least"); + throw new KernelException("Tokens must be separated by one space least"); } return blocks; @@ -333,14 +322,13 @@ private static bool CanBeEscaped(char c) Justification = "Does not throw an exception by design.")] private static bool IsValidNamedArg(string tokenContent) { - try + if (NamedArgBlock.TryGetNameAndValue(tokenContent, out string _, out string _)) { var tokenContentAsNamedArg = new NamedArgBlock(tokenContent); - return tokenContentAsNamedArg.IsValid(out var error); - } - catch - { - return false; + + return tokenContentAsNamedArg.IsValid(out string _); } + + return false; } } diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/PromptTemplate.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/PromptTemplate.cs deleted file mode 100644 index 74b9912c9786..000000000000 --- a/dotnet/src/SemanticKernel.Core/TemplateEngine/PromptTemplate.cs +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Orchestration; - -namespace Microsoft.SemanticKernel.TemplateEngine; - -/// -/// Prompt template. -/// -public sealed class PromptTemplate : IPromptTemplate -{ - private readonly string _template; - private readonly IPromptTemplateEngine _templateEngine; - - // ReSharper disable once NotAccessedField.Local - private readonly PromptTemplateConfig _promptConfig; - - /// - /// Constructor for PromptTemplate. - /// - /// Template. - /// Prompt template configuration. - /// Kernel in which template is to take effect. - public PromptTemplate(string template, PromptTemplateConfig promptTemplateConfig, IKernel kernel) - : this(template, promptTemplateConfig, kernel.PromptTemplateEngine) - { - } - - /// - /// Constructor for PromptTemplate. - /// - /// Template. - /// Prompt template configuration. - /// Prompt template engine. - public PromptTemplate( - string template, - PromptTemplateConfig promptTemplateConfig, - IPromptTemplateEngine promptTemplateEngine) - { - this._template = template; - this._templateEngine = promptTemplateEngine; - this._promptConfig = promptTemplateConfig; - - this._params = new(() => this.InitParameters()); - } - - /// - /// The list of parameters used by the function, using JSON settings and template variables. - /// - /// List of parameters - public IReadOnlyList Parameters - => this._params.Value; - - /// - /// Render the template using the information in the context - /// - /// Kernel execution context helpers - /// The to monitor for cancellation requests. The default is . 
- /// Prompt rendered to string - public async Task RenderAsync(SKContext executionContext, CancellationToken cancellationToken) - { - return await this._templateEngine.RenderAsync(this._template, executionContext, cancellationToken).ConfigureAwait(false); - } - - private readonly Lazy> _params; - - private List InitParameters() - { - // Parameters from config.json - Dictionary result = new(this._promptConfig.Input.Parameters.Count, StringComparer.OrdinalIgnoreCase); - foreach (var p in this._promptConfig.Input.Parameters) - { - result[p.Name] = new ParameterView(p.Name, p.Description, p.DefaultValue); - } - - return result.Values.ToList(); - } -} diff --git a/dotnet/src/SemanticKernel.Core/TemplateEngine/PromptTemplateConfig.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/PromptTemplateConfig.cs deleted file mode 100644 index b3588903b41c..000000000000 --- a/dotnet/src/SemanticKernel.Core/TemplateEngine/PromptTemplateConfig.cs +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.TemplateEngine; - -/// -/// Prompt template configuration. -/// -public class PromptTemplateConfig -{ - /// - /// Input parameter for semantic functions. - /// - public class InputParameter - { - /// - /// Name of the parameter to pass to the function. - /// e.g. when using "{{$input}}" the name is "input", when using "{{$style}}" the name is "style", etc. - /// - [JsonPropertyName("name")] - [JsonPropertyOrder(1)] - public string Name { get; set; } = string.Empty; - - /// - /// Parameter description for UI apps and planner. Localization is not supported here. - /// - [JsonPropertyName("description")] - [JsonPropertyOrder(2)] - public string Description { get; set; } = string.Empty; - - /// - /// Default value when nothing is provided. - /// - [JsonPropertyName("defaultValue")] - [JsonPropertyOrder(3)] - public string DefaultValue { get; set; } = string.Empty; - } - - /// - /// Input configuration (list of all input parameters for a semantic function). - /// - public class InputConfig - { - /// - /// Gets or sets the list of input parameters. - /// - [JsonPropertyName("parameters")] - [JsonPropertyOrder(1)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public List Parameters { get; set; } = new(); - } - - /// - /// Schema - Not currently used. - /// - [JsonPropertyName("schema")] - [JsonPropertyOrder(1)] - public int Schema { get; set; } = 1; - - /// - /// Description - /// - [JsonPropertyName("description")] - [JsonPropertyOrder(2)] - public string Description { get; set; } = string.Empty; - - /// - /// Input configuration (that is, list of all input parameters). - /// - [JsonPropertyName("input")] - [JsonPropertyOrder(3)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public InputConfig Input { get; set; } = new(); - - /// - /// Model request settings. - /// Initially only a single model request settings is supported. - /// - [JsonPropertyName("models")] - [JsonPropertyOrder(4)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public List ModelSettings { get; set; } = new(); - - /// - /// Return the default - /// - public AIRequestSettings GetDefaultRequestSettings() - { - return this.ModelSettings.FirstOrDefault(); - } - - #region Obsolete - /// - /// Type, such as "completion", "embeddings", etc. 
- /// - /// TODO: use enum - [JsonPropertyName("type")] - [JsonPropertyOrder(5)] - [Obsolete("Type property is no longer required. This will be removed in a future release.")] - public string Type { get; set; } = "completion"; - - /// - /// Completion configuration parameters. - /// - [JsonPropertyName("completion")] - [JsonPropertyOrder(6)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - [Obsolete("Completion is no longer no longer supported. Use PromptTemplateConfig.ModelSettings collection instead. This will be removed in a future release.")] - public AIRequestSettings? Completion - { - get { return this.GetDefaultRequestSettings(); } - set - { - if (value is not null) - { - this.ModelSettings.Add(value); - } - } - } - - /// - /// Default AI services to use. - /// - [JsonPropertyName("default_services")] - [JsonPropertyOrder(7)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - [Obsolete("DefaultServices property is not being used. This will be removed in a future release.")] - public List DefaultServices { get; set; } = new(); - #endregion - - /// - /// Creates a prompt template configuration from JSON. - /// - /// JSON of the prompt template configuration. - /// Prompt template configuration. - /// Thrown when the deserialization returns null. - public static PromptTemplateConfig FromJson(string json) - { - var result = Json.Deserialize(json); - return result ?? throw new ArgumentException("Unable to deserialize prompt template config from argument. The deserialization returned null.", nameof(json)); - } -} diff --git a/dotnet/src/Extensions/TemplateEngine.Basic/TemplateTokenizer.cs b/dotnet/src/SemanticKernel.Core/TemplateEngine/TemplateTokenizer.cs similarity index 93% rename from dotnet/src/Extensions/TemplateEngine.Basic/TemplateTokenizer.cs rename to dotnet/src/SemanticKernel.Core/TemplateEngine/TemplateTokenizer.cs index 10d00b1e2f51..274102771df0 100644 --- a/dotnet/src/Extensions/TemplateEngine.Basic/TemplateTokenizer.cs +++ b/dotnet/src/SemanticKernel.Core/TemplateEngine/TemplateTokenizer.cs @@ -3,10 +3,8 @@ using System.Collections.Generic; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; -namespace Microsoft.SemanticKernel.TemplateEngine.Basic; +namespace Microsoft.SemanticKernel.TemplateEngine; /// /// Simple tokenizer used for default SK template language. @@ -49,7 +47,7 @@ public TemplateTokenizer(ILoggerFactory? loggerFactory = null) /// /// Text to parse /// List of blocks found in the text - public IList Tokenize(string? text) + public List Tokenize(string? text) { // An empty block consists of 4 chars: "{{}}" const int EmptyCodeBlockLength = 4; @@ -158,7 +156,7 @@ public IList Tokenize(string? text) case BlockTypes.Variable: if (codeBlocks.Count > 1) { - throw new SKException($"Invalid token detected after the variable: {contentWithoutDelimiters}"); + throw new KernelException($"Invalid token detected after the variable: {contentWithoutDelimiters}"); } blocks.Add(codeBlocks[0]); @@ -167,7 +165,7 @@ public IList Tokenize(string? text) case BlockTypes.Value: if (codeBlocks.Count > 1) { - throw new SKException($"Invalid token detected after the value: {contentWithoutDelimiters}"); + throw new KernelException($"Invalid token detected after the value: {contentWithoutDelimiters}"); } blocks.Add(codeBlocks[0]); @@ -182,7 +180,7 @@ public IList Tokenize(string? 
text) case BlockTypes.Undefined: case BlockTypes.NamedArg: default: - throw new SKException($"Code tokenizer returned an incorrect first token type {codeBlocks[0].Type:G}"); + throw new KernelException($"Code tokenizer returned an incorrect first token type {codeBlocks[0].Type:G}"); } } diff --git a/dotnet/src/SemanticKernel.Core/Text/TextChunker.cs b/dotnet/src/SemanticKernel.Core/Text/TextChunker.cs index a31a915c9233..700c011c2d6d 100644 --- a/dotnet/src/SemanticKernel.Core/Text/TextChunker.cs +++ b/dotnet/src/SemanticKernel.Core/Text/TextChunker.cs @@ -3,6 +3,7 @@ using System; using System.Collections.Generic; using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text; @@ -13,6 +14,7 @@ namespace Microsoft.SemanticKernel.Text; /// For plain text, split looking at new lines first, then periods, and so on. /// For markdown, split looking at punctuation first, and so on. /// +[Experimental("SKEXP0055")] public static class TextChunker { /// @@ -33,12 +35,9 @@ public static class TextChunker /// Maximum number of tokens per line. /// Function to count tokens in a string. If not supplied, the default counter will be used. /// List of lines. - public static List SplitPlainTextLines(string text, int maxTokensPerLine, TokenCounter? tokenCounter = null) - { - tokenCounter ??= DefaultTokenCounter; - - return InternalSplitLines(text, maxTokensPerLine, trim: true, s_plaintextSplitOptions, tokenCounter); - } + [Experimental("SKEXP0055")] + public static List SplitPlainTextLines(string text, int maxTokensPerLine, TokenCounter? tokenCounter = null) => + InternalSplitLines(text, maxTokensPerLine, trim: true, s_plaintextSplitOptions, tokenCounter); /// /// Split markdown text into lines. @@ -47,12 +46,9 @@ public static List SplitPlainTextLines(string text, int maxTokensPerLine /// Maximum number of tokens per line. /// Function to count tokens in a string. If not supplied, the default counter will be used. /// List of lines. - public static List SplitMarkDownLines(string text, int maxTokensPerLine, TokenCounter? tokenCounter = null) - { - tokenCounter ??= DefaultTokenCounter; - - return InternalSplitLines(text, maxTokensPerLine, trim: true, s_markdownSplitOptions, tokenCounter); - } + [Experimental("SKEXP0055")] + public static List SplitMarkDownLines(string text, int maxTokensPerLine, TokenCounter? tokenCounter = null) => + InternalSplitLines(text, maxTokensPerLine, trim: true, s_markdownSplitOptions, tokenCounter); /// /// Split plain text into paragraphs. @@ -63,12 +59,9 @@ public static List SplitMarkDownLines(string text, int maxTokensPerLine, /// Text to be prepended to each individual chunk. /// Function to count tokens in a string. If not supplied, the default counter will be used. /// List of paragraphs. - public static List SplitPlainTextParagraphs(List lines, int maxTokensPerParagraph, int overlapTokens = 0, string? chunkHeader = null, TokenCounter? tokenCounter = null) - { - tokenCounter ??= DefaultTokenCounter; - - return InternalSplitTextParagraphs(lines, maxTokensPerParagraph, overlapTokens, chunkHeader, (text, maxTokens) => InternalSplitLines(text, maxTokens, trim: false, s_plaintextSplitOptions, tokenCounter), tokenCounter); - } + [Experimental("SKEXP0055")] + public static List SplitPlainTextParagraphs(List lines, int maxTokensPerParagraph, int overlapTokens = 0, string? chunkHeader = null, TokenCounter? 
tokenCounter = null) => + InternalSplitTextParagraphs(lines, maxTokensPerParagraph, overlapTokens, chunkHeader, static (text, maxTokens, tokenCounter) => InternalSplitLines(text, maxTokens, trim: false, s_plaintextSplitOptions, tokenCounter), tokenCounter); /// /// Split markdown text into paragraphs. @@ -79,23 +72,21 @@ public static List SplitPlainTextParagraphs(List lines, int maxT /// Text to be prepended to each individual chunk. /// Function to count tokens in a string. If not supplied, the default counter will be used. /// List of paragraphs. - public static List SplitMarkdownParagraphs(List lines, int maxTokensPerParagraph, int overlapTokens = 0, string? chunkHeader = null, TokenCounter? tokenCounter = null) - { - tokenCounter ??= DefaultTokenCounter; + [Experimental("SKEXP0055")] + public static List SplitMarkdownParagraphs(List lines, int maxTokensPerParagraph, int overlapTokens = 0, string? chunkHeader = null, TokenCounter? tokenCounter = null) => + InternalSplitTextParagraphs(lines, maxTokensPerParagraph, overlapTokens, chunkHeader, static (text, maxTokens, tokenCounter) => InternalSplitLines(text, maxTokens, trim: false, s_markdownSplitOptions, tokenCounter), tokenCounter); - return InternalSplitTextParagraphs(lines, maxTokensPerParagraph, overlapTokens, chunkHeader, (text, maxTokens) => InternalSplitLines(text, maxTokens, trim: false, s_markdownSplitOptions, tokenCounter), tokenCounter); - } - - private static List InternalSplitTextParagraphs(List lines, int maxTokensPerParagraph, int overlapTokens, string? chunkHeader, Func> longLinesSplitter, TokenCounter tokenCounter) + [Experimental("SKEXP0055")] + private static List InternalSplitTextParagraphs(List lines, int maxTokensPerParagraph, int overlapTokens, string? chunkHeader, Func> longLinesSplitter, TokenCounter? tokenCounter) { if (maxTokensPerParagraph <= 0) { - throw new ArgumentException("maxTokensPerParagraph should be a positive number"); + throw new ArgumentException("maxTokensPerParagraph should be a positive number", nameof(maxTokensPerParagraph)); } if (maxTokensPerParagraph <= overlapTokens) { - throw new ArgumentException("overlapTokens cannot be larger than maxTokensPerParagraph"); + throw new ArgumentException("overlapTokens cannot be larger than maxTokensPerParagraph", nameof(maxTokensPerParagraph)); } if (lines.Count == 0) @@ -103,30 +94,45 @@ private static List InternalSplitTextParagraphs(List lines, int return new List(); } - var chunkHeaderTokens = chunkHeader is { Length: > 0 } ? tokenCounter(chunkHeader) : 0; + var chunkHeaderTokens = chunkHeader is { Length: > 0 } ? 
GetTokenCount(chunkHeader, tokenCounter) : 0; var adjustedMaxTokensPerParagraph = maxTokensPerParagraph - overlapTokens - chunkHeaderTokens; // Split long lines first - IEnumerable truncatedLines = lines.SelectMany(line => longLinesSplitter(line, adjustedMaxTokensPerParagraph)); + IEnumerable truncatedLines = lines.SelectMany(line => longLinesSplitter(line, adjustedMaxTokensPerParagraph, tokenCounter)); - var paragraphs = BuildParagraph(truncatedLines, adjustedMaxTokensPerParagraph, longLinesSplitter, tokenCounter); + var paragraphs = BuildParagraph(truncatedLines, adjustedMaxTokensPerParagraph, tokenCounter); var processedParagraphs = ProcessParagraphs(paragraphs, adjustedMaxTokensPerParagraph, overlapTokens, chunkHeader, longLinesSplitter, tokenCounter); return processedParagraphs; } - private static List BuildParagraph(IEnumerable truncatedLines, int maxTokensPerParagraph, Func> longLinesSplitter, TokenCounter tokenCounter) + [Experimental("SKEXP0055")] + private static List BuildParagraph(IEnumerable truncatedLines, int maxTokensPerParagraph, TokenCounter? tokenCounter) { StringBuilder paragraphBuilder = new(); List paragraphs = new(); foreach (string line in truncatedLines) { - if (paragraphBuilder.Length > 0 && tokenCounter(paragraphBuilder.ToString()) + tokenCounter(line) + 1 >= maxTokensPerParagraph) + if (paragraphBuilder.Length > 0) { - // Complete the paragraph and prepare for the next - paragraphs.Add(paragraphBuilder.ToString().Trim()); - paragraphBuilder.Clear(); + string? paragraph = null; + + int currentCount = GetTokenCount(line, tokenCounter) + 1; + if (currentCount < maxTokensPerParagraph) + { + currentCount += tokenCounter is null ? + GetDefaultTokenCount(paragraphBuilder.Length) : + tokenCounter(paragraph = paragraphBuilder.ToString()); + } + + if (currentCount >= maxTokensPerParagraph) + { + // Complete the paragraph and prepare for the next + paragraph ??= paragraphBuilder.ToString(); + paragraphs.Add(paragraph.Trim()); + paragraphBuilder.Clear(); + } } paragraphBuilder.AppendLine(line); @@ -141,18 +147,16 @@ private static List BuildParagraph(IEnumerable truncatedLines, i return paragraphs; } - private static List ProcessParagraphs(List paragraphs, int adjustedMaxTokensPerParagraph, int overlapTokens, string? chunkHeader, Func> longLinesSplitter, TokenCounter tokenCounter) + [Experimental("SKEXP0055")] + private static List ProcessParagraphs(List paragraphs, int adjustedMaxTokensPerParagraph, int overlapTokens, string? chunkHeader, Func> longLinesSplitter, TokenCounter? tokenCounter) { - var processedParagraphs = new List(); - var paragraphStringBuilder = new StringBuilder(); - // distribute text more evenly in the last paragraphs when the last paragraph is too short. 
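Reviewer note: a small usage sketch of the refactored chunker. The input strings and limits are illustrative; the API shape and the default token heuristic (roughly four characters per token, see GetDefaultTokenCount further down in this file's diff) come from the patch. TextChunker is now marked [Experimental("SKEXP0055")], so callers have to opt in to that diagnostic.

    using Microsoft.SemanticKernel.Text;

    var lines = TextChunker.SplitPlainTextLines(
        "First sentence. Second sentence.", maxTokensPerLine: 5);

    var paragraphs = TextChunker.SplitPlainTextParagraphs(
        lines, maxTokensPerParagraph: 10, overlapTokens: 2);

    // Effective budget per paragraph = 10 - 2 (overlap) - 0 (no chunk header) = 8 tokens,
    // where a token defaults to about 4 characters (length >> 2) unless a
    // custom TokenCounter delegate is supplied.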
if (paragraphs.Count > 1) { var lastParagraph = paragraphs[paragraphs.Count - 1]; var secondLastParagraph = paragraphs[paragraphs.Count - 2]; - if (tokenCounter(lastParagraph) < adjustedMaxTokensPerParagraph / 4) + if (GetTokenCount(lastParagraph, tokenCounter) < adjustedMaxTokensPerParagraph / 4) { var lastParagraphTokens = lastParagraph.Split(s_spaceChar, StringSplitOptions.RemoveEmptyEntries); var secondLastParagraphTokens = secondLastParagraph.Split(s_spaceChar, StringSplitOptions.RemoveEmptyEntries); @@ -171,6 +175,9 @@ private static List ProcessParagraphs(List paragraphs, int adjus } } + var processedParagraphs = new List(); + var paragraphStringBuilder = new StringBuilder(); + for (int i = 0; i < paragraphs.Count; i++) { paragraphStringBuilder.Clear(); @@ -185,14 +192,13 @@ private static List ProcessParagraphs(List paragraphs, int adjus if (overlapTokens > 0 && i < paragraphs.Count - 1) { var nextParagraph = paragraphs[i + 1]; - var split = longLinesSplitter(nextParagraph, overlapTokens); + var split = longLinesSplitter(nextParagraph, overlapTokens, tokenCounter); paragraphStringBuilder.Append(paragraph); - if (split.FirstOrDefault() is string overlap) + if (split.Count != 0) { - paragraphStringBuilder.Append(' '); - paragraphStringBuilder.Append(overlap); + paragraphStringBuilder.Append(' ').Append(split[0]); } } else @@ -206,11 +212,12 @@ private static List ProcessParagraphs(List paragraphs, int adjus return processedParagraphs; } - private static List InternalSplitLines(string text, int maxTokensPerLine, bool trim, string?[] splitOptions, TokenCounter tokenCounter) + [Experimental("SKEXP0055")] + private static List InternalSplitLines(string text, int maxTokensPerLine, bool trim, string?[] splitOptions, TokenCounter? tokenCounter) { var result = new List(); - text = text.NormalizeLineEndings(); + text = text.Replace("\r\n", "\n"); // normalize line endings result.Add(text); for (int i = 0; i < splitOptions.Length; i++) { @@ -226,7 +233,8 @@ private static List InternalSplitLines(string text, int maxTokensPerLine return result; } - private static (List, bool) Split(List input, int maxTokens, ReadOnlySpan separators, bool trim, TokenCounter tokenCounter) + [Experimental("SKEXP0055")] + private static (List, bool) Split(List input, int maxTokens, ReadOnlySpan separators, bool trim, TokenCounter? tokenCounter) { bool inputWasSplit = false; List result = new(); @@ -240,12 +248,18 @@ private static (List, bool) Split(List input, int maxTokens, Rea return (result, inputWasSplit); } - private static (List, bool) Split(ReadOnlySpan input, string? inputString, int maxTokens, ReadOnlySpan separators, bool trim, TokenCounter tokenCounter) + [Experimental("SKEXP0055")] + private static (List, bool) Split(ReadOnlySpan input, string? inputString, int maxTokens, ReadOnlySpan separators, bool trim, TokenCounter? tokenCounter) { Debug.Assert(inputString is null || input.SequenceEqual(inputString.AsSpan())); List result = new(); var inputWasSplit = false; - if (tokenCounter(input.ToString()) > maxTokens) + + int inputTokenCount = tokenCounter is null ? + GetDefaultTokenCount(input.Length) : + tokenCounter(inputString ??= input.ToString()); + + if (inputTokenCount > maxTokens) { inputWasSplit = true; @@ -310,8 +324,11 @@ private static (List, bool) Split(ReadOnlySpan input, string? inpu return (result, inputWasSplit); } - private static int DefaultTokenCounter(string input) + private static int GetTokenCount(string input, TokenCounter? tokenCounter) => tokenCounter is null ? 
GetDefaultTokenCount(input.Length) : tokenCounter(input); + + private static int GetDefaultTokenCount(int length) { - return input.Length / 4; + Debug.Assert(length >= 0); + return length >> 2; } } diff --git a/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj b/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj index 3bd7bdee934d..213c744f1b3c 100644 --- a/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj +++ b/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj @@ -12,12 +12,7 @@ Empowers app owners to integrate cutting-edge LLM technology quickly and easily into their apps. - - - - - - + \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatCompletionServiceExtensionTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatCompletionServiceExtensionTests.cs deleted file mode 100644 index 86788477d4e7..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatCompletionServiceExtensionTests.cs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.ChatCompletion; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.AI.ChatCompletion; - -/// -/// Unit tests of . -/// -public class ChatCompletionServiceExtensionsTests -{ - [Fact] - public void ItCanAddChatCompletionServiceInstance() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - - // Act - services.SetService(serviceId, instance); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(serviceId, out var instanceRetrieved)); - Assert.Same(instance, instanceRetrieved); - } - - [Fact] - public void ItCanAddChatCompletionServiceFactory() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - var factory = new Func(() => instance); - - // Act - services.SetService(serviceId, factory); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(serviceId, out _)); - } - - [Fact] - public void ItCanSetDefaultChatCompletionService() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId1 = "test1"; - var serviceId2 = "test2"; - var instance1 = Mock.Of(); - var instance2 = Mock.Of(); - services.SetService(serviceId1, instance1); - - // Act - services.SetService(serviceId2, instance2, true); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(out var instanceRetrieved)); - Assert.Same(instance2, instanceRetrieved); - } - - [Fact] - public void ItReturnsFalseIfNoDefaultChatCompletionServiceIsSet() - { - // Arrange - var services = new AIServiceCollection(); - var provider = services.Build(); - - // Assert - Assert.False(provider.TryGetService(out var instanceRetrieved)); - } - - [Fact] - public void ItReturnsTrueIfHasChatCompletionServiceWithValidId() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - services.SetService(serviceId, instance); - - // Act - var provider = services.Build(); - var result = provider.HasChatCompletionService(serviceId); - - // Assert - Assert.True(result); - } - - [Fact] - public void ItReturnsFalseIfHasChatCompletionServiceWithInvalidId() - { - // Arrange - var services = new AIServiceCollection(); 
- var serviceId1 = "test1"; - var serviceId2 = "test2"; - var instance = Mock.Of(); - services.SetService(serviceId1, instance); - var provider = services.Build(); - - // Act - var result = provider.HasChatCompletionService(serviceId2); - - // Assert - Assert.False(result); - } - - [Fact] - public void ItReturnsTrueIfHasChatCompletionServiceWithNullIdAndDefaultIsSet() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - services.SetService(serviceId, instance, setAsDefault: true); - var provider = services.Build(); - - // Act - var result = provider.HasChatCompletionService(); - - // Assert - Assert.True(result); - } - - [Fact] - public void ItReturnsFalseIfHasChatCompletionServiceWithNullIdAndNoDefaultExists() - { - // Arrange - var services = new AIServiceCollection(); - var provider = services.Build(); - - // Act - var result = provider.HasChatCompletionService(); - - // Assert - Assert.False(result); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs new file mode 100644 index 000000000000..5db81bda3909 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/ChatHistoryTests.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using Microsoft.SemanticKernel.ChatCompletion; +using Xunit; + +namespace SemanticKernel.UnitTests.AI.ChatCompletion; + +/// +/// Unit tests of . +/// +public class ChatHistoryTests +{ + [Fact] + public void ItCanBeSerialised() + { + // Arrange + var options = new JsonSerializerOptions(); + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + chatHistory.AddMessage(AuthorRole.Assistant, "Hi"); + + // Act + var chatHistoryJson = JsonSerializer.Serialize(chatHistory); + + // Assert + Assert.NotNull(chatHistoryJson); + Assert.Equal("[{\"Role\":{\"Label\":\"user\"},\"Content\":\"Hello\",\"Items\":null,\"ModelId\":null,\"Metadata\":null},{\"Role\":{\"Label\":\"assistant\"},\"Content\":\"Hi\",\"Items\":null,\"ModelId\":null,\"Metadata\":null}]", chatHistoryJson); + } + + [Fact] + public void ItCanBeDeserialised() + { + // Arrange + var options = new JsonSerializerOptions(); + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + chatHistory.AddMessage(AuthorRole.Assistant, "Hi"); + var chatHistoryJson = JsonSerializer.Serialize(chatHistory, options); + + // Act + var chatHistoryDeserialised = JsonSerializer.Deserialize(chatHistoryJson, options); + + // Assert + Assert.NotNull(chatHistoryDeserialised); + Assert.Equal(chatHistory.Count, chatHistoryDeserialised.Count); + for (var i = 0; i < chatHistory.Count; i++) + { + Assert.Equal(chatHistory[i].Role.Label, chatHistoryDeserialised[i].Role.Label); + Assert.Equal(chatHistory[i].Content, chatHistoryDeserialised[i].Content); + } + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/Embeddings/TextEmbeddingServiceExtensionTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/Embeddings/TextEmbeddingServiceExtensionTests.cs deleted file mode 100644 index 0638ce0085d3..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/AI/Embeddings/TextEmbeddingServiceExtensionTests.cs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.Embeddings; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.AI.Embeddings; - -/// -/// Unit tests of . -/// -public class TextEmbeddingServiceExtensionsTests -{ - [Fact] - public void ItCanAddTextEmbeddingServiceInstance() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - - // Act - services.SetService(serviceId, instance); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(serviceId, out var instanceRetrieved)); - Assert.Same(instance, instanceRetrieved); - } - - [Fact] - public void ItCanAddTextEmbeddingServiceFactory() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - var factory = new Func(() => instance); - - // Act - services.SetService(serviceId, factory); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(serviceId, out _)); - } - - [Fact] - public void ItCanSetDefaultTextEmbeddingService() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId1 = "test1"; - var serviceId2 = "test2"; - var instance1 = Mock.Of(); - var instance2 = Mock.Of(); - services.SetService(serviceId1, instance1); - - // Act - services.SetService(serviceId2, instance2, setAsDefault: true); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(out var instanceRetrieved)); - Assert.Same(instance2, instanceRetrieved); - } - - [Fact] - public void ItReturnsFalseIfNoDefaultTextEmbeddingServiceIsSet() - { - // Arrange - var services = new AIServiceCollection(); - var provider = services.Build(); - - // Assert - Assert.False(provider.TryGetService(out var instanceRetrieved)); - } - - [Fact] - public void ItReturnsTrueIfHasTextEmbeddingServiceWithValidId() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - services.SetService(serviceId, instance); - var provider = services.Build(); - - // Act - var result = provider.HasTextEmbeddingService(serviceId); - - // Assert - Assert.True(result); - } - - [Fact] - public void ItReturnsFalseIfHasTextEmbeddingServiceWithInvalidId() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId1 = "test1"; - var serviceId2 = "test2"; - var instance = Mock.Of(); - services.SetService(serviceId1, instance); - var provider = services.Build(); - - // Act - var result = provider.HasTextEmbeddingService(serviceId2); - - // Assert - Assert.False(result); - } - - [Fact] - public void ItReturnsTrueIfHasTextEmbeddingServiceWithNullIdAndDefaultIsSet() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - services.SetService(serviceId, instance, setAsDefault: true); - var provider = services.Build(); - - // Act - var result = provider.HasTextEmbeddingService(); - - // Assert - Assert.True(result); - } - - [Fact] - public void ItReturnsFalseIfHasTextEmbeddingServiceWithNullIdAndNoDefaultExists() - { - // Arrange - var services = new AIServiceCollection(); - var provider = services.Build(); - - // Act - var result = provider.HasTextEmbeddingService(); - - // Assert - Assert.False(result); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ImageGeneration/ImageCompletionServiceExtensionTests.cs 
b/dotnet/src/SemanticKernel.UnitTests/AI/ImageGeneration/ImageCompletionServiceExtensionTests.cs deleted file mode 100644 index 165f5e636d76..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/AI/ImageGeneration/ImageCompletionServiceExtensionTests.cs +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.ImageGeneration; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.AI.ImageGeneration; - -/// -/// Unit tests of . -/// -public class ImageGenerationServiceExtensionsTests -{ - [Fact] - public void ItCanSetServiceImageGenerationInstance() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - - // Act - services.SetService(serviceId, instance); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(serviceId, out var instanceRetrieved)); - Assert.Same(instance, instanceRetrieved); - } - - [Fact] - public void ItCanSetServiceImageGenerationFactory() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - var factory = new Func(() => instance); - - // Act - services.SetService(serviceId, factory); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(out var instanceRetrieved)); - } - - [Fact] - public void ItCanSetDefaultImageGenerationService() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId1 = "test1"; - var serviceId2 = "test2"; - var instance1 = Mock.Of(); - var instance2 = Mock.Of(); - services.SetService(serviceId1, instance1); - - // Act - services.SetService(serviceId2, instance2, setAsDefault: true); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(out var instanceRetrieved)); - Assert.Same(instance2, instanceRetrieved); - } - - [Fact] - public void ItReturnsFalseIfNoDefaultImageGenerationServiceIsSet() - { - // Arrange - var services = new AIServiceCollection(); - var provider = services.Build(); - - Assert.False(provider.TryGetService(out var instanceRetrieved)); - } - - [Fact] - public void ItReturnsTrueIfHasImageGenerationServiceWithValidId() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - services.SetService(serviceId, instance); - var provider = services.Build(); - - // Act - var result = provider.HasImageGenerationService(serviceId); - - // Assert - Assert.True(result); - } - - [Fact] - public void ItReturnsFalseIfHasImageGenerationServiceWithInvalidId() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId1 = "test1"; - var serviceId2 = "test2"; - var instance = Mock.Of(); - services.SetService(serviceId1, instance); - var provider = services.Build(); - - // Act - var result = provider.HasImageGenerationService(serviceId2); - - // Assert - Assert.False(result); - } - - [Fact] - public void ItReturnsTrueIfHasImageGenerationServiceWithNullIdAndDefaultIsSet() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - services.SetService(serviceId, instance, setAsDefault: true); - var provider = services.Build(); - - // Act - var result = provider.HasImageGenerationService(); - - // Assert - Assert.True(result); - } - - [Fact] - public void ItReturnsFalseIfHasImageGenerationServiceWithNullIdAndNoDefaultExists() - 
{ - // Arrange - var services = new AIServiceCollection(); - var provider = services.Build(); - - // Act - var result = provider.HasImageGenerationService(); - - // Assert - Assert.False(result); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/TextCompletion/TextCompletionServiceExtensionTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/TextCompletion/TextCompletionServiceExtensionTests.cs deleted file mode 100644 index 689b88d115dd..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/AI/TextCompletion/TextCompletionServiceExtensionTests.cs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.AI.TextCompletion; - -/// -/// Unit tests of . -/// -public class TextCompletionServiceExtensionsTests -{ - [Fact] - public void ItCanAddTextCompletionServiceInstance() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - - // Act - services.SetService(serviceId, instance); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(serviceId, out var instanceRetrieved)); - Assert.Same(instance, instanceRetrieved); - } - - [Fact] - public void ItCanAddTextCompletionServiceFactory() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - var factory = new Func(() => instance); - - // Act - services.SetService(serviceId, factory); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(serviceId, out var _)); - } - - [Fact] - public void ItCanSetDefaultTextCompletionService() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId1 = "test1"; - var serviceId2 = "test2"; - var instance1 = Mock.Of(); - var instance2 = Mock.Of(); - services.SetService(serviceId1, instance1); - - // Act - services.SetService(serviceId2, instance2, setAsDefault: true); - var provider = services.Build(); - - // Assert - Assert.True(provider.TryGetService(out var instanceRetrieved)); - Assert.Same(instance2, instanceRetrieved); - } - - [Fact] - public void ItReturnsFalseIfNoDefaultTextCompletionServiceIsSet() - { - // Arrange - var services = new AIServiceCollection(); - var provider = services.Build(); - - // Assert - Assert.False(provider.TryGetService(out var _)); - } - - [Fact] - public void ItReturnsTrueIfHasTextCompletionServiceWithValidId() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - services.SetService(serviceId, instance); - var provider = services.Build(); - - // Act - var result = provider.HasTextCompletionService(serviceId); - - // Assert - Assert.True(result); - } - - [Fact] - public void ItReturnsFalseIfHasTextCompletionServiceWithInvalidId() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId1 = "test1"; - var serviceId2 = "test2"; - var instance = Mock.Of(); - services.SetService(serviceId1, instance); - var provider = services.Build(); - - // Act - var result = provider.HasTextCompletionService(serviceId2); - - // Assert - Assert.False(result); - } - - [Fact] - public void ItReturnsTrueIfHasTextCompletionServiceWithNullIdAndDefaultIsSet() - { - // Arrange - var services = new AIServiceCollection(); - var serviceId = "test"; - var instance = Mock.Of(); - 
services.SetService(serviceId, instance, setAsDefault: true); - var provider = services.Build(); - - // Act - var result = provider.HasTextCompletionService(); - - // Assert - Assert.True(result); - } - - [Fact] - public void ItReturnsFalseIfHasTextCompletionServiceWithNullIdAndNoDefaultExists() - { - // Arrange - var services = new AIServiceCollection(); - var provider = services.Build(); - - // Act - var result = provider.HasTextCompletionService(); - - // Assert - Assert.False(result); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/ImageContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/ImageContentTests.cs new file mode 100644 index 000000000000..cac559777f3c --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/ImageContentTests.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Contents; + +/// +/// Unit tests for class. +/// +public sealed class ImageContentTests +{ + [Fact] + public void ToStringReturnsString() + { + // Arrange + var content1 = new ImageContent(null!); + var content2 = new ImageContent(new Uri("https://endpoint/")); + + // Act + var result1 = content1.ToString(); + var result2 = content2.ToString(); + + // Assert + Assert.Empty(result1); + Assert.Equal("https://endpoint/", result2); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Events/FunctionInvokedEventArgsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Events/FunctionInvokedEventArgsTests.cs new file mode 100644 index 000000000000..0a338523b9ba --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Events/FunctionInvokedEventArgsTests.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Globalization; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Events; + +#pragma warning disable CS0618 // Events are deprecated + +public class FunctionInvokedEventArgsTests +{ + [Fact] + public void ResultValuePropertyShouldBeInitializedByOriginalOne() + { + //Arrange + var originalResults = new FunctionResult(KernelFunctionFactory.CreateFromMethod(() => { }), 36, CultureInfo.InvariantCulture); + + var sut = new FunctionInvokedEventArgs(KernelFunctionFactory.CreateFromMethod(() => { }), new KernelArguments(), originalResults); + + //Assert + Assert.Equal(36, sut.ResultValue); + } + + [Fact] + public void ResultValuePropertyShouldBeUpdated() + { + //Arrange + var originalResults = new FunctionResult(KernelFunctionFactory.CreateFromMethod(() => { }), 36, CultureInfo.InvariantCulture); + + var sut = new FunctionInvokedEventArgs(KernelFunctionFactory.CreateFromMethod(() => { }), new KernelArguments(), originalResults); + + //Act + sut.SetResultValue(72); + + //Assert + Assert.Equal(72, sut.ResultValue); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Filters/KernelFilterTests.cs b/dotnet/src/SemanticKernel.UnitTests/Filters/KernelFilterTests.cs new file mode 100644 index 000000000000..9c28f9eeece5 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Filters/KernelFilterTests.cs @@ -0,0 +1,657 @@ +// Copyright (c) Microsoft. All rights reserved. 
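For orientation, the KernelFilterTests file added below exercises the experimental function/prompt filter pipeline (IFunctionFilter, FunctionInvokingContext/FunctionInvokedContext, context.Cancel, SetResultValue). The following is only a minimal consumer-side sketch under the assumption of that same experimental API; the class and function names are illustrative and not part of this change:

```csharp
using System;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;

// Illustrative filter: replaces every function result after invocation.
internal sealed class StampResultFilter : IFunctionFilter
{
    public void OnFunctionInvoking(FunctionInvokingContext context)
    {
        // Runs before the function body; arguments can be inspected or replaced here,
        // and setting context.Cancel = true would stop the invocation.
    }

    public void OnFunctionInvoked(FunctionInvokedContext context) =>
        context.SetResultValue("result replaced by filter");
}

internal static class FilterDemo
{
    public static async Task RunAsync()
    {
        IKernelBuilder builder = Kernel.CreateBuilder();
        builder.Services.AddSingleton<IFunctionFilter>(new StampResultFilter());
        Kernel kernel = builder.Build();

        KernelFunction function = KernelFunctionFactory.CreateFromMethod(() => "original result", functionName: "Demo");
        FunctionResult result = await kernel.InvokeAsync(function);

        Console.WriteLine(result.GetValue<string>()); // "result replaced by filter"
    }
}
```

Registering the filter through builder.Services (as above) or adding it after construction via kernel.FunctionFilters.Add are both covered by the tests that follow.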
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TextGeneration; +using Moq; +using Xunit; + +namespace SemanticKernel.UnitTests.Filters; + +public class KernelFilterTests +{ + [Fact] + public async Task PreInvocationFunctionFilterIsTriggeredAsync() + { + // Arrange + var functionInvocations = 0; + var filterInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var kernel = this.GetKernelWithFilters(onFunctionInvoking: (context) => + { + filterInvocations++; + }); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal(1, functionInvocations); + Assert.Equal(1, filterInvocations); + } + + [Fact] + public async Task PreInvocationFunctionFilterChangesArgumentAsync() + { + // Arrange + const string OriginalInput = "OriginalInput"; + const string NewInput = "NewInput"; + + var kernel = this.GetKernelWithFilters(onFunctionInvoking: (context) => + { + context.Arguments["originalInput"] = NewInput; + }); + + var function = KernelFunctionFactory.CreateFromMethod((string originalInput) => originalInput); + + // Act + var result = await kernel.InvokeAsync(function, new() { ["originalInput"] = OriginalInput }); + + // Assert + Assert.Equal(NewInput, result.GetValue()); + } + + [Fact] + public async Task PreInvocationFunctionFilterCancellationWorksCorrectlyAsync() + { + // Arrange + var functionInvocations = 0; + var preFilterInvocations = 0; + var postFilterInvocations = 0; + + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var kernel = this.GetKernelWithFilters( + onFunctionInvoking: (context) => + { + preFilterInvocations++; + context.Cancel = true; + }, + onFunctionInvoked: (context) => + { + postFilterInvocations++; + }); + + // Act + var exception = await Assert.ThrowsAsync(() => kernel.InvokeAsync(function)); + + // Assert + Assert.Equal(1, preFilterInvocations); + Assert.Equal(0, functionInvocations); + Assert.Equal(0, postFilterInvocations); + Assert.Same(function, exception.Function); + Assert.Null(exception.FunctionResult); + } + + [Fact] + public async Task PreInvocationFunctionFilterCancellationWorksCorrectlyOnStreamingAsync() + { + // Arrange + var functionInvocations = 0; + var filterInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var kernel = this.GetKernelWithFilters(onFunctionInvoking: (context) => + { + filterInvocations++; + context.Cancel = true; + }); + + // Act + IAsyncEnumerable enumerable = function.InvokeStreamingAsync(kernel); + IAsyncEnumerator enumerator = enumerable.GetAsyncEnumerator(); + + Assert.Equal(0, filterInvocations); + + var exception = await Assert.ThrowsAsync(async () => await enumerator.MoveNextAsync()); + + // Assert + Assert.Equal(1, filterInvocations); + Assert.Equal(0, functionInvocations); + Assert.Same(function, exception.Function); + Assert.Same(kernel, exception.Kernel); + Assert.Null(exception.FunctionResult); + } + + [Fact] + public async Task PostInvocationFunctionFilterIsTriggeredAsync() + { + // Arrange + var functionInvocations = 0; + var filterInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var kernel = this.GetKernelWithFilters(onFunctionInvoked: (context) => + { + filterInvocations++; + }); + + // Act + var 
result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal(1, functionInvocations); + Assert.Equal(1, filterInvocations); + } + + [Fact] + public async Task PostInvocationFunctionFilterReturnsModifiedResultAsync() + { + // Arrange + const int OriginalResult = 42; + const int NewResult = 84; + + var function = KernelFunctionFactory.CreateFromMethod(() => OriginalResult); + + var kernel = this.GetKernelWithFilters(onFunctionInvoked: (context) => + { + context.SetResultValue(NewResult); + }); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal(NewResult, result.GetValue()); + } + + [Fact] + public async Task PostInvocationFunctionFilterCancellationWorksCorrectlyAsync() + { + // Arrange + const int Result = 42; + + var function = KernelFunctionFactory.CreateFromMethod(() => Result); + var args = new KernelArguments() { { "a", "b" } }; + + var kernel = this.GetKernelWithFilters(onFunctionInvoked: (context) => + { + context.Cancel = true; + }); + + // Act + var exception = await Assert.ThrowsAsync(() => kernel.InvokeAsync(function, args)); + + // Assert + Assert.Same(kernel, exception.Kernel); + Assert.Same(function, exception.Function); + Assert.Same(args, exception.Arguments); + Assert.NotNull(exception.FunctionResult); + Assert.Equal(Result, exception.FunctionResult.GetValue()); + } + + [Fact] + public async Task PostInvocationFunctionFilterCancellationWithModifiedResultAsync() + { + // Arrange + const int OriginalResult = 42; + const int NewResult = 84; + + var function = KernelFunctionFactory.CreateFromMethod(() => OriginalResult); + var args = new KernelArguments() { { "a", "b" } }; + + var kernel = this.GetKernelWithFilters(onFunctionInvoked: (context) => + { + context.SetResultValue(NewResult); + context.Cancel = true; + }); + + // Act + var exception = await Assert.ThrowsAsync(() => kernel.InvokeAsync(function, args)); + + // Assert + Assert.Same(kernel, exception.Kernel); + Assert.Same(function, exception.Function); + Assert.Same(args, exception.Arguments); + Assert.NotNull(exception.FunctionResult); + Assert.Equal(NewResult, exception.FunctionResult.GetValue()); + } + + [Fact] + public async Task PostInvocationFunctionFilterIsNotTriggeredOnStreamingAsync() + { + // Arrange + var functionInvocations = 0; + var preFilterInvocations = 0; + var postFilterInvocations = 0; + + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var kernel = this.GetKernelWithFilters( + onFunctionInvoking: (context) => + { + preFilterInvocations++; + }, + onFunctionInvoked: (context) => + { + postFilterInvocations++; + }); + + // Act + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + } + + // Assert + Assert.Equal(1, functionInvocations); + Assert.Equal(1, preFilterInvocations); + Assert.Equal(0, postFilterInvocations); + } + + [Fact] + public async Task FunctionFiltersWithPromptsWorkCorrectlyAsync() + { + // Arrange + var preFilterInvocations = 0; + var postFilterInvocations = 0; + var mockTextGeneration = this.GetMockTextGeneration(); + + var kernel = this.GetKernelWithFilters(textGenerationService: mockTextGeneration.Object, + onFunctionInvoking: (context) => + { + preFilterInvocations++; + }, + onFunctionInvoked: (context) => + { + postFilterInvocations++; + }); + + var function = KernelFunctionFactory.CreateFromPrompt("Write a simple phrase about UnitTests"); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal(1, preFilterInvocations); + Assert.Equal(1, 
postFilterInvocations); + mockTextGeneration.Verify(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(1)); + } + + [Fact] + public async Task PromptFiltersAreNotTriggeredForMethodsAsync() + { + // Arrange + var functionInvocations = 0; + var preFilterInvocations = 0; + var postFilterInvocations = 0; + + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var kernel = this.GetKernelWithFilters( + onPromptRendering: (context) => + { + preFilterInvocations++; + }, + onPromptRendered: (context) => + { + postFilterInvocations++; + }); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal(1, functionInvocations); + Assert.Equal(0, preFilterInvocations); + Assert.Equal(0, postFilterInvocations); + } + + [Fact] + public async Task PromptFiltersAreTriggeredForPromptsAsync() + { + // Arrange + var preFilterInvocations = 0; + var postFilterInvocations = 0; + var mockTextGeneration = this.GetMockTextGeneration(); + + var function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + + var kernel = this.GetKernelWithFilters(textGenerationService: mockTextGeneration.Object, + onPromptRendering: (context) => + { + preFilterInvocations++; + }, + onPromptRendered: (context) => + { + postFilterInvocations++; + }); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal(1, preFilterInvocations); + Assert.Equal(1, postFilterInvocations); + } + + [Fact] + public async Task PromptFiltersAreTriggeredForPromptsStreamingAsync() + { + // Arrange + var preFilterInvocations = 0; + var postFilterInvocations = 0; + var mockTextGeneration = this.GetMockTextGeneration(); + + var function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + + var kernel = this.GetKernelWithFilters(textGenerationService: mockTextGeneration.Object, + onPromptRendering: (context) => + { + preFilterInvocations++; + }, + onPromptRendered: (context) => + { + postFilterInvocations++; + }); + + // Act + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + } + + // Assert + Assert.Equal(1, preFilterInvocations); + Assert.Equal(1, postFilterInvocations); + } + + [Fact] + public async Task PostInvocationPromptFilterChangesRenderedPromptAsync() + { + // Arrange + var mockTextGeneration = this.GetMockTextGeneration(); + var function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + var kernel = this.GetKernelWithFilters(textGenerationService: mockTextGeneration.Object, + onPromptRendered: (context) => + { + context.RenderedPrompt += " - updated from filter"; + }); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + mockTextGeneration.Verify(m => m.GetTextContentsAsync("Prompt - updated from filter", It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task PostInvocationPromptFilterCancellationWorksCorrectlyAsync() + { + // Arrange + var mockTextGeneration = this.GetMockTextGeneration(); + var function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + var kernel = this.GetKernelWithFilters(textGenerationService: mockTextGeneration.Object, + onPromptRendered: (context) => + { + context.Cancel = true; + }); + + // Act + var exception = await Assert.ThrowsAsync(() => kernel.InvokeAsync(function)); + + // Assert + Assert.Same(function, exception.Function); + Assert.Same(kernel, exception.Kernel); + Assert.Null(exception.FunctionResult); + } + + [Fact] + public async Task FunctionAndPromptFiltersAreExecutedInCorrectOrderAsync() 
+ { + // Arrange + var builder = Kernel.CreateBuilder(); + var mockTextGeneration = this.GetMockTextGeneration(); + var function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + + var executionOrder = new List(); + + var functionFilter1 = new FakeFunctionFilter( + (context) => executionOrder.Add("FunctionFilter1-Invoking"), + (context) => executionOrder.Add("FunctionFilter1-Invoked")); + + var functionFilter2 = new FakeFunctionFilter( + (context) => executionOrder.Add("FunctionFilter2-Invoking"), + (context) => executionOrder.Add("FunctionFilter2-Invoked")); + + var promptFilter1 = new FakePromptFilter( + (context) => executionOrder.Add("PromptFilter1-Rendering"), + (context) => executionOrder.Add("PromptFilter1-Rendered")); + + var promptFilter2 = new FakePromptFilter( + (context) => executionOrder.Add("PromptFilter2-Rendering"), + (context) => executionOrder.Add("PromptFilter2-Rendered")); + + builder.Services.AddSingleton(functionFilter1); + builder.Services.AddSingleton(functionFilter2); + + builder.Services.AddSingleton(promptFilter1); + builder.Services.AddSingleton(promptFilter2); + + builder.Services.AddSingleton(mockTextGeneration.Object); + + var kernel = builder.Build(); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal("FunctionFilter1-Invoking", executionOrder[0]); + Assert.Equal("FunctionFilter2-Invoking", executionOrder[1]); + Assert.Equal("PromptFilter1-Rendering", executionOrder[2]); + Assert.Equal("PromptFilter2-Rendering", executionOrder[3]); + Assert.Equal("PromptFilter1-Rendered", executionOrder[4]); + Assert.Equal("PromptFilter2-Rendered", executionOrder[5]); + Assert.Equal("FunctionFilter1-Invoked", executionOrder[6]); + Assert.Equal("FunctionFilter2-Invoked", executionOrder[7]); + } + + [Fact] + public async Task MultipleFunctionFiltersCancellationWorksCorrectlyAsync() + { + // Arrange + var functionInvocations = 0; + var filterInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var functionFilter1 = new FakeFunctionFilter(onFunctionInvoking: (context) => + { + filterInvocations++; + context.Cancel = true; + }); + + var functionFilter2 = new FakeFunctionFilter(onFunctionInvoking: (context) => + { + Assert.True(context.Cancel); + + filterInvocations++; + context.Cancel = false; + }); + + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(functionFilter1); + builder.Services.AddSingleton(functionFilter2); + + var kernel = builder.Build(); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal(1, functionInvocations); + Assert.Equal(2, filterInvocations); + } + + [Fact] + public async Task DifferentWaysOfAddingFunctionFiltersWorkCorrectlyAsync() + { + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); + var executionOrder = new List(); + + var functionFilter1 = new FakeFunctionFilter((context) => executionOrder.Add("FunctionFilter1-Invoking")); + var functionFilter2 = new FakeFunctionFilter((context) => executionOrder.Add("FunctionFilter2-Invoking")); + + var builder = Kernel.CreateBuilder(); + + // Act + + // Case #1 - Add filter to services + builder.Services.AddSingleton(functionFilter1); + + var kernel = builder.Build(); + + // Case #2 - Add filter to kernel + kernel.FunctionFilters.Add(functionFilter2); + + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal("FunctionFilter1-Invoking", executionOrder[0]); + Assert.Equal("FunctionFilter2-Invoking", 
executionOrder[1]); + } + + [Fact] + public async Task DifferentWaysOfAddingPromptFiltersWorkCorrectlyAsync() + { + // Arrange + var mockTextGeneration = this.GetMockTextGeneration(); + var function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + var executionOrder = new List(); + + var promptFilter1 = new FakePromptFilter((context) => executionOrder.Add("PromptFilter1-Rendering")); + var promptFilter2 = new FakePromptFilter((context) => executionOrder.Add("PromptFilter2-Rendering")); + + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(mockTextGeneration.Object); + + // Act + // Case #1 - Add filter to services + builder.Services.AddSingleton(promptFilter1); + + var kernel = builder.Build(); + + // Case #2 - Add filter to kernel + kernel.PromptFilters.Add(promptFilter2); + + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal("PromptFilter1-Rendering", executionOrder[0]); + Assert.Equal("PromptFilter2-Rendering", executionOrder[1]); + } + + [Fact] + public async Task InsertFilterInMiddleOfPipelineTriggersFiltersInCorrectOrderAsync() + { + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); + var executionOrder = new List(); + + var functionFilter1 = new FakeFunctionFilter( + (context) => executionOrder.Add("FunctionFilter1-Invoking"), + (context) => executionOrder.Add("FunctionFilter1-Invoked")); + + var functionFilter2 = new FakeFunctionFilter( + (context) => executionOrder.Add("FunctionFilter2-Invoking"), + (context) => executionOrder.Add("FunctionFilter2-Invoked")); + + var functionFilter3 = new FakeFunctionFilter( + (context) => executionOrder.Add("FunctionFilter3-Invoking"), + (context) => executionOrder.Add("FunctionFilter3-Invoked")); + + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(functionFilter1); + builder.Services.AddSingleton(functionFilter2); + + var kernel = builder.Build(); + + kernel.FunctionFilters.Insert(1, functionFilter3); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal("FunctionFilter1-Invoking", executionOrder[0]); + Assert.Equal("FunctionFilter3-Invoking", executionOrder[1]); + Assert.Equal("FunctionFilter2-Invoking", executionOrder[2]); + Assert.Equal("FunctionFilter1-Invoked", executionOrder[3]); + Assert.Equal("FunctionFilter3-Invoked", executionOrder[4]); + Assert.Equal("FunctionFilter2-Invoked", executionOrder[5]); + } + + private Kernel GetKernelWithFilters( + Action? onFunctionInvoking = null, + Action? onFunctionInvoked = null, + Action? onPromptRendering = null, + Action? onPromptRendered = null, + ITextGenerationService? 
textGenerationService = null) + { + var builder = Kernel.CreateBuilder(); + var functionFilter = new FakeFunctionFilter(onFunctionInvoking, onFunctionInvoked); + var promptFilter = new FakePromptFilter(onPromptRendering, onPromptRendered); + + // Add function filter before kernel construction + builder.Services.AddSingleton(functionFilter); + + if (textGenerationService is not null) + { + builder.Services.AddSingleton(textGenerationService); + } + + var kernel = builder.Build(); + + // Add prompt filter after kernel construction + kernel.PromptFilters.Add(promptFilter); + + return kernel; + } + + private Mock GetMockTextGeneration() + { + var mockTextGeneration = new Mock(); + mockTextGeneration + .Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(new List { new("result text") }); + + mockTextGeneration + .Setup(s => s.GetStreamingTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(new List() { new("result chunk") }.ToAsyncEnumerable()); + + return mockTextGeneration; + } + + private sealed class FakeFunctionFilter( + Action? onFunctionInvoking = null, + Action? onFunctionInvoked = null) : IFunctionFilter + { + private readonly Action? _onFunctionInvoking = onFunctionInvoking; + private readonly Action? _onFunctionInvoked = onFunctionInvoked; + + public void OnFunctionInvoked(FunctionInvokedContext context) => + this._onFunctionInvoked?.Invoke(context); + + public void OnFunctionInvoking(FunctionInvokingContext context) => + this._onFunctionInvoking?.Invoke(context); + } + + private sealed class FakePromptFilter( + Action? onPromptRendering = null, + Action? onPromptRendered = null) : IPromptFilter + { + private readonly Action? _onPromptRendering = onPromptRendering; + private readonly Action? _onPromptRendered = onPromptRendered; + + public void OnPromptRendered(PromptRenderedContext context) => + this._onPromptRendered?.Invoke(context); + + public void OnPromptRendering(PromptRenderingContext context) => + this._onPromptRendering?.Invoke(context); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/CustomAIServiceSelectorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/CustomAIServiceSelectorTests.cs new file mode 100644 index 000000000000..94d010937127 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/CustomAIServiceSelectorTests.cs @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Services; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class CustomAIServiceSelectorTests +{ + [Fact] + public void ItGetsAIServiceUsingArbitraryAttributes() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new AIService()); + Kernel kernel = builder.Build(); + + var function = kernel.CreateFunctionFromPrompt("Hello AI"); + var serviceSelector = new CustomAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, new KernelArguments()); + + // Assert + Assert.NotNull(aiService); + Assert.True(aiService.Attributes?.ContainsKey("Key1")); + Assert.Null(defaultExecutionSettings); + } + + private sealed class CustomAIServiceSelector : IAIServiceSelector + { +#pragma warning disable CS8769 // Nullability of reference types in value doesn't match target type. 
Cannot use [NotNullWhen] because of access to internals from abstractions. + bool IAIServiceSelector.TrySelectAIService(Kernel kernel, KernelFunction function, KernelArguments arguments, out T? service, out PromptExecutionSettings? serviceSettings) where T : class + { + var keyedService = (kernel.Services as IKeyedServiceProvider)?.GetKeyedService("service1"); + if (keyedService is null || keyedService.Attributes is null) + { + service = null; + serviceSettings = null; + return false; + } + + service = keyedService.Attributes.ContainsKey("Key1") ? keyedService as T : null; + serviceSettings = null; + return true; + } + } + + private sealed class AIService : IAIService + { + public IReadOnlyDictionary Attributes => this._attributes; + + public AIService() + { + this._attributes = new Dictionary(); + this._attributes.Add("Key1", "Value1"); + } + + private readonly Dictionary _attributes; + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionCollectionTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionCollectionTests.cs deleted file mode 100644 index 502331838418..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionCollectionTests.cs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.Functions; - -public class FunctionCollectionTests -{ - [Fact] - public void ItAllowsToReplaceFunctions() - { - // Arrange - List modelSettings = new(); - modelSettings.Add(new AIRequestSettings()); - - var functionOne = new Mock(); - functionOne.SetupGet(x => x.Name).Returns("fName"); - functionOne.SetupGet(x => x.PluginName).Returns("sName"); - functionOne.SetupGet(x => x.Description).Returns("ONE"); - - var functionTwo = new Mock(); - functionTwo.SetupGet(x => x.Name).Returns("fName"); - functionTwo.SetupGet(x => x.PluginName).Returns("sName"); - functionTwo.SetupGet(x => x.Description).Returns("TWO"); - - var target = new FunctionCollection(); - - // Act - target.AddFunction(functionOne.Object); - - // Assert - Assert.True(target.TryGetFunction("sName", "fName", out var func)); - Assert.Equal("ONE", func.Description); - - // Act - target.AddFunction(functionTwo.Object); - - // Assert - Assert.True(target.TryGetFunction("sName", "fName", out func)); - Assert.Equal("TWO", func.Description); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionFromMethodTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionFromMethodTests.cs new file mode 100644 index 000000000000..445ae9304fb5 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionFromMethodTests.cs @@ -0,0 +1,169 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; + +// ReSharper disable StringLiteralTypo + +namespace SemanticKernel.UnitTests.Functions; + +public class FunctionFromMethodTests +{ + [Fact] + public async Task InvokeStreamingAsyncShouldReturnOneChunkFromNonStreamingMethodAsync() + { + // Arrange + var kernel = new Kernel(); + var nativeContent = "Full content result"; + var sut = KernelFunctionFactory.CreateFromMethod(() => nativeContent); + + // Act + var chunkCount = 0; + StreamingKernelContent? 
lastChunk = null; + await foreach (var chunk in sut.InvokeStreamingAsync(kernel)) + { + chunkCount++; + lastChunk = chunk; + } + + // Assert + Assert.Equal(1, chunkCount); + Assert.NotNull(lastChunk); + Assert.IsAssignableFrom(lastChunk); + Assert.IsType(lastChunk); + + var methodContent = lastChunk as StreamingMethodContent; + Assert.Equal(nativeContent, methodContent!.Content); + } + + [Fact] + public async Task InvokeStreamingAsyncShouldPropagateMetadataFromNonStreamingMethodAsync() + { + // Arrange + var kernel = new Kernel(); + var nativeContent = "Full content result"; + var sut = KernelFunctionFactory.CreateFromMethod((KernelFunction func) => + { + return new FunctionResult(func, nativeContent, metadata: new Dictionary() + { + { "key1", "value1" }, + { "key2", "value2" }, + }); + }); + + // Act + var chunkCount = 0; + StreamingKernelContent? lastChunk = null; + await foreach (var chunk in sut.InvokeStreamingAsync(kernel)) + { + chunkCount++; + lastChunk = chunk; + } + + // Assert + Assert.Equal(1, chunkCount); + Assert.NotNull(lastChunk); + Assert.IsAssignableFrom(lastChunk); + Assert.IsType(lastChunk); + + var methodContent = lastChunk as StreamingMethodContent; + Assert.Equal(nativeContent, methodContent!.Content); + + Assert.NotNull(methodContent.Metadata); + Assert.Equal(2, methodContent.Metadata.Count); + Assert.Equal("value1", methodContent.Metadata["key1"]); + Assert.Equal("value2", methodContent.Metadata["key2"]); + } + + [Fact] + public async Task InvokeStreamingAsyncOnlySupportsInvokingEventAsync() + { + // Arrange + var kernel = new Kernel(); + var sut = KernelFunctionFactory.CreateFromMethod(() => "any"); + + var invokedCalled = false; + var invokingCalled = false; + +#pragma warning disable CS0618 // Events are deprecated + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + { + invokingCalled = true; + }; + + // Invoked is not supported for streaming... + kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => + { + invokedCalled = true; + }; +#pragma warning restore CS0618 // Events are deprecated + + // Act + await foreach (var chunk in sut.InvokeStreamingAsync(kernel)) + { + } + + // Assert + Assert.True(invokingCalled); + Assert.False(invokedCalled); + } + + [Fact] + public async Task InvokeStreamingAsyncInvokingCancelingShouldThrowAsync() + { + // Arrange + var kernel = new Kernel(); + var sut = KernelFunctionFactory.CreateFromMethod(() => "any"); + + bool invokingCalled = false; + +#pragma warning disable CS0618 // Type or member is obsolete + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + { + invokingCalled = true; + e.Cancel = true; + }; +#pragma warning restore CS0618 // Type or member is obsolete + + // Act + IAsyncEnumerable enumerable = sut.InvokeStreamingAsync(kernel); + IAsyncEnumerator enumerator = enumerable.GetAsyncEnumerator(); + Assert.False(invokingCalled); + var e = await Assert.ThrowsAsync(async () => await enumerator.MoveNextAsync()); + + // Assert + Assert.True(invokingCalled); + Assert.Same(sut, e.Function); + Assert.Same(kernel, e.Kernel); + Assert.Empty(e.Arguments); + } + + [Fact] + public async Task InvokeStreamingAsyncUsingInvokedEventHasNoEffectAsync() + { + // Arrange + var kernel = new Kernel(); + var sut = KernelFunctionFactory.CreateFromMethod(() => "any"); + +#pragma warning disable CS0618 // Type or member is obsolete + kernel.FunctionInvoked += (object? 
sender, FunctionInvokedEventArgs e) => + { + // This will have no effect on streaming + e.Cancel = true; + }; +#pragma warning restore CS0618 // Type or member is obsolete + + var chunkCount = 0; + + // Act + await foreach (var chunk in sut.InvokeStreamingAsync(kernel)) + { + chunkCount++; + } + + // Assert + Assert.Equal(1, chunkCount); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionResultTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionResultTests.cs new file mode 100644 index 000000000000..7e71a57f8c69 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionResultTests.cs @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Globalization; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +/// +/// Unit tests of . +/// +public class FunctionResultTests +{ + private static readonly KernelFunction s_nopFunction = KernelFunctionFactory.CreateFromMethod(() => { }); + + [Fact] + public void DefaultsAreExpected() + { + var result = new FunctionResult(s_nopFunction); + Assert.Null(result.GetValue()); + Assert.Same(CultureInfo.InvariantCulture, result.Culture); + Assert.Null(result.Metadata); + } + + [Fact] + public void PropertiesRoundtrip() + { + object resultValue = new(); + CultureInfo culture = new("fr-FR"); + var metadata = new Dictionary(); + + FunctionResult result = new(s_nopFunction, resultValue, culture); + Assert.Same(resultValue, result.GetValue()); + Assert.Same(culture, result.Culture); + Assert.Null(result.Metadata); + + result = new(s_nopFunction, resultValue, culture, metadata); + Assert.Same(resultValue, result.GetValue()); + Assert.Same(culture, result.Culture); + Assert.Same(metadata, result.Metadata); + } + + [Fact] + public void GetValueReturnsValueWhenValueIsNotNull() + { + // Arrange + string value = Guid.NewGuid().ToString(); + FunctionResult target = new(s_nopFunction, value, CultureInfo.InvariantCulture); + + // Act,Assert + Assert.Equal(value, target.GetValue()); + } + + [Fact] + public void GetValueReturnsNullWhenValueIsNull() + { + // Arrange + FunctionResult target = new(s_nopFunction); + + // Act,Assert + Assert.Null(target.GetValue()); + } + + [Fact] + public void GetValueThrowsWhenValueIsNotNullButTypeDoesNotMatch() + { + // Arrange + int value = 42; + FunctionResult target = new(s_nopFunction, value, CultureInfo.InvariantCulture); + + // Act,Assert + Assert.Throws(() => target.GetValue()); + } + + [Fact] + public void ConstructorSetsProperties() + { + // Act + FunctionResult target = new(s_nopFunction); + + // Assert + Assert.Same(s_nopFunction, target.Function); + } + + [Fact] + public void ConstructorSetsPropertiesAndValue() + { + // Arrange + string functionName = Guid.NewGuid().ToString(); + string value = Guid.NewGuid().ToString(); + + // Act + FunctionResult target = new(s_nopFunction, value, CultureInfo.InvariantCulture); + + // Assert + Assert.Same(s_nopFunction, target.Function); + Assert.Equal(value, target.Value); + } + + [Fact] + public void ToStringWorksCorrectly() + { + // Arrange + string value = Guid.NewGuid().ToString(); + FunctionResult target = new(s_nopFunction, value, CultureInfo.InvariantCulture); + + // Act and Assert + Assert.Equal(value, target.ToString()); + } + + [Fact] + public void GetValueWhenValueIsKernelContentGenericStringShouldReturnContentBaseToString() + { + // Arrange + string expectedValue = Guid.NewGuid().ToString(); + FunctionResult 
target = new(s_nopFunction, new TextContent(expectedValue)); + + // Act and Assert + Assert.Equal(expectedValue, target.GetValue()); + } + + [Fact] + public void GetValueWhenValueIsKernelContentGenericTypeMatchShouldReturn() + { + // Arrange + string expectedValue = Guid.NewGuid().ToString(); + var valueType = new TextContent(expectedValue); + FunctionResult target = new(s_nopFunction, valueType); + + // Act and Assert + + Assert.Equal(valueType, target.GetValue()); + Assert.Equal(valueType, target.GetValue()); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionViewTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionViewTests.cs deleted file mode 100644 index 16de94bd4e09..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/FunctionViewTests.cs +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using Microsoft.SemanticKernel; -using Xunit; - -namespace SemanticKernel.UnitTests.Functions; - -public class FunctionViewTests -{ - [Fact] - public void ItReturnsFunctionParams() - { - // Arrange - var paramsA = new List - { - new("p1", "param 1", "default 1"), - new("p2", "param 2", "default 2") - }; - - // Act - var funcViewA = new FunctionView("funcA", "s1", "", paramsA); - - // Assert - Assert.NotNull(funcViewA); - - Assert.Equal("p1", funcViewA.Parameters[0].Name); - Assert.Equal("p2", funcViewA.Parameters[1].Name); - Assert.Equal("param 1", funcViewA.Parameters[0].Description); - Assert.Equal("param 2", funcViewA.Parameters[1].Description); - Assert.Equal("default 1", funcViewA.Parameters[0].DefaultValue); - Assert.Equal("default 2", funcViewA.Parameters[1].DefaultValue); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs new file mode 100644 index 000000000000..b1aa98d7a5a3 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft. All rights reserved. 
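For context on the KernelArgumentsTests that follow: KernelArguments carries both the argument dictionary (with case-insensitive names) and optional per-service PromptExecutionSettings. A minimal usage sketch of those two behaviors; the argument name is illustrative:

```csharp
using Microsoft.SemanticKernel;

PromptExecutionSettings settings = new();

// Arguments plus default execution settings in one object.
KernelArguments args = new(settings) { ["city"] = "Seattle" };

bool found = args.ContainsName("CITY"); // true: argument names are case-insensitive
PromptExecutionSettings? resolved =
    args.ExecutionSettings?[PromptExecutionSettings.DefaultServiceId]; // same instance as 'settings'
```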
+ +using System.Collections.Generic; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelArgumentsTests +{ + [Fact] + public void ItCanBeCreatedWithNoArguments() + { + KernelArguments sut = new() { }; + + Assert.Null(sut.ExecutionSettings); + Assert.Empty(sut); + } + + [Fact] + public void ItCanBeCreatedWithExecutionSettingsOnly() + { + // Arrange + var executionSettings = new PromptExecutionSettings(); + + // Act + KernelArguments sut = new(executionSettings) { }; + + // Assert + Assert.Same(executionSettings, sut.ExecutionSettings?[PromptExecutionSettings.DefaultServiceId]); + Assert.Empty(sut); + } + + [Fact] + public void ItCanBeCreatedWithArgumentsOnly() + { + // Arrange & Act + KernelArguments sut = new() { { "fake-key", "fake-value" } }; + + // Assert + Assert.Null(sut.ExecutionSettings); + + var argument = Assert.Single(sut); + Assert.Equal("fake-key", argument.Key); + Assert.Equal("fake-value", argument.Value); + } + + [Fact] + public void ItCanBeCreatedWithBothExecutionSettingsAndArguments() + { + // Arrange + var executionSettings = new PromptExecutionSettings(); + + // Act + KernelArguments sut = new(executionSettings) { { "fake-key", "fake-value" } }; + + // Assert + Assert.Same(executionSettings, sut.ExecutionSettings?[PromptExecutionSettings.DefaultServiceId]); + + var argument = Assert.Single(sut); + Assert.Equal("fake-key", argument.Key); + Assert.Equal("fake-value", argument.Value); + } + + [Fact] + public void ItCanPerformCaseInsensitiveSearch() + { + //Constructor 1 + var executionSettings = new PromptExecutionSettings(); + KernelArguments sut = new(executionSettings) { { "FAKE-key", "fake-value" } }; + Assert.True(sut.ContainsName("fake-key")); + + //Constructor 2 + IDictionary source = new Dictionary { { "FAKE-key", "fake-value" } }; + sut = new(source); + Assert.True(sut.ContainsName("fake-key")); + + //Constructor 3 + KernelArguments other = new() { { "FAKE-key", "fake-value" } }; + sut = new(other); + Assert.True(sut.ContainsName("fake-key")); + } + + [Fact] + public void ItCanBeInitializedFromIDictionary() + { + // Arrange + IDictionary source = new Dictionary { { "fake-key", "fake-value" } }; + + // Act + KernelArguments sut = new(source); + + // Assert + Assert.Single(sut); + Assert.True(sut.ContainsName("fake-key")); + Assert.Equal("fake-value", sut["fake-key"]); + + Assert.Null(sut.ExecutionSettings); + } + + [Fact] + public void ItCanBeInitializedFromAnotherSettingsInstance() + { + // Arrange + var executionSettings = new PromptExecutionSettings(); + var other = new KernelArguments(executionSettings) { { "Fake-key", "fake-value" } }; + + // Act + KernelArguments sut = new(other); + + // Assert + Assert.Single(sut); + Assert.True(sut.ContainsName("fake-key")); + Assert.Equal("fake-value", sut["fake-key"]); + + Assert.Same(executionSettings, sut.ExecutionSettings?[PromptExecutionSettings.DefaultServiceId]); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs new file mode 100644 index 000000000000..f17ccd29f5d8 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs @@ -0,0 +1,245 @@ +// Copyright (c) Microsoft. All rights reserved. 
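The KernelBuilderTests below assert, among other things, that each Build() call yields a distinct Kernel and that plugins added to builder.Plugins are propagated into every built kernel. A short sketch of that flow, with illustrative plugin/function names and no AI connector configured:

```csharp
using Microsoft.SemanticKernel;

IKernelBuilder builder = Kernel.CreateBuilder();

// Plugins added to the builder are propagated into every kernel it builds.
builder.Plugins.Add(KernelPluginFactory.CreateFromFunctions("demo", new[]
{
    KernelFunctionFactory.CreateFromMethod(() => "pong", functionName: "Ping"),
}));

Kernel kernel1 = builder.Build();
Kernel kernel2 = builder.Build(); // a distinct Kernel instance on every Build()

KernelPlugin demo = kernel1.Plugins["demo"]; // both kernels contain the "demo" plugin
```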
+ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelBuilderTests +{ + [Fact] + public void ItCreatesNewKernelsOnEachBuild() + { + IKernelBuilder builder = Kernel.CreateBuilder(); + Assert.NotSame(builder.Build(), builder.Build()); + } + + [Fact] + public void ItHasIdempotentServicesAndPlugins() + { + IKernelBuilder builder = Kernel.CreateBuilder(); + + Assert.NotNull(builder.Services); + Assert.NotNull(builder.Plugins); + + IServiceCollection services = builder.Services; + IKernelBuilderPlugins plugins = builder.Plugins; + + for (int i = 0; i < 3; i++) + { + Assert.Same(services, builder.Services); + Assert.Same(plugins, builder.Plugins); + Assert.NotNull(builder.Build()); + } + } + + [Fact] + public void ItDefaultsDataToAnEmptyDictionary() + { + Kernel kernel = Kernel.CreateBuilder().Build(); + Assert.Empty(kernel.Data); + } + + [Fact] + public void ItDefaultsServiceSelectorToSingleton() + { + Kernel kernel = Kernel.CreateBuilder().Build(); + Assert.Null(kernel.Services.GetService()); + Assert.NotNull(kernel.ServiceSelector); + Assert.Same(kernel.ServiceSelector, kernel.ServiceSelector); + Assert.Throws(() => kernel.GetRequiredService()); + + kernel = new Kernel(); + Assert.Null(kernel.Services.GetService()); + Assert.NotNull(kernel.ServiceSelector); + Assert.Same(kernel.ServiceSelector, kernel.ServiceSelector); + Assert.Throws(() => kernel.GetRequiredService()); + + NopServiceSelector selector = new(); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(selector); + kernel = builder.Build(); + Assert.Same(selector, kernel.Services.GetService()); + Assert.Same(selector, kernel.ServiceSelector); + Assert.Same(selector, kernel.GetRequiredService()); + } + + private sealed class NopServiceSelector : IAIServiceSelector + { +#pragma warning disable CS8769 // Nullability of reference types in type of parameter doesn't match implemented member (possibly because of nullability attributes). + bool IAIServiceSelector.TrySelectAIService( +#pragma warning restore CS8769 + Kernel kernel, KernelFunction function, KernelArguments arguments, out T? service, out PromptExecutionSettings? 
serviceSettings) where T : class => + throw new NotImplementedException(); + } + + [Fact] + public void ItPropagatesPluginsToBuiltKernel() + { + KernelPlugin plugin1 = KernelPluginFactory.CreateFromFunctions("plugin1"); + KernelPlugin plugin2 = KernelPluginFactory.CreateFromFunctions("plugin2"); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin1); + builder.Plugins.Add(plugin2); + Kernel kernel = builder.Build(); + + Assert.Contains(plugin1, kernel.Plugins); + Assert.Contains(plugin2, kernel.Plugins); + } + + [Fact] + public void ItSuppliesServicesCollectionToPluginsBuilder() + { + IKernelBuilder builder = Kernel.CreateBuilder(); + Assert.Same(builder.Services, builder.Plugins.Services); + } + + [Fact] + public void ItBuildsServicesIntoKernel() + { + var builder = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddAzureOpenAITextGeneration(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); + + builder.Services.AddSingleton(CultureInfo.InvariantCulture); + builder.Services.AddSingleton(CultureInfo.CurrentCulture); + builder.Services.AddSingleton(new CultureInfo("en-US")); + + Kernel kernel = builder.Build(); + + Assert.IsType(kernel.GetRequiredService("openai")); + Assert.IsType(kernel.GetRequiredService("azureopenai")); + + Assert.Equal(2, kernel.GetAllServices().Count()); + Assert.Single(kernel.GetAllServices()); + + Assert.Equal(3, kernel.GetAllServices().Count()); + } + + [Fact] + public void ItSupportsMultipleEqualNamedServices() + { + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "lmnop", endpoint: "https://qrs", apiKey: "tuv", serviceId: "openai") + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "lmnop", endpoint: "https://qrs", apiKey: "tuv", serviceId: "openai") + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "lmnop", endpoint: "https://qrs", apiKey: "tuv", serviceId: "openai") + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "lmnop", endpoint: "https://qrs", apiKey: "tuv", serviceId: "openai") + .Build(); + + Assert.Equal(8, kernel.GetAllServices().Count()); + } + + [Fact] + public void ItIsntNeededInDIContexts() + { + KernelPluginCollection plugins = new() { KernelPluginFactory.CreateFromFunctions("plugin1") }; + + var serviceCollection = new ServiceCollection(); + serviceCollection.AddAzureOpenAIChatCompletion(deploymentName: "abcd", modelId: "efg", endpoint: "https://hijk", apiKey: "lmnop"); + serviceCollection.AddAzureOpenAIChatCompletion(deploymentName: "abcd", modelId: "efg", endpoint: "https://hijk", apiKey: "lmnop"); + serviceCollection.AddAzureOpenAIChatCompletion(deploymentName: "abcd", modelId: "efg", endpoint: "https://hijk", apiKey: "lmnop", serviceId: "azureopenai1"); + serviceCollection.AddAzureOpenAIChatCompletion(deploymentName: "abcd", modelId: "efg", endpoint: "https://hijk", apiKey: "lmnop", serviceId: "azureopenai2"); + serviceCollection.AddSingleton(plugins); + serviceCollection.AddSingleton(); + + Kernel k = serviceCollection.BuildServiceProvider().GetService()!; + + Assert.NotNull(k); + 
Assert.Same(plugins, k.Plugins); + Assert.IsAssignableFrom(k.GetRequiredService("azureopenai1")); + Assert.IsAssignableFrom(k.GetRequiredService("azureopenai2")); + + // This should be 4, not 2. However, there is currently a limitation with Microsoft.Extensions.DependencyInjection + // that prevents GetAllServices from enumerating named services. KernelBuilder works around this, + // but when just using DI directly, it will only find unnamed services. Once that issue is fixed and SK + // brings in the new version, it can update the GetAllServices implementation to remove the workaround, + // and then this test should be updated accordingly. + Assert.Equal(2, k.GetAllServices().Count()); + + // It's possible to explicitly use the same workaround outside of KernelBuilder to get all services, + // but it's not recommended. + + //** WORKAROUND + Dictionary> mapping = new(); + foreach (var descriptor in serviceCollection) + { + if (!mapping.TryGetValue(descriptor.ServiceType, out HashSet? keys)) + { + mapping[descriptor.ServiceType] = keys = new HashSet(); + } + keys.Add(descriptor.ServiceKey); + } + serviceCollection.AddKeyedSingleton>>("KernelServiceTypeToKeyMappings", mapping); + //** + + k = serviceCollection.BuildServiceProvider().GetService()!; + Assert.Equal(4, k.GetAllServices().Count()); // now this is 4 as expected + } + + [Fact] + public void ItFindsAllPluginsToPopulatePluginsCollection() + { + KernelPlugin plugin1 = KernelPluginFactory.CreateFromFunctions("plugin1"); + KernelPlugin plugin2 = KernelPluginFactory.CreateFromFunctions("plugin2"); + KernelPlugin plugin3 = KernelPluginFactory.CreateFromFunctions("plugin3"); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(plugin1); + builder.Services.AddSingleton(plugin2); + builder.Services.AddSingleton(plugin3); + Kernel kernel = builder.Build(); + + Assert.Equal(3, kernel.Plugins.Count); + } + + [Fact] + public void ItFindsPluginCollectionToUse() + { + KernelPlugin plugin1 = KernelPluginFactory.CreateFromFunctions("plugin1"); + KernelPlugin plugin2 = KernelPluginFactory.CreateFromFunctions("plugin2"); + KernelPlugin plugin3 = KernelPluginFactory.CreateFromFunctions("plugin3"); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient(_ => new(new[] { plugin1, plugin2, plugin3 })); + + Kernel kernel1 = builder.Build(); + Assert.Equal(3, kernel1.Plugins.Count); + + Kernel kernel2 = builder.Build(); + Assert.Equal(3, kernel2.Plugins.Count); + + Assert.NotSame(kernel1.Plugins, kernel2.Plugins); + } + + [Fact] + public void ItAddsTheRightTypesInAddKernel() + { + IServiceCollection sc = new ServiceCollection(); + + IKernelBuilder builder = sc.AddKernel(); + Assert.NotNull(builder); + Assert.Throws(() => builder.Build()); + + builder.Services.AddSingleton>(new Dictionary()); + + IServiceProvider provider = sc.BuildServiceProvider(); + + Assert.NotNull(provider.GetService>()); + Assert.NotNull(provider.GetService()); + Assert.NotNull(provider.GetService()); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs new file mode 100644 index 000000000000..915c49e90712 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs @@ -0,0 +1,159 @@ +// Copyright (c) Microsoft. All rights reserved. 
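The KernelExtensionsTests below distinguish CreatePluginFromFunctions, which returns a plugin without registering it, from ImportPluginFromFunctions and Plugins.AddFromFunctions, which also add the plugin to kernel.Plugins. A brief sketch of that difference, with illustrative names:

```csharp
using Microsoft.SemanticKernel;

Kernel kernel = new();

// Create*: builds a plugin but does not register it, so kernel.Plugins stays empty.
KernelPlugin detached = kernel.CreatePluginFromFunctions("mathlite", new[]
{
    kernel.CreateFunctionFromMethod((int a, int b) => a + b, "Add"),
});

// Import*: builds the plugin and also adds it to kernel.Plugins.
kernel.ImportPluginFromFunctions("mathlite", new[]
{
    kernel.CreateFunctionFromMethod((int a, int b) => a * b, "Multiply"),
});
```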
+ +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelExtensionsTests +{ + [Fact] + public void CreatePluginFromFunctions() + { + Kernel kernel = new(); + + KernelPlugin plugin = kernel.CreatePluginFromFunctions("coolplugin", new[] + { + kernel.CreateFunctionFromMethod(() => { }, "Function1"), + kernel.CreateFunctionFromMethod(() => { }, "Function2"), + }); + + Assert.NotNull(plugin); + Assert.Empty(kernel.Plugins); + + Assert.Equal("coolplugin", plugin.Name); + Assert.Empty(plugin.Description); + Assert.Equal(2, plugin.FunctionCount); + Assert.True(plugin.Contains("Function1")); + Assert.True(plugin.Contains("Function2")); + } + + [Fact] + public void CreateEmptyPluginFromFunctions() + { + Kernel kernel = new(); + + KernelPlugin plugin = kernel.CreatePluginFromFunctions("coolplugin"); + + Assert.NotNull(plugin); + Assert.Empty(kernel.Plugins); + + Assert.Equal("coolplugin", plugin.Name); + Assert.Empty(plugin.Description); + Assert.Empty(plugin); + Assert.Equal(0, plugin.FunctionCount); + } + + [Fact] + public void CreatePluginFromDescriptionAndFunctions() + { + Kernel kernel = new(); + + KernelPlugin plugin = kernel.CreatePluginFromFunctions("coolplugin", "the description", new[] + { + kernel.CreateFunctionFromMethod(() => { }, "Function1"), + kernel.CreateFunctionFromMethod(() => { }, "Function2"), + }); + + Assert.NotNull(plugin); + Assert.Empty(kernel.Plugins); + + Assert.Equal("coolplugin", plugin.Name); + Assert.Equal("the description", plugin.Description); + Assert.Equal(2, plugin.FunctionCount); + Assert.True(plugin.Contains("Function1")); + Assert.True(plugin.Contains("Function2")); + } + + [Fact] + public void ImportPluginFromFunctions() + { + Kernel kernel = new(); + + kernel.ImportPluginFromFunctions("coolplugin", new[] + { + kernel.CreateFunctionFromMethod(() => { }, "Function1"), + kernel.CreateFunctionFromMethod(() => { }, "Function2"), + }); + + Assert.Single(kernel.Plugins); + + KernelPlugin plugin = kernel.Plugins["coolplugin"]; + Assert.Equal("coolplugin", plugin.Name); + Assert.Empty(plugin.Description); + Assert.NotNull(plugin); + + Assert.Equal(2, plugin.FunctionCount); + Assert.True(plugin.Contains("Function1")); + Assert.True(plugin.Contains("Function2")); + } + + [Fact] + public void ImportPluginFromDescriptionAndFunctions() + { + Kernel kernel = new(); + + kernel.ImportPluginFromFunctions("coolplugin", "the description", new[] + { + kernel.CreateFunctionFromMethod(() => { }, "Function1"), + kernel.CreateFunctionFromMethod(() => { }, "Function2"), + }); + + Assert.Single(kernel.Plugins); + + KernelPlugin plugin = kernel.Plugins["coolplugin"]; + Assert.Equal("coolplugin", plugin.Name); + Assert.Equal("the description", plugin.Description); + Assert.NotNull(plugin); + + Assert.Equal(2, plugin.FunctionCount); + Assert.True(plugin.Contains("Function1")); + Assert.True(plugin.Contains("Function2")); + } + + [Fact] + public void AddFromFunctions() + { + Kernel kernel = new(); + + kernel.Plugins.AddFromFunctions("coolplugin", new[] + { + kernel.CreateFunctionFromMethod(() => { }, "Function1"), + kernel.CreateFunctionFromMethod(() => { }, "Function2"), + }); + + Assert.Single(kernel.Plugins); + + KernelPlugin plugin = kernel.Plugins["coolplugin"]; + Assert.Equal("coolplugin", plugin.Name); + Assert.Empty(plugin.Description); + Assert.NotNull(plugin); + + Assert.Equal(2, plugin.FunctionCount); + Assert.True(plugin.Contains("Function1")); + Assert.True(plugin.Contains("Function2")); + } + + [Fact] + public 
void AddFromDescriptionAndFunctions() + { + Kernel kernel = new(); + + kernel.Plugins.AddFromFunctions("coolplugin", "the description", new[] + { + kernel.CreateFunctionFromMethod(() => { }, "Function1"), + kernel.CreateFunctionFromMethod(() => { }, "Function2"), + }); + + Assert.Single(kernel.Plugins); + + KernelPlugin plugin = kernel.Plugins["coolplugin"]; + Assert.Equal("coolplugin", plugin.Name); + Assert.Equal("the description", plugin.Description); + Assert.NotNull(plugin); + + Assert.Equal(2, plugin.FunctionCount); + Assert.True(plugin.Contains("Function1")); + Assert.True(plugin.Contains("Function2")); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionExtensionsTests.cs new file mode 100644 index 000000000000..2168a5435176 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionExtensionsTests.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelFunctionExtensionsTests +{ + [Theory] + [ClassData(typeof(ComplexObjectTestData))] + public async Task InvokeAsyncOfTShouldMatchFunctionResultValueAsync(object? expectedValue) + { + var testFunction = KernelFunctionFactory.CreateFromMethod(() => expectedValue, functionName: "Test"); + + var kernel = new Kernel(); + var resultValueInvokeSignature2 = await testFunction.InvokeAsync(kernel, new KernelArguments()); + + Assert.Equal(expectedValue, resultValueInvokeSignature2); + } + + public class ComplexObjectTestData : IEnumerable + { + private readonly List _data = new() + { + new object?[] { null }, + new object?[] { 1 }, + new object?[] { "Bogus" }, + new object?[] { DateTime.Now }, + new object?[] { new { Id = 2, Name = "Object2" } } + }; + + public IEnumerator GetEnumerator() => this._data.GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => this.GetEnumerator(); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromMethodTests1.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromMethodTests1.cs new file mode 100644 index 000000000000..218703cb76c0 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromMethodTests1.cs @@ -0,0 +1,1335 @@ +// Copyright (c) Microsoft. All rights reserved. 
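Finally, KernelFunctionFromMethodTests1 below covers the many delegate shapes that KernelFunctionFactory.CreateFromMethod accepts. A compact sketch of two of those shapes, with illustrative function names:

```csharp
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

var kernel = new Kernel();

// Parameterless synchronous delegate.
KernelFunction fromSync = KernelFunctionFactory.CreateFromMethod(() => "done", functionName: "Sync");

// Delegate with a parameter bound from KernelArguments by name.
KernelFunction withParam = KernelFunctionFactory.CreateFromMethod(
    (string input) => input.ToUpperInvariant(), functionName: "Upper");

FunctionResult result = await withParam.InvokeAsync(kernel, new KernelArguments { ["input"] = "hello" });
// result.GetValue<string>() == "HELLO"
```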
+ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Moq; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public sealed class KernelFunctionFromMethodTests1 +{ + private const string InputParameterName = "input"; + private static readonly KernelFunction s_nopFunction = KernelFunctionFactory.CreateFromMethod(() => { }); + + private readonly Kernel _kernel; + private readonly Mock _logger; + + private static string s_expected = string.Empty; + private static string s_actual = string.Empty; + + public KernelFunctionFromMethodTests1() + { + this._kernel = new Kernel(); + this._logger = new Mock(); + + s_expected = Guid.NewGuid().ToString("D"); + } + + [Fact] + public async Task ItSupportsStaticVoidVoidAsync() + { + // Arrange + static void Test() + { + s_actual = s_expected; + } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + await function.InvokeAsync(this._kernel); + + // Assert + Assert.Equal(s_expected, s_actual); + } + + [Fact] + public async Task ItSupportsStaticVoidStringAsync() + { + // Arrange + static string Test() + { + s_actual = s_expected; + return s_expected; + } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Equal(s_expected, result.GetValue()); + Assert.Equal(s_expected, result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticVoidTaskStringAsync() + { + // Arrange + static Task Test() + { + s_actual = s_expected; + return Task.FromResult(s_expected); + } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Equal(s_expected, result.GetValue()); + Assert.Equal(s_expected, result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticVoidValueTaskStringAsync() + { + // Arrange + static async ValueTask Test() + { + s_actual = s_expected; + await Task.Delay(1); + return s_expected; + } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Equal(s_expected, result.GetValue()); + Assert.Equal(s_expected, result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticVoidAsync() + { + // Arrange + static void Test() + { + s_actual = s_expected; + } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticAsync() + { + // Arrange + static string Test(string someVar) + { + s_actual = someVar; + return "abc"; + } + + var 
arguments = new KernelArguments(); + arguments["someVar"] = s_expected; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Equal("abc", result.GetValue()); + Assert.Equal("abc", result.ToString()); + } + + [Fact] + public async Task ItSupportsInstanceStringStringNullableAsync() + { + // Arrange + int invocationCount = 0; + + string? Test(string someVar) + { + invocationCount++; + s_actual = someVar; + return "abc"; + } + + var arguments = new KernelArguments(); + arguments["someVar"] = s_expected; + + // Act + Func method = Test; + var function = KernelFunctionFactory.CreateFromMethod(method, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(1, invocationCount); + Assert.Equal(s_expected, s_actual); + Assert.Equal("abc", result.GetValue()); + Assert.Equal("abc", result.ToString()); + } + + [Fact] + public async Task ItSupportsInstanceStringTaskAsync() + { + // Arrange + int invocationCount = 0; + + async Task TestAsync(string canary) + { + await Task.Delay(0); + invocationCount++; + s_actual = canary; + } + + var arguments = new KernelArguments(); + arguments["canary"] = s_expected; + + // Act + Func method = TestAsync; + var function = KernelFunctionFactory.CreateFromMethod(method, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(1, invocationCount); + Assert.Equal(s_expected, s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsInstanceStringVoidAsync() + { + // Arrange + int invocationCount = 0; + + void Test(string input) + { + invocationCount++; + s_actual = s_expected + input; + } + + var arguments = new KernelArguments() { [InputParameterName] = ".blah" }; + + // Act + Action method = Test; + var function = KernelFunctionFactory.CreateFromMethod(method, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(1, invocationCount); + Assert.Equal(s_expected + ".blah", s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsInstanceStringStringAsync() + { + // Arrange + int invocationCount = 0; + + string Test(string input) + { + invocationCount++; + return input; + } + + var arguments = new KernelArguments() { [InputParameterName] = "foo-bar" }; + + // Act + Func method = Test; + var function = KernelFunctionFactory.CreateFromMethod(method, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(1, invocationCount); + Assert.Equal("foo-bar", result.GetValue()); + Assert.Equal("foo-bar", result.ToString()); + } + + [Fact] + public async Task ItSupportsInstanceStringTaskStringAsync() + { + // Arrange + int invocationCount = 0; + + Task Test(string input) + { + invocationCount++; + return Task.FromResult("hello there"); + } + + var arguments = new KernelArguments() { [InputParameterName] = string.Empty }; + + // Act + Func> method = Test; + var function = 
KernelFunctionFactory.CreateFromMethod(method, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(1, invocationCount); + Assert.Equal("hello there", result.GetValue()); + } + + [Fact] + public async Task ItSupportsInstanceKernelVoidAsync() + { + // Arrange + int invocationCount = 0; + Kernel? actualKernel = null; + + void Test(Kernel kernel) + { + invocationCount++; + actualKernel = kernel; + } + + var arguments = new KernelArguments(); + + // Act + Action method = Test; + var function = KernelFunctionFactory.CreateFromMethod(method); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(1, invocationCount); + Assert.Equal(this._kernel, actualKernel); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticStringStringAsync() + { + // Arrange + static string Test(string input) + { + s_actual = input; + return "new data"; + } + + var arguments = new KernelArguments() { [InputParameterName] = s_expected }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Equal("new data", result.GetValue()); + Assert.Equal("new data", result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticStringTaskStringAsync() + { + // Arrange + static Task Test(string input) + { + s_actual = input; + return Task.FromResult("new data"); + } + + var arguments = new KernelArguments() { [InputParameterName] = s_expected }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Equal("new data", result.GetValue()); + Assert.Equal("new data", result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticValueTaskAsync() + { + // Arrange + s_expected = "testabc"; + + static ValueTask Test(string input) + { + s_actual = input + "abc"; + return new ValueTask(); + } + + var arguments = new KernelArguments() { [InputParameterName] = "test" }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticStringTaskAsync() + { + // Arrange + static Task TestAsync(string input) + { + s_actual = s_expected; + return Task.CompletedTask; + } + + var arguments = new KernelArguments() { [InputParameterName] = string.Empty }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestAsync, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticStringValueTaskAsync() + { + // Arrange + static ValueTask TestAsync(string input) + { + s_actual = s_expected; + return 
default; + } + + var arguments = new KernelArguments() { [InputParameterName] = string.Empty }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestAsync, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticTaskAsync() + { + // Arrange + s_expected = "x y z"; + + static Task TestAsync() + { + s_actual = s_expected; + return Task.CompletedTask; + } + + var arguments = new KernelArguments(); + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestAsync, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticStringAsync() + { + // Arrange + s_expected = "x y z"; + + static Task TestAsync(string input) + { + s_actual = input; + return Task.CompletedTask; + } + + var arguments = new KernelArguments() { [InputParameterName] = "x y z" }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestAsync, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsStaticVoidTaskAsync() + { + // Arrange + static Task TestAsync() + { + s_actual = s_expected; + return Task.CompletedTask; + } + + var arguments = new KernelArguments(); + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestAsync, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(s_expected, s_actual); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItSupportsUsingNamedInputValueAsync() + { + static string Test(string input) => "Result: " + input; + + var arguments = new KernelArguments() { [InputParameterName] = "input value" }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal("Result: input value", result.GetValue()); + Assert.Equal("Result: input value", result.ToString()); + } + + [Fact] + public async Task ItSupportsPreferringNamedValueOverInputAsync() + { + static string Test(string other) => "Result: " + other; + + var arguments = new KernelArguments() + { + [InputParameterName] = "input value", + ["other"] = "other value" + }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal("Result: other value", result.GetValue()); + Assert.Equal("Result: other value", result.ToString()); + } + + [Fact] + public async Task ItSupportsOverridingNameWithAttributeAsync() + { + static string Test([Description("description")] string input) => "Result: " + input; + + var arguments = new KernelArguments() + { + [InputParameterName] = "input value", + ["other"] = "other 
value" + }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal("Result: input value", result.GetValue()); + Assert.Equal("Result: input value", result.ToString()); + } + + [Fact] + public async Task ItSupportNullDefaultValuesOverInputAsync() + { + static string Test(string? input = null, string? other = null) => "Result: " + (other is null); + + var arguments = new KernelArguments() { [InputParameterName] = "input value" }; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal("Result: True", result.GetValue()); + Assert.Equal("Result: True", result.ToString()); + } + + [Fact] + public async Task ItSupportFunctionResultAsync() + { + FunctionResult Test() => new(s_nopFunction, "fake-result", CultureInfo.InvariantCulture); + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake-result", result.GetValue()); + Assert.Equal("fake-result", result.ToString()); + } + + [Fact] + public async Task ItSupportFunctionResultTaskAsync() + { + // Arrange + Task Test() + { + var functionResult = new FunctionResult(s_nopFunction, "fake-result", CultureInfo.InvariantCulture); + return Task.FromResult(functionResult); + } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake-result", result.GetValue()); + Assert.Equal("fake-result", result.ToString()); + } + + [Fact] + public async Task ItSupportFunctionResultValueTaskAsync() + { + // Arrange + ValueTask Test() + { + var functionResult = new FunctionResult(s_nopFunction, "fake-result", CultureInfo.InvariantCulture); + return ValueTask.FromResult(functionResult); + } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel); + + // Assert + Assert.NotNull(result); + Assert.Equal("fake-result", result.GetValue()); + Assert.Equal("fake-result", result.ToString()); + } + + [Fact] + public async Task ItSupportsConvertingFromManyTypesAsync() + { + static string Test(int a, long b, decimal c, Guid d, DateTimeOffset e, DayOfWeek? 
f) => + $"{a} {b} {c} {d} {e:R} {f}"; + + var arguments = new KernelArguments(); + arguments["a"] = "1"; + arguments["b"] = -2; + arguments["c"] = "1234"; + arguments["d"] = Guid.Parse("7e08cc00-1d71-4558-81ed-69929499dea1"); + arguments["e"] = "Thu, 25 May 2023 20:17:30 GMT"; + arguments["f"] = DayOfWeek.Monday; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal("1 -2 1234 7e08cc00-1d71-4558-81ed-69929499dea1 Thu, 25 May 2023 20:17:30 GMT Monday", result.GetValue()); + Assert.Equal("1 -2 1234 7e08cc00-1d71-4558-81ed-69929499dea1 Thu, 25 May 2023 20:17:30 GMT Monday", result.ToString()); + } + + [Fact] + public async Task ItSupportsConvertingFromTypeConverterAttributedTypesAsync() + { + static int Test(MyCustomType mct) => mct.Value * 2; + + var arguments = new KernelArguments(); + arguments["mct"] = "42"; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + FunctionResult result = await function.InvokeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(84, result.GetValue()); + Assert.Equal("84", result.ToString()); + } + + [Theory] + [InlineData((int)0, DayOfWeek.Sunday)] + [InlineData((uint)1, DayOfWeek.Monday)] + [InlineData((long)2, DayOfWeek.Tuesday)] + [InlineData((ulong)3, DayOfWeek.Wednesday)] + [InlineData((short)4, DayOfWeek.Thursday)] + [InlineData((ushort)5, DayOfWeek.Friday)] + [InlineData((byte)6, DayOfWeek.Saturday)] + [InlineData((sbyte)0, DayOfWeek.Sunday)] + public async Task ItSupportsConvertingAllIntegerTypesToEnumAsync(object argument, DayOfWeek expected) + { + // Arrange + object? actual = null; + + var function = KernelFunctionFactory.CreateFromMethod((DayOfWeek dow) => actual = dow); + + // Act + var result = await function.InvokeAsync(this._kernel, new() { ["dow"] = argument }); + + // Assert + Assert.Equal(expected, actual); + } + + [TypeConverter(typeof(MyCustomTypeConverter))] + private sealed class MyCustomType + { + public int Value { get; set; } + } + +#pragma warning disable CA1812 // Instantiated by reflection + private sealed class MyCustomTypeConverter : TypeConverter + { + public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) => + sourceType == typeof(string); + public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) => + new MyCustomType { Value = int.Parse((string)value, culture) }; + } +#pragma warning restore CA1812 + + [Fact] + public async Task ItSupportsConvertingArgumentsFromStringToManyTypesAsync() + { + // Arrange + var arguments = new KernelArguments() { [InputParameterName] = "1" }; + + async Task AssertResult(Delegate d, object? expected, string? 
expectedString) + { + var result = await KernelFunctionFactory.CreateFromMethod(d, functionName: "Test")!.InvokeAsync(this._kernel, arguments); + + Assert.Equal(expected, result.GetValue()); + Assert.Equal(expectedString, result.ToString()); + } + + // Act/Assert + await AssertResult((sbyte input) => input * 2, 2, "2"); + await AssertResult((byte input) => input * 2, 2, "2"); + await AssertResult((short input) => input * 2, 2, "2"); + await AssertResult((ushort input) => input * 2, 2, "2"); + await AssertResult((int input) => input * 2, 2, "2"); + await AssertResult((uint input) => input * 2, (uint)2, "2"); + await AssertResult((long input) => input * 2, (long)2, "2"); + await AssertResult((ulong input) => input * 2, (ulong)2, "2"); + await AssertResult((float input) => input * 2, (float)2, "2"); + await AssertResult((double input) => input * 2, (double)2, "2"); + await AssertResult((int input) => Task.FromResult(input * 2), 2, "2"); + await AssertResult((long input) => Task.FromResult(input * 2), (long)2, "2"); + await AssertResult((int input) => new ValueTask(input * 2), 2, "2"); + await AssertResult((long input) => new ValueTask(input * 2), (long)2, "2"); + await AssertResult((long? input) => input!.Value * 2, (long?)2, "2"); + await AssertResult((TimeSpan input) => TimeSpan.FromTicks(input.Ticks * 2), TimeSpan.FromDays(2), "2.00:00:00"); + await AssertResult((TimeSpan? input) => (int?)null, null, ""); + + arguments[InputParameterName] = "http://example.com/semantic"; + await AssertResult((Uri input) => new Uri(input, "kernel"), new Uri("http://example.com/kernel"), "http://example.com/kernel"); + } + + [Fact] + public async Task ItSupportsArgumentsAsIsWithoutConvertingTheirTypeAsync() + { + //Arrange + async Task AssertParameterType(T expected) + { + var d = (T actual) => + { + //Check the argument is of the expected type + if (actual is not null) + { + Assert.IsType(actual); + } + + //Check the argument value is the expected value + Assert.Equal(expected, actual); + }; + + var arguments = new KernelArguments() { { "actual", (T)expected } }; + + await KernelFunctionFactory.CreateFromMethod(d, functionName: "Test")!.InvokeAsync(this._kernel, arguments); + } + + // Act & Assert + await AssertParameterType(null); + await AssertParameterType("2"); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType(2); + await AssertParameterType('w'); + await AssertParameterType(true); + await AssertParameterType(DateTime.UtcNow); + await AssertParameterType(DateTimeOffset.UtcNow); + await AssertParameterType(TimeSpan.FromMinutes(1)); + await AssertParameterType(Guid.NewGuid()); + await AssertParameterType(ConsoleColor.Blue); + await AssertParameterType(new Uri("https://fake-random-test-host/fake-path")); + await AssertParameterType(new object()); + } + + [Fact] + public async Task ItSupportsArgumentsImplicitConversionAsync() + { + //Arrange + var arguments = new KernelArguments() + { + ["l"] = (int)1, //Passed to parameter of type long + ["i"] = (byte)1, //Passed to parameter of type int + ["d"] = (float)1.0, //Passed to parameter of type double + ["f"] = (uint)1.0, //Passed to parameter of type float + ["g"] = new Guid("35626209-b0ab-458c-bfc4-43e6c7bd13dc"), //Passed to parameter of type string + ["dof"] = 
DayOfWeek.Thursday //Passed to parameter of type int + }; + + var function = KernelFunctionFactory.CreateFromMethod((long l, int i, double d, float f, string g, int dof) => + { + Assert.Equal(1, l); + Assert.Equal(1, i); + Assert.Equal(1.0, d); + Assert.Equal("35626209-b0ab-458c-bfc4-43e6c7bd13dc", g); + Assert.Equal(4, dof); + }, + functionName: "Test"); + + // Act & Assert + await function.InvokeAsync(this._kernel, arguments); + } + + [Fact] + public async Task ItSupportsParametersWithDefaultValuesAsync() + { + //Arrange + + static void Test(int a, long b = 20, string c = "dv", char d = 'w') + { + Assert.Equal(10, a); + Assert.Equal(20, b); + Assert.Equal("dv", c); + Assert.Equal('w', d); + } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + + await function.InvokeAsync(this._kernel, arguments: new() { { "a", 10 } }); // Passing value for the 'a' parameter only. + } + + [Fact] + public async Task ItShouldMarshalArgumentsOfValueTypeAsync() + { + //Scenario #1 - passing int argument to a method that accepts int + object? actual = null; + var sut = KernelFunctionFactory.CreateFromMethod((int val) => { actual = val; }); + await sut.InvokeAsync(this._kernel, new() { ["val"] = 5 }); + Assert.Equal(5, actual); + + //Scenario #2 - passing null argument to a method that accepts int + sut = KernelFunctionFactory.CreateFromMethod((int val) => { actual = val; }); + await Assert.ThrowsAsync(() => sut.InvokeAsync(this._kernel, new() { ["val"] = null })); + + //Scenario #3 - passing int argument to a method that accepts int? + actual = null; + sut = KernelFunctionFactory.CreateFromMethod((int? val) => { actual = val; }); + await sut.InvokeAsync(this._kernel, new() { ["val"] = 5 }); + Assert.Equal(5, actual); + + //Scenario #4 - passing null argument to a method that accepts int? + actual = new(); + sut = KernelFunctionFactory.CreateFromMethod((int? val) => { actual = val; }); + await sut.InvokeAsync(this._kernel, new() { ["val"] = null }); + Assert.Null(actual); + } + + [Fact] + public async Task ItShouldMarshalArgumentsOfReferenceTypeAsync() + { + //Scenario #1 - passing string argument to a method that accepts string + object? actual = null; + var sut = KernelFunctionFactory.CreateFromMethod((string val) => { actual = val; }); + await sut.InvokeAsync(this._kernel, new() { ["val"] = "5" }); + Assert.Equal("5", actual); + + //Scenario #2 - passing null argument to a method that accepts string + actual = new(); + sut = KernelFunctionFactory.CreateFromMethod((string val) => { actual = val; }); + await sut.InvokeAsync(this._kernel, new() { ["val"] = null }); + Assert.Null(actual); + + //Scenario #3 - passing string argument to a method that accepts string? + actual = null; + sut = KernelFunctionFactory.CreateFromMethod((string? val) => { actual = val; }); + await sut.InvokeAsync(this._kernel, new() { ["val"] = "5" }); + Assert.Equal("5", actual); + + //Scenario #4 - passing null argument to a method that accepts string? + actual = new(); + sut = KernelFunctionFactory.CreateFromMethod((string? 
val) => { actual = val; }); + await sut.InvokeAsync(this._kernel, new() { ["val"] = null }); + Assert.Null(actual); + } + + [Fact] + public async Task ItUsesContextCultureForParsingFormattingAsync() + { + // Arrange + var arguments = new KernelArguments(); + KernelFunction func = KernelFunctionFactory.CreateFromMethod((double input) => input * 2, functionName: "Test"); + FunctionResult result; + + // Act/Assert + + this._kernel.Culture = new CultureInfo("fr-FR"); + arguments[InputParameterName] = "12,34"; // tries first to parse with the specified culture + result = await func.InvokeAsync(this._kernel, arguments); + Assert.Equal(24.68, result.GetValue()); + Assert.Equal("24,68", result.ToString()); + + this._kernel.Culture = new CultureInfo("fr-FR"); + arguments[InputParameterName] = "12.34"; // falls back to invariant culture + result = await func.InvokeAsync(this._kernel, arguments); + Assert.Equal(24.68, result.GetValue()); + Assert.Equal("24,68", result.ToString()); + + this._kernel.Culture = new CultureInfo("en-US"); + arguments[InputParameterName] = "12.34"; // works with current culture + result = await func.InvokeAsync(this._kernel, arguments); + Assert.Equal(24.68, result.GetValue()); + Assert.Equal("24.68", result.ToString()); + + this._kernel.Culture = new CultureInfo("en-US"); + arguments[InputParameterName] = "12,34"; // not parsable with current or invariant culture + await Assert.ThrowsAsync(() => func.InvokeAsync(this._kernel, arguments)); + } + + [Fact] + public async Task ItThrowsWhenItFailsToConvertAnArgumentAsync() + { + static string Test(Guid g) => g.ToString(); + + var arguments = new KernelArguments(); + arguments["g"] = "7e08cc00-1d71-4558-81ed-69929499dxyz"; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + Assert.NotNull(function); + + var ex = await Assert.ThrowsAsync(() => function.InvokeAsync(this._kernel, arguments)); + + //Assert + AssertExtensions.AssertIsArgumentOutOfRange(ex, "g", (string?)arguments["g"]!); + } + + [Fact] + public void ItExposesMetadataFromDelegate() + { + [Description("Concat information")] + static string Test(Guid id, string name, int old) => $"{id} {name} {old}"; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + + // Assert + Assert.Contains("Test", function.Name, StringComparison.Ordinal); + Assert.Equal("Concat information", function.Description); + Assert.Equal("id", function.Metadata.Parameters[0].Name); + Assert.Equal("name", function.Metadata.Parameters[1].Name); + Assert.Equal("old", function.Metadata.Parameters[2].Name); + } + + [Fact] + public void ItExposesMetadataFromMethodInfo() + { + [Description("Concat information")] + static string Test(Guid id, string name, int old) => $"{id} {name} {old}"; + + // Act + var function = KernelFunctionFactory.CreateFromMethod(Test); + + // Assert + Assert.Contains("Test", function.Name, StringComparison.Ordinal); + Assert.Equal("Concat information", function.Description); + Assert.Equal("id", function.Metadata.Parameters[0].Name); + Assert.Equal("name", function.Metadata.Parameters[1].Name); + Assert.Equal("old", function.Metadata.Parameters[2].Name); + } + + [Fact] + public async Task ItCanReturnBasicTypesAsync() + { + // Arrange + static int TestInt(int number) => number; + static double TestDouble(double number) => number; + static string TestString(string str) => str; + static bool TestBool(bool flag) => flag; + + var function1 = KernelFunctionFactory.CreateFromMethod(TestInt); + var function2 = 
KernelFunctionFactory.CreateFromMethod(TestDouble); + var function3 = KernelFunctionFactory.CreateFromMethod(TestString); + var function4 = KernelFunctionFactory.CreateFromMethod(TestBool); + + // Act + FunctionResult result1 = await function1.InvokeAsync(this._kernel, new() { ["number"] = "42" }); + FunctionResult result2 = await function2.InvokeAsync(this._kernel, new() { ["number"] = "3.14" }); + FunctionResult result3 = await function3.InvokeAsync(this._kernel, new() { ["str"] = "test-string" }); + FunctionResult result4 = await function4.InvokeAsync(this._kernel, new() { ["flag"] = "true" }); + + // Assert + Assert.Equal(42, result1.GetValue()); + Assert.Equal("42", result1.ToString()); + + Assert.Equal(3.14, result2.GetValue()); + Assert.Equal("3.14", result2.ToString()); + + Assert.Equal("test-string", result3.GetValue()); + Assert.Equal("test-string", result3.ToString()); + + Assert.True(result4.GetValue()); + Assert.Equal("True", result4.ToString()); + } + + [Fact] + public async Task ItCanReturnComplexTypeAsync() + { + // Arrange + static MyCustomType TestCustomType(MyCustomType instance) => instance; + + var arguments = new KernelArguments(); + arguments["instance"] = "42"; + + var function = KernelFunctionFactory.CreateFromMethod(TestCustomType); + + // Act + FunctionResult result = await function.InvokeAsync(this._kernel, arguments); + + var actualInstance = result.GetValue(); + + // Assert + Assert.NotNull(actualInstance); + Assert.Equal(42, result.GetValue()?.Value); + Assert.Equal(42, actualInstance.Value); + } + + [Fact] + public async Task ItCanReturnAsyncEnumerableTypeAsync() + { + // Arrange + static async IAsyncEnumerable TestAsyncEnumerableTypeAsync() + { + yield return 1; + + await Task.Delay(50); + + yield return 2; + + await Task.Delay(50); + + yield return 3; + } + + var function = KernelFunctionFactory.CreateFromMethod(TestAsyncEnumerableTypeAsync); + + // Act + FunctionResult result = await function.InvokeAsync(this._kernel, new KernelArguments()); + + // Assert + Assert.NotNull(result); + + var asyncEnumerableResult = result.GetValue>(); + + Assert.NotNull(asyncEnumerableResult); + + var assertResult = new List(); + + await foreach (var value in asyncEnumerableResult) + { + assertResult.Add(value); + } + + Assert.True(assertResult.SequenceEqual(new List { 1, 2, 3 })); + } + + [Fact] + public async Task ItCanStreamAsyncEnumerableTypeAsync() + { + // Arrange + bool invoked = false; + async IAsyncEnumerable TestAsyncEnumerableTypeAsync() + { + invoked = true; + for (int i = 0; i < 10; i++) + { + await Task.Yield(); + yield return i; + } + } + KernelFunction function = KernelFunctionFactory.CreateFromMethod(TestAsyncEnumerableTypeAsync); + + // Act / Assert + IAsyncEnumerable enumerable = function.InvokeStreamingAsync(this._kernel); + Assert.False(invoked); + IAsyncEnumerator enumerator = enumerable.GetAsyncEnumerator(); + Assert.NotNull(enumerator); + Assert.False(invoked); + Assert.True(await enumerator.MoveNextAsync()); + Assert.True(invoked); + Assert.Equal(0, enumerator.Current); + for (int i = 1; i < 10; i++) + { + Assert.True(await enumerator.MoveNextAsync()); + Assert.Equal(i, enumerator.Current); + } + Assert.False(await enumerator.MoveNextAsync()); + await enumerator.DisposeAsync(); + } + + [Fact] + public async Task ItCanStreamAsyncEnumerablePassthroughAsync() + { + // Arrange + bool invoked = false; + async IAsyncEnumerable TestAsyncEnumerableTypeAsync() + { + invoked = true; + for (int i = 0; i < 10; i++) + { + await Task.Yield(); + yield return new 
StreamingMethodContent(i); + } + } + KernelFunction function = KernelFunctionFactory.CreateFromMethod(TestAsyncEnumerableTypeAsync); + + // Act / Assert + IAsyncEnumerable enumerable = function.InvokeStreamingAsync(this._kernel); + Assert.False(invoked); + IAsyncEnumerator enumerator = enumerable.GetAsyncEnumerator(); + Assert.NotNull(enumerator); + Assert.False(invoked); + Assert.True(await enumerator.MoveNextAsync()); + Assert.True(invoked); + Assert.Equal(0, enumerator.Current.Content); + for (int i = 1; i < 10; i++) + { + Assert.True(await enumerator.MoveNextAsync()); + Assert.Equal(i, enumerator.Current.Content); + } + Assert.False(await enumerator.MoveNextAsync()); + await enumerator.DisposeAsync(); + } + + [Fact] + public async Task ItPropagatesOriginalExceptionTypeAsync() + { + // Arrange + var arguments = new KernelArguments(); + Exception expected = new FormatException("expected"); + KernelFunction func = KernelFunctionFactory.CreateFromMethod(() => { throw expected; }); + + // Act + Exception actual = await Record.ExceptionAsync(() => func.InvokeAsync(this._kernel, arguments)); + + // Assert + Assert.Same(expected, actual); + } + + [Fact] + public async Task ItCanDeserializeJsonDocumentAsync() + { + // Arrange + var document = JsonDocument.Parse(@"{""id"":28}"); + CustomTypeForJsonTests? actualArgValue = null; + + var func = KernelFunctionFactory.CreateFromMethod((CustomTypeForJsonTests param) => { actualArgValue = param; }); + + // Act + var res = await func.InvokeAsync(this._kernel, new() { ["param"] = document }); + + // Assert + Assert.NotNull(actualArgValue); + Assert.Equal(28, actualArgValue.Id); + } + + [Fact] + public async Task ItCanDeserializeJsonElementAsync() + { + // Arrange + var element = JsonDocument.Parse(@"{""id"":28}").RootElement; + CustomTypeForJsonTests? actualArgValue = null; + + var func = KernelFunctionFactory.CreateFromMethod((CustomTypeForJsonTests param) => { actualArgValue = param; }); + + // Act + var res = await func.InvokeAsync(this._kernel, new() { ["param"] = element }); + + // Assert + Assert.NotNull(actualArgValue); + Assert.Equal(28, actualArgValue.Id); + } + + [Fact] + public async Task ItCanDeserializeJsonNodeAsync() + { + // Arrange + var node = JsonNode.Parse(@"{""id"":28}"); + CustomTypeForJsonTests? actualArgValue = null; + + var func = KernelFunctionFactory.CreateFromMethod((CustomTypeForJsonTests param) => { actualArgValue = param; }); + + // Act + var res = await func.InvokeAsync(this._kernel, new() { ["param"] = node }); + + // Assert + Assert.NotNull(actualArgValue); + Assert.Equal(28, actualArgValue.Id); + } + + [Fact] + public async Task ItShouldNotDeserializeIfParameterTypeAndArgumentTypeAreSameAsync() + { + // Arrange + var node = JsonNode.Parse(@"{""id"":28}"); + JsonNode? actualArgValue = null; + + var func = KernelFunctionFactory.CreateFromMethod((JsonNode? param) => { actualArgValue = param; }); + + // Act + var res = await func.InvokeAsync(this._kernel, new() { ["param"] = node }); + + // Assert + Assert.NotNull(actualArgValue); + Assert.Same(node, actualArgValue); + } + + [Fact] + public async Task ItCanDeserializeJsonStringAsync() + { + // Arrange + var jsonString = @"{""id"":28}"; + CustomTypeForJsonTests? 
actualArgValue = null; + + var func = KernelFunctionFactory.CreateFromMethod((CustomTypeForJsonTests param) => { actualArgValue = param; }); + + // Act + var res = await func.InvokeAsync(this._kernel, new() { ["param"] = jsonString }); + + // Assert + Assert.NotNull(actualArgValue); + Assert.Equal(28, actualArgValue.Id); + } + + [Fact] + public async Task ItCanDeserializeThirdPartyJsonPrimitivesAsync() + { + // Arrange + var thirdPartyJsonPrimitive = new ThirdPartyJsonPrimitive(@"{""id"":28}"); + CustomTypeForJsonTests? actualArgValue = null; + + var func = KernelFunctionFactory.CreateFromMethod((CustomTypeForJsonTests param) => { actualArgValue = param; }); + + // Act + var res = await func.InvokeAsync(this._kernel, new() { ["param"] = thirdPartyJsonPrimitive }); + + // Assert + Assert.NotNull(actualArgValue); + Assert.Equal(28, actualArgValue.Id); + } + +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class CustomTypeForJsonTests +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [JsonPropertyName("id")] + public int Id { get; set; } + } + + private sealed class ThirdPartyJsonPrimitive + { + private readonly string _jsonToReturn; + + public ThirdPartyJsonPrimitive(string jsonToReturn) + { + this._jsonToReturn = jsonToReturn; + } + + public override string ToString() + { + return this._jsonToReturn; + } + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromMethodTests2.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromMethodTests2.cs new file mode 100644 index 000000000000..7705646ca842 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromMethodTests2.cs @@ -0,0 +1,368 @@ +// Copyright (c) Microsoft. All rights reserved. 
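For orientation: the file that starts here (KernelFunctionFromMethodTests2.cs) reflects over a plugin class and checks that every public method can be imported, including functions that receive services via [FromKernelServices]. The sketch below shows the plugin shape involved. It is a minimal illustration, not part of the change set; the WeatherPlugin/GetForecast names are made up, and while KernelPluginFactory.CreateFromObject, [KernelFunction], and Plugins.TryGetFunction all appear in the surrounding tests, the generic and nullable annotations shown are assumed.

// Hypothetical plugin used only to illustrate the pattern under test.
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

public sealed class WeatherPlugin
{
    [KernelFunction]
    public string GetForecast(string city) => $"Sunny in {city}";
}

public static class PluginImportSketch
{
    public static async Task RunAsync()
    {
        Kernel kernel = new();

        // CreateFromObject discovers every [KernelFunction]-annotated method on the instance.
        KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new WeatherPlugin());
        kernel.Plugins.Add(plugin);

        // Functions are then addressable by plugin name and function name.
        if (kernel.Plugins.TryGetFunction("WeatherPlugin", "GetForecast", out KernelFunction? forecast))
        {
            FunctionResult result = await forecast.InvokeAsync(kernel, new KernelArguments { ["city"] = "Seattle" });
            System.Console.WriteLine(result.GetValue<string>()); // "Sunny in Seattle"
        }
    }
}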
+ +using System; +using System.Globalization; +using System.Linq; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public sealed class KernelFunctionFromMethodTests2 +{ + private static readonly KernelFunction s_nopFunction = KernelFunctionFactory.CreateFromMethod(() => { }); + + [Fact] + public void ItDoesntThrowForValidFunctionsViaDelegate() + { + // Arrange + var pluginInstance = new LocalExamplePlugin(); + MethodInfo[] methods = pluginInstance.GetType() + .GetMethods(BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public | BindingFlags.InvokeMethod) + .Where(m => m.Name is not "GetType" and not "Equals" and not "GetHashCode" and not "ToString") + .ToArray(); + + KernelFunction[] functions = (from method in methods select KernelFunctionFactory.CreateFromMethod(method, pluginInstance, "plugin")).ToArray(); + + // Act + Assert.Equal(methods.Length, functions.Length); + Assert.All(functions, Assert.NotNull); + } + + [Fact] + public void ItDoesNotThrowForValidFunctionsViaPlugin() + { + // Arrange + var pluginInstance = new LocalExamplePlugin(); + MethodInfo[] methods = pluginInstance.GetType() + .GetMethods(BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public | BindingFlags.InvokeMethod) + .Where(m => m.Name is not "GetType" and not "Equals" and not "GetHashCode" and not "ToString") + .ToArray(); + + KernelFunction[] functions = KernelPluginFactory.CreateFromObject(pluginInstance).ToArray(); + + // Act + Assert.Equal(methods.Length, functions.Length); + Assert.All(functions, f => Assert.NotNull(f)); + } + + [Fact] + public async Task ItCanImportMethodFunctionsAsync() + { + // Arrange + var canary = false; + + // Note: the function doesn't have any SK attributes + async Task ExecuteAsync(string done) + { + Assert.Equal("NO", done); + canary = true; + await Task.Delay(0); + } + + // Act + KernelFunction function = KernelFunctionFactory.CreateFromMethod( + method: ExecuteAsync, + parameters: null, + description: "description", + functionName: "functionName"); + + FunctionResult result = await function.InvokeAsync(new(), new KernelArguments + { + ["done"] = "NO" + }); + + // Assert + Assert.True(canary); + Assert.Null(result.GetValue()); + Assert.Empty(result.ToString()); + } + + [Fact] + public async Task ItCanImportMethodFunctionsWithExternalReferencesAsync() + { + // Arrange + var arguments = new KernelArguments(); + arguments["done"] = "NO"; + + // Note: This is an important edge case that affects the function signature and how delegates + // are handled internally: the function references an external variable and cannot be static. + // This scenario is used for gRPC functions. + string variableOutsideTheFunction = "foo"; + + async Task ExecuteAsync(string done) + { + string referenceToExternalVariable = variableOutsideTheFunction; + await Task.Delay(0); + return referenceToExternalVariable; + } + + // Act. Note: this will throw an exception if the KernelFunction doesn't handle the function type. 
+ KernelFunction function = KernelFunctionFactory.CreateFromMethod( + method: ExecuteAsync, + description: "description", + functionName: "functionName"); + + FunctionResult result = await function.InvokeAsync(new(), arguments); + + // Assert + Assert.Equal(variableOutsideTheFunction, result.GetValue()); + Assert.Equal(variableOutsideTheFunction, result.ToString()); + } + + [Fact] + public async Task ItFlowsSpecialArgumentsIntoFunctionsAsync() + { + KernelBuilder builder = new(); + builder.Services.AddLogging(c => c.SetMinimumLevel(LogLevel.Warning)); + Kernel kernel = builder.Build(); + kernel.Culture = new CultureInfo("fr-FR"); + KernelArguments args = new(); + using CancellationTokenSource cts = new(); + + bool invoked = false; + KernelFunction func = null!; + func = KernelFunctionFactory.CreateFromMethod( + (Kernel kernelArg, KernelFunction funcArg, KernelArguments argsArg, ILoggerFactory loggerFactoryArg, + ILogger loggerArg, IAIServiceSelector serviceSelectorArg, CultureInfo cultureArg, CancellationToken cancellationToken) => + { + Assert.Same(kernel, kernelArg); + Assert.Same(func, funcArg); + Assert.Same(args, argsArg); + Assert.Same(kernel.LoggerFactory, loggerFactoryArg); + Assert.NotNull(loggerArg); + Assert.Same(kernel.ServiceSelector, serviceSelectorArg); + Assert.Same(kernel.Culture, cultureArg); + Assert.Equal(cts.Token, cancellationToken); + invoked = true; + }); + + await func.InvokeAsync(kernel, args, cts.Token); + + Assert.True(invoked); + } + + [Fact] + public async Task ItInjectsServicesFromDIIntoFunctionsAsync() + { + var serviceA = new ExampleService(); + var serviceB = new ExampleService(); + var serviceC = new ExampleService(); + + KernelBuilder builder = new(); + builder.Services.AddKeyedSingleton("something", serviceA); + builder.Services.AddSingleton(serviceB); + builder.Services.AddKeyedSingleton("somethingelse", serviceC); + Kernel kernel = builder.Build(); + + bool invoked = false; + KernelFunction func = KernelFunctionFactory.CreateFromMethod( + ([FromKernelServices] IExampleService service1Arg, + [FromKernelServices("something")] IExampleService service2Arg, + [FromKernelServices("somethingelse")] IExampleService service3Arg, + [FromKernelServices] IExampleService service4Arg, + [FromKernelServices("doesntexist")] IExampleService? service5Arg = null) => + { + Assert.Same(serviceB, service1Arg); + Assert.Same(serviceA, service2Arg); + Assert.Same(serviceC, service3Arg); + Assert.Same(serviceB, service4Arg); + Assert.Null(service5Arg); + invoked = true; + }); + + await func.InvokeAsync(kernel); + + Assert.True(invoked); + + Assert.DoesNotContain(func.Metadata.Parameters, p => p.Name.Contains("service", StringComparison.Ordinal)); + } + + [Fact] + public async Task ItThrowsForMissingServicesWithoutDefaultsAsync() + { + Kernel kernel = new(); + KernelFunction func; + + func = KernelFunctionFactory.CreateFromMethod(([FromKernelServices] IExampleService service) => { }); + await Assert.ThrowsAsync(() => func.InvokeAsync(kernel)); + + func = KernelFunctionFactory.CreateFromMethod(([FromKernelServices] IExampleService? service) => { }); + await Assert.ThrowsAsync(() => func.InvokeAsync(kernel)); + + func = KernelFunctionFactory.CreateFromMethod(([FromKernelServices("name")] IExampleService? 
service) => { }); + await Assert.ThrowsAsync(() => func.InvokeAsync(kernel)); + } + + private interface IExampleService + { + } + + private sealed class ExampleService : IExampleService + { + } + + private sealed class LocalExamplePlugin + { + [KernelFunction] + public void Type01() + { + } + + [KernelFunction] + public string Type02() + { + return ""; + } + + [KernelFunction] + public string? Type02Nullable() + { + return null; + } + + [KernelFunction] + public async Task Type03Async() + { + await Task.Delay(0); + return ""; + } + + [KernelFunction] + public async Task Type03NullableAsync() + { + await Task.Delay(0); + return null; + } + + [KernelFunction] + public void Type04(string input) + { + } + + [KernelFunction] + public void Type04Nullable(string? input) + { + } + + [KernelFunction] + public string Type05(string input) + { + return ""; + } + + [KernelFunction] + public string? Type05Nullable(string? input = null) + { + return ""; + } + + [KernelFunction] + public async Task Type06Async(string input) + { + await Task.Delay(0); + return ""; + } + + [KernelFunction] + public async Task Type06NullableAsync(string? input) + { + await Task.Delay(0); + return ""; + } + + [KernelFunction] + public async Task Type07Async(string input) + { + await Task.Delay(0); + } + + [KernelFunction] + public async Task Type08Async() + { + await Task.Delay(0); + } + + [KernelFunction] + public async ValueTask ReturnsValueTaskAsync() + { + await Task.Delay(0); + } + + [KernelFunction] + public async ValueTask ReturnsValueTaskStringAsync() + { + await Task.Delay(0); + return "hello world"; + } + + [KernelFunction] + public FunctionResult ReturnsFunctionResult() + { + return new FunctionResult(s_nopFunction, "fake-result", CultureInfo.InvariantCulture); + } + + [KernelFunction] + public async Task ReturnsTaskFunctionResultAsync() + { + await Task.Delay(0); + return new FunctionResult(s_nopFunction, "fake-result", CultureInfo.InvariantCulture); + } + + [KernelFunction] + public async ValueTask ReturnsValueTaskFunctionResultAsync() + { + await Task.Delay(0); + return new FunctionResult(s_nopFunction, "fake-result", CultureInfo.InvariantCulture); + } + + [KernelFunction] + public string WithPrimitives( + byte a1, + byte? b1, + sbyte c1, + sbyte? d1, + short e1, + short? f1, + ushort g1, + ushort? h1, + int i1, + int? j1, + uint k1, + uint? l1, + long m1, + long? n1, + ulong o1, + ulong? p1, + float q1, + float? r1, + double s1, + double? t1, + decimal u1, + decimal? v1, + char w1, + char? x1, + bool y1, + bool? z1, + DateTime a2, + DateTime? b2, + DateTimeOffset c2, + DateTimeOffset? d2, + TimeSpan e2, + TimeSpan? f2, + Guid g2, + Guid? h2, + DayOfWeek i2, + DayOfWeek? j2, + Uri k2, + string l2) + { + return string.Empty; + } + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs new file mode 100644 index 000000000000..3f977d788c15 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs @@ -0,0 +1,704 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using Moq; +using Xunit; + +// ReSharper disable StringLiteralTypo + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelFunctionFromPromptTests +{ + [Fact] + public void ItAddsMissingVariablesForPrompt() + { + // Arrange & Act + var function = KernelFunctionFromPrompt.Create("This {{$x11}} {{$a}}{{$missing}} test template {{p.bar $b}} and {{p.foo c='literal \"c\"' d = $d}} and {{p.baz ename=$e}}"); + + // Assert + Assert.NotNull(function); + Assert.NotNull(function.Metadata); + Assert.NotNull(function.Metadata.Parameters); + Assert.Equal(6, function.Metadata.Parameters.Count); + Assert.Equal("x11", function.Metadata.Parameters[0].Name); + Assert.Equal("a", function.Metadata.Parameters[1].Name); + Assert.Equal("missing", function.Metadata.Parameters[2].Name); + Assert.Equal("b", function.Metadata.Parameters[3].Name); + Assert.Equal("d", function.Metadata.Parameters[4].Name); + Assert.Equal("e", function.Metadata.Parameters[5].Name); + } + + [Fact] + public void ItProvidesAccessToFunctionsViaFunctionCollection() + { + // Arrange + var factory = new Mock>(); + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(factory.Object); + Kernel kernel = builder.Build(); + + kernel.ImportPluginFromFunctions("jk", functions: new[] { kernel.CreateFunctionFromPrompt(promptTemplate: "Tell me a joke", functionName: "joker", description: "Nice fun") }); + + // Act & Assert - 3 functions, var name is not case sensitive + Assert.True(kernel.Plugins.TryGetFunction("jk", "joker", out _)); + Assert.True(kernel.Plugins.TryGetFunction("JK", "JOKER", out _)); + } + + [Theory] + [InlineData(null, "Assistant is a large language model.")] + [InlineData("My Chat Prompt", "My Chat Prompt")] + public async Task ItUsesChatSystemPromptWhenProvidedAsync(string? providedSystemChatPrompt, string expectedSystemChatPrompt) + { + // Arrange + var mockTextGeneration = new Mock(); + var fakeTextContent = new TextContent("llmResult"); + + mockTextGeneration.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("x", mockTextGeneration.Object); + Kernel kernel = builder.Build(); + + var promptConfig = new PromptTemplateConfig(); + promptConfig.Template = "template"; + var openAIExecutionSettings = providedSystemChatPrompt is null + ? 
new OpenAIPromptExecutionSettings() + : new OpenAIPromptExecutionSettings + { + ChatSystemPrompt = providedSystemChatPrompt + }; + + promptConfig.AddExecutionSettings(openAIExecutionSettings); + var func = kernel.CreateFunctionFromPrompt(promptConfig); + + // Act + await kernel.InvokeAsync(func); + + // Assert + mockTextGeneration.Verify(a => a.GetTextContentsAsync("template", It.Is(c => c.ChatSystemPrompt == expectedSystemChatPrompt), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task ItUsesServiceIdWhenProvidedAsync() + { + // Arrange + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); + var fakeTextContent = new TextContent("llmResult"); + + mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + mockTextGeneration2.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + Kernel kernel = builder.Build(); + + var promptConfig = new PromptTemplateConfig(); + promptConfig.Template = "template"; + promptConfig.AddExecutionSettings(new PromptExecutionSettings(), "service1"); + var func = kernel.CreateFunctionFromPrompt(promptConfig); + + // Act + await kernel.InvokeAsync(func); + + // Assert + mockTextGeneration1.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); + mockTextGeneration2.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never()); + } + + [Fact] + public async Task ItFailsIfInvalidServiceIdIsProvidedAsync() + { + // Arrange + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + Kernel kernel = builder.Build(); + + var promptConfig = new PromptTemplateConfig(); + promptConfig.Template = "template"; + promptConfig.AddExecutionSettings(new PromptExecutionSettings(), "service3"); + var func = kernel.CreateFunctionFromPrompt(promptConfig); + + // Act + var exception = await Assert.ThrowsAsync(() => kernel.InvokeAsync(func)); + + // Assert + Assert.Equal("Required service of type Microsoft.SemanticKernel.TextGeneration.ITextGenerationService not registered. Expected serviceIds: service3.", exception.Message); + } + + [Fact] + public async Task ItParsesStandardizedPromptWhenServiceIsChatCompletionAsync() + { + var fakeService = new FakeChatAsTextService(); + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => fakeService); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt(@" + You are a helpful assistant. + How many 20 cents can I get from 1 dollar? 
+ "); + + // Act + Assert + await kernel.InvokeAsync(function); + + Assert.NotNull(fakeService.ChatHistory); + Assert.Equal(2, fakeService.ChatHistory.Count); + Assert.Equal("You are a helpful assistant.", fakeService.ChatHistory[0].Content); + Assert.Equal("How many 20 cents can I get from 1 dollar?", fakeService.ChatHistory[1].Content); + } + + [Fact] + public async Task ItParsesStandardizedPromptWhenServiceIsStreamingChatCompletionAsync() + { + var fakeService = new FakeChatAsTextService(); + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => fakeService); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt(@" + You are a helpful assistant. + How many 20 cents can I get from 1 dollar? + "); + + // Act + Assert + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + } + + Assert.NotNull(fakeService.ChatHistory); + Assert.Equal(2, fakeService.ChatHistory.Count); + Assert.Equal("You are a helpful assistant.", fakeService.ChatHistory[0].Content); + Assert.Equal("How many 20 cents can I get from 1 dollar?", fakeService.ChatHistory[1].Content); + } + + [Fact] + public async Task ItNotParsesStandardizedPromptWhenServiceIsOnlyTextCompletionAsync() + { + var mockService = new Mock(); + var mockResult = mockService.Setup(s => s.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(new List() { new("something") }); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => mockService.Object); + Kernel kernel = builder.Build(); + + var inputPrompt = @" + You are a helpful assistant. + How many 20 cents can I get from 1 dollar? + "; + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt(inputPrompt); + + // Act + Assert + mockResult.Callback((string prompt, PromptExecutionSettings _, Kernel _, CancellationToken _) => + { + Assert.NotNull(prompt); + Assert.Equal(inputPrompt, prompt); + }); + + await kernel.InvokeAsync(function); + } + + [Fact] + public async Task ItNotParsesStandardizedPromptWhenStreamingWhenServiceIsOnlyTextCompletionAsync() + { + var mockService = new Mock(); + var mockResult = mockService.Setup(s => s.GetStreamingTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns((new List() { new("something") }).ToAsyncEnumerable()); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => mockService.Object); + Kernel kernel = builder.Build(); + + var inputPrompt = @" + You are a helpful assistant. + How many 20 cents can I get from 1 dollar? 
+ "; + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt(inputPrompt); + + // Act + Assert + mockResult.Callback((string prompt, PromptExecutionSettings _, Kernel _, CancellationToken _) => + { + Assert.NotNull(prompt); + Assert.Equal(inputPrompt, prompt); + }); + + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + } + } + + [Fact] + public async Task InvokeAsyncReturnsTheConnectorResultWhenInServiceIsOnlyTextCompletionAsync() + { + var mockService = new Mock(); + var mockResult = mockService.Setup(s => s.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(new List() { new("something") }); + + KernelBuilder builder = new(); + builder.Services.AddTransient((sp) => mockService.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything"); + + var result = await kernel.InvokeAsync(function); + + Assert.Equal("something", result.GetValue()); + Assert.Equal("something", result.GetValue()!.Text); + Assert.Equal("something", result.GetValue()!.ToString()); + } + + [Fact] + public async Task InvokeAsyncReturnsTheConnectorChatResultWhenInServiceIsOnlyChatCompletionAsync() + { + var mockService = new Mock(); + var mockResult = mockService.Setup(s => s.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(new List() { new(AuthorRole.User, "something") }); + + KernelBuilder builder = new(); + builder.Services.AddTransient((sp) => mockService.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything"); + + var result = await kernel.InvokeAsync(function); + + Assert.Equal("something", result.GetValue()); + Assert.Equal("something", result.GetValue()!.Content); + Assert.Equal(AuthorRole.User, result.GetValue()!.Role); + Assert.Equal("something", result.GetValue()!.ToString()); + } + + [Fact] + public async Task InvokeAsyncReturnsTheConnectorChatResultWhenInServiceIsChatAndTextCompletionAsync() + { + var fakeService = new FakeChatAsTextService(); + KernelBuilder builder = new(); + builder.Services.AddTransient((sp) => fakeService); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything"); + + var result = await kernel.InvokeAsync(function); + + Assert.Equal("Something", result.GetValue()); + Assert.Equal("Something", result.GetValue()!.Content); + Assert.Equal(AuthorRole.Assistant, result.GetValue()!.Role); + Assert.Equal("Something", result.GetValue()!.ToString()); + } + + [Fact] + public async Task InvokeAsyncOfTGivesBackTheExpectedResultTypeFromTheConnectorWhenStreamingWhenServiceIsOnlyTextCompletionAsync() + { + var expectedContent = new StreamingTextContent("something"); + var mockService = new Mock(); + var mockResult = mockService.Setup(s => s.GetStreamingTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns((new List() { expectedContent }).ToAsyncEnumerable()); + + KernelBuilder builder = new(); + builder.Services.AddTransient((sp) => mockService.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything"); + + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + Assert.Equal(expectedContent, chunk); + } + + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + Assert.Equal(expectedContent, chunk); + } + } + + [Fact] + public async Task 
InvokeAsyncOfTGivesBackTheExpectedResultTypeFromTheConnectorWhenStreamingWhenServiceIsOnlyChatCompletionAsync() + { + var expectedContent = new StreamingChatMessageContent(AuthorRole.Assistant, "Something"); + var mockService = new Mock(); + var mockResult = mockService.Setup(s => s.GetStreamingChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns((new List() { expectedContent }).ToAsyncEnumerable()); + + KernelBuilder builder = new(); + builder.Services.AddTransient((sp) => mockService.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything"); + + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + Assert.Equal(expectedContent, chunk); + Assert.Equal("Something", chunk.ToString()); + } + + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + Assert.Equal(expectedContent, chunk); + Assert.Equal("Something", chunk.Content); + Assert.Equal(AuthorRole.Assistant, chunk.Role); + } + } + + [Fact] + public async Task InvokeAsyncOfTGivesBackTheExpectedResultTypeFromTheConnectorWhenStreamingWhenServiceIsTextAndChatCompletionAsync() + { + var fakeService = new FakeChatAsTextService(); + KernelBuilder builder = new(); + builder.Services.AddTransient((sp) => fakeService); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything"); + + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + Assert.Equal("Something", chunk.ToString()); + } + + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + Assert.Equal(AuthorRole.Assistant, chunk.Role); + Assert.Equal("Something", chunk.Content); + } + } + + [Fact] + public async Task InvokeAsyncUsesPromptExecutionSettingsAsync() + { + // Arrange + var mockTextContent = new TextContent("Result"); + var mockTextCompletion = new Mock(); + mockTextCompletion.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent }); + KernelBuilder builder = new(); + builder.Services.AddTransient((sp) => mockTextCompletion.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything", new OpenAIPromptExecutionSettings { MaxTokens = 1000 }); + + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal("Result", result.GetValue()); + mockTextCompletion.Verify(m => m.GetTextContentsAsync("Anything", It.Is(settings => settings.MaxTokens == 1000), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task InvokeAsyncUsesKernelArgumentsExecutionSettingsAsync() + { + // Arrange + var mockTextContent = new TextContent("Result"); + var mockTextCompletion = new Mock(); + mockTextCompletion.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent }); + KernelBuilder builder = new(); + builder.Services.AddTransient((sp) => mockTextCompletion.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything", new OpenAIPromptExecutionSettings { MaxTokens = 1000 }); + + // Act + var result = await kernel.InvokeAsync(function, new KernelArguments(new OpenAIPromptExecutionSettings { MaxTokens = 2000 })); + + // Assert + Assert.Equal("Result", result.GetValue()); + mockTextCompletion.Verify(m => m.GetTextContentsAsync("Anything", It.Is(settings => settings.MaxTokens == 2000),
It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task InvokeAsyncWithServiceIdUsesKernelArgumentsExecutionSettingsAsync() + { + // Arrange + var mockTextContent = new TextContent("Result"); + var mockTextCompletion = new Mock(); + mockTextCompletion.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent }); + KernelBuilder builder = new(); + builder.Services.AddKeyedSingleton("service1", mockTextCompletion.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Anything", new OpenAIPromptExecutionSettings { MaxTokens = 1000 }); + + // Act + var result = await kernel.InvokeAsync(function, new KernelArguments(new OpenAIPromptExecutionSettings { MaxTokens = 2000 })); + + // Assert + Assert.Equal("Result", result.GetValue()); + mockTextCompletion.Verify(m => m.GetTextContentsAsync("Anything", It.Is(settings => settings.MaxTokens == 2000), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task InvokeAsyncWithMultipleServicesUsesKernelArgumentsExecutionSettingsAsync() + { + // Arrange + var mockTextContent1 = new TextContent("Result1"); + var mockTextCompletion1 = new Mock(); + mockTextCompletion1.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent1 }); + var mockTextContent2 = new TextContent("Result2"); + var mockTextCompletion2 = new Mock(); + mockTextCompletion2.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent2 }); + + KernelBuilder builder = new(); + builder.Services.AddKeyedSingleton("service1", mockTextCompletion1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextCompletion2.Object); + Kernel kernel = builder.Build(); + + KernelFunction function1 = KernelFunctionFactory.CreateFromPrompt(new PromptTemplateConfig { Template = "Prompt1", ExecutionSettings = new() { ["service1"] = new OpenAIPromptExecutionSettings { MaxTokens = 1000 } } }); + KernelFunction function2 = KernelFunctionFactory.CreateFromPrompt(new PromptTemplateConfig { Template = "Prompt2", ExecutionSettings = new() { ["service2"] = new OpenAIPromptExecutionSettings { MaxTokens = 2000 } } }); + + // Act + var result1 = await kernel.InvokeAsync(function1); + var result2 = await kernel.InvokeAsync(function2); + + // Assert + Assert.Equal("Result1", result1.GetValue()); + mockTextCompletion1.Verify(m => m.GetTextContentsAsync("Prompt1", It.Is(settings => settings.MaxTokens == 1000), It.IsAny(), It.IsAny()), Times.Once()); + Assert.Equal("Result2", result2.GetValue()); + mockTextCompletion2.Verify(m => m.GetTextContentsAsync("Prompt2", It.Is(settings => settings.MaxTokens == 2000), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task InvokeAsyncWithMultipleServicesUsesServiceFromKernelArgumentsExecutionSettingsAsync() + { + // Arrange + var mockTextContent1 = new TextContent("Result1"); + var mockTextCompletion1 = new Mock(); + mockTextCompletion1.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent1 }); + var mockTextContent2 = new TextContent("Result2"); + var mockTextCompletion2 = new Mock(); + mockTextCompletion2.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent2 }); + + KernelBuilder builder = new(); + 
builder.Services.AddKeyedSingleton("service1", mockTextCompletion1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextCompletion2.Object); + Kernel kernel = builder.Build(); + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + + // Act + KernelArguments arguments1 = new(); + arguments1.ExecutionSettings = new Dictionary() + { + { "service1", new OpenAIPromptExecutionSettings { MaxTokens = 1000 } } + }; + var result1 = await kernel.InvokeAsync(function, arguments1); + + KernelArguments arguments2 = new(); + arguments2.ExecutionSettings = new Dictionary() + { + { "service2", new OpenAIPromptExecutionSettings { MaxTokens = 2000 } } + }; + var result2 = await kernel.InvokeAsync(function, arguments2); + + // Assert + Assert.Equal("Result1", result1.GetValue()); + mockTextCompletion1.Verify(m => m.GetTextContentsAsync("Prompt", It.Is(settings => settings.MaxTokens == 1000), It.IsAny(), It.IsAny()), Times.Once()); + Assert.Equal("Result2", result2.GetValue()); + mockTextCompletion2.Verify(m => m.GetTextContentsAsync("Prompt", It.Is(settings => settings.MaxTokens == 2000), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task InvokeAsyncWithMultipleServicesUsesKernelArgumentsExecutionSettingsOverrideAsync() + { + // Arrange + var mockTextContent1 = new TextContent("Result1"); + var mockTextCompletion1 = new Mock(); + mockTextCompletion1.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent1 }); + var mockTextContent2 = new TextContent("Result2"); + var mockTextCompletion2 = new Mock(); + mockTextCompletion2.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent2 }); + + KernelBuilder builder = new(); + builder.Services.AddKeyedSingleton("service1", mockTextCompletion1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextCompletion2.Object); + Kernel kernel = builder.Build(); + + KernelFunction function1 = KernelFunctionFactory.CreateFromPrompt(new PromptTemplateConfig { Template = "Prompt1", ExecutionSettings = new() { ["service1"] = new OpenAIPromptExecutionSettings { MaxTokens = 1000 } } }); + KernelFunction function2 = KernelFunctionFactory.CreateFromPrompt(new PromptTemplateConfig { Template = "Prompt2", ExecutionSettings = new() { ["service2"] = new OpenAIPromptExecutionSettings { MaxTokens = 2000 } } }); + + // Act + KernelArguments arguments1 = new(); + arguments1.ExecutionSettings = new Dictionary() + { + { "service2", new OpenAIPromptExecutionSettings { MaxTokens = 2000 } } + }; + var result1 = await kernel.InvokeAsync(function1, arguments1); + + KernelArguments arguments2 = new(); + arguments2.ExecutionSettings = new Dictionary() + { + { "service1", new OpenAIPromptExecutionSettings { MaxTokens = 1000 } } + }; + var result2 = await kernel.InvokeAsync(function2, arguments2); + + // Assert + Assert.Equal("Result2", result1.GetValue()); + mockTextCompletion2.Verify(m => m.GetTextContentsAsync("Prompt1", It.Is(settings => settings.MaxTokens == 2000), It.IsAny(), It.IsAny()), Times.Once()); + Assert.Equal("Result1", result2.GetValue()); + mockTextCompletion1.Verify(m => m.GetTextContentsAsync("Prompt2", It.Is(settings => settings.MaxTokens == 1000), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task InvokeAsyncWithNestedPromptsSelectsCorrectServiceAsync() + { + // Arrange + var mockTextContent1 = new TextContent("Result1"); + var mockTextCompletion1 = new 
Mock(); + mockTextCompletion1.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent1 }); + var mockTextContent2 = new TextContent("Result2"); + var mockTextCompletion2 = new Mock(); + mockTextCompletion2.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent2 }); + + KernelBuilder builder = new(); + builder.Services.AddKeyedSingleton("service1", mockTextCompletion1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextCompletion2.Object); + Kernel kernel = builder.Build(); + + KernelFunction function1 = KernelFunctionFactory.CreateFromPrompt(new PromptTemplateConfig { Name = "Prompt1", Template = "Prompt1", ExecutionSettings = new() { ["service1"] = new OpenAIPromptExecutionSettings { MaxTokens = 1000 } } }); + KernelFunction function2 = KernelFunctionFactory.CreateFromPrompt(new PromptTemplateConfig { Name = "Prompt2", Template = "Prompt2 {{MyPrompts.Prompt1}}", ExecutionSettings = new() { ["service2"] = new OpenAIPromptExecutionSettings { MaxTokens = 2000 } } }); + + kernel.ImportPluginFromFunctions("MyPrompts", new[] { function1, function2 }); + + // Act + var result = await kernel.InvokeAsync(function2); + + // Assert + Assert.Equal("Result2", result.GetValue()); + mockTextCompletion1.Verify(m => m.GetTextContentsAsync("Prompt1", It.Is(settings => settings.MaxTokens == 1000), It.IsAny(), It.IsAny()), Times.Once()); + mockTextCompletion2.Verify(m => m.GetTextContentsAsync("Prompt2 Result1", It.Is(settings => settings.MaxTokens == 2000), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Fact] + public async Task InvokeAsyncWithPromptRenderedHooksExecutesModifiedPromptAsync() + { + // Arrange + var mockTextContent = new TextContent("Result"); + var mockTextCompletion = new Mock(); + mockTextCompletion.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent }); + +#pragma warning disable CS0618 // Events are deprecated + void MyRenderedHandler(object? 
sender, PromptRenderedEventArgs e) + { + e.RenderedPrompt += " USE SHORT, CLEAR, COMPLETE SENTENCES."; + } + + KernelBuilder builder = new(); + builder.Services.AddKeyedSingleton("service", mockTextCompletion.Object); + Kernel kernel = builder.Build(); + kernel.PromptRendered += MyRenderedHandler; +#pragma warning restore CS0618 // Events are deprecated + + KernelFunction function = KernelFunctionFactory.CreateFromPrompt("Prompt"); + + // Act + var result1 = await kernel.InvokeAsync(function); + + // Assert + mockTextCompletion.Verify(m => m.GetTextContentsAsync("Prompt USE SHORT, CLEAR, COMPLETE SENTENCES.", It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); + } + + [Theory] + [InlineData(KernelInvocationType.InvokePrompt)] + [InlineData(KernelInvocationType.InvokePromptStreaming)] + [InlineData(KernelInvocationType.InvokeFunction)] + [InlineData(KernelInvocationType.InvokeFunctionStreaming)] + public async Task ItUsesPromptAsUserMessageAsync(KernelInvocationType invocationType) + { + // Arrange + const string Prompt = "Test prompt as user message"; + + var fakeService = new FakeChatAsTextService(); + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => fakeService); + Kernel kernel = builder.Build(); + + var function = KernelFunctionFactory.CreateFromPrompt(Prompt); + + // Act + switch (invocationType) + { + case KernelInvocationType.InvokePrompt: + await kernel.InvokePromptAsync(Prompt); + break; + case KernelInvocationType.InvokePromptStreaming: + await foreach (var result in kernel.InvokePromptStreamingAsync(Prompt)) { } + break; + case KernelInvocationType.InvokeFunction: + await kernel.InvokeAsync(function); + break; + case KernelInvocationType.InvokeFunctionStreaming: + await foreach (var result in kernel.InvokeStreamingAsync(function)) { } + break; + } + + // Assert + Assert.NotNull(fakeService.ChatHistory); + Assert.Single(fakeService.ChatHistory); + + var messageContent = fakeService.ChatHistory[0]; + + Assert.Equal(AuthorRole.User, messageContent.Role); + Assert.Equal("Test prompt as user message", messageContent.Content); + } + + public enum KernelInvocationType + { + InvokePrompt, + InvokePromptStreaming, + InvokeFunction, + InvokeFunctionStreaming + } + + #region private + + private sealed class FakeChatAsTextService : ITextGenerationService, IChatCompletionService + { + public IReadOnlyDictionary Attributes => throw new NotImplementedException(); + public ChatHistory? ChatHistory { get; private set; } + + public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + this.ChatHistory = chatHistory; + + return Task.FromResult>(new List { new(AuthorRole.Assistant, "Something") }); + } + +#pragma warning disable IDE0036 // Order modifiers +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + public async IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) +#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously +#pragma warning restore IDE0036 // Order modifiers + { + this.ChatHistory = chatHistory; + yield return new StreamingChatMessageContent(AuthorRole.Assistant, "Something"); + } + + public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } + + public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } + } + + #endregion +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionMetadataTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionMetadataTests.cs new file mode 100644 index 000000000000..1801fa770d8a --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionMetadataTests.cs @@ -0,0 +1,185 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.ComponentModel; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Moq; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelFunctionMetadataTests +{ + private readonly Mock _logger; + + public KernelFunctionMetadataTests() + { + this._logger = new Mock(); + } + + [Fact] + public void ItReturnsFunctionParams() + { + // Arrange + var paramsA = new List + { + new("p1") { Description = "param 1", DefaultValue = "default 1" }, + new("p2") { Description = "param 2", DefaultValue = "default 2" }, + }; + + // Act + var funcViewA = new KernelFunctionMetadata("funcA") { Parameters = paramsA }; + + // Assert + Assert.NotNull(funcViewA); + + Assert.Equal("p1", funcViewA.Parameters[0].Name); + Assert.Equal("p2", funcViewA.Parameters[1].Name); + Assert.Equal("param 1", funcViewA.Parameters[0].Description); + Assert.Equal("param 2", funcViewA.Parameters[1].Description); + Assert.Equal("default 1", funcViewA.Parameters[0].DefaultValue); + Assert.Equal("default 2", funcViewA.Parameters[1].DefaultValue); + } + + [Fact] + public void ItReturnsFunctionReturnParameter() + { + // Arrange + var ReturnParameterViewA = new KernelReturnParameterMetadata + { + Description = "ReturnParameterA", + ParameterType = typeof(string), + Schema = KernelJsonSchema.Parse("{\"type\": \"object\" }"), + }; + + // Act + var funcViewA = new KernelFunctionMetadata("funcA") { ReturnParameter = ReturnParameterViewA }; + + // Assert + Assert.NotNull(funcViewA); + + Assert.Equal("ReturnParameterA", funcViewA.ReturnParameter.Description); + Assert.Equal(typeof(string), funcViewA.ReturnParameter.ParameterType); + Assert.Equivalent(KernelJsonSchema.Parse("{\"type\": \"object\" }"), funcViewA.ReturnParameter.Schema); + } + + [Fact] + public void ItSupportsValidFunctionName() + { + // Act + var function = KernelFunctionFactory.CreateFromMethod(ValidFunctionName, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + var fv = function.Metadata; + + // Assert + Assert.Equal("ValidFunctionName", fv.Name); + } + + [Fact] + public void ItSupportsValidFunctionAsyncName() + { + // Act + var function = KernelFunctionFactory.CreateFromMethod(ValidFunctionNameAsync, loggerFactory: 
this._logger.Object); + Assert.NotNull(function); + KernelFunctionMetadata fv = function.Metadata; + + // Assert + Assert.Equal("ValidFunctionName", fv.Name); + } + + [Fact] + public void ItSupportsValidFunctionKernelFunctionNameAttributeOverride() + { + // Arrange + [KernelFunction("NewTestFunctionName")] + static void TestFunctionName() + { } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestFunctionName, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + KernelFunctionMetadata fv = function.Metadata; + + // Assert + Assert.Equal("NewTestFunctionName", fv.Name); + } + + [Fact] + public void ItSupportsValidAttributeDescriptions() + { + // Arrange + [Description("function description")] + [return: Description("return parameter description")] + static void TestFunctionName( + [Description("first parameter description")] int p1, + [Description("second parameter description")] int p2) + { } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestFunctionName, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + KernelFunctionMetadata fv = function.Metadata; + + // Assert + Assert.Equal("function description", fv.Description); + Assert.Equal("first parameter description", fv.Parameters[0].Description); + Assert.Equal(typeof(int), fv.Parameters[0].ParameterType); + Assert.Equal("second parameter description", fv.Parameters[1].Description); + Assert.Equal(typeof(int), fv.Parameters[1].ParameterType); + Assert.Equal("return parameter description", fv.ReturnParameter.Description); + Assert.Equal(typeof(void), fv.ReturnParameter.ParameterType); + } + + [Fact] + public void ItSupportsNoAttributeDescriptions() + { + // Arrange + static void TestFunctionName(int p1, int p2) { } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestFunctionName, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + KernelFunctionMetadata fv = function.Metadata; + + // Assert + Assert.Equal(string.Empty, fv.Description); + Assert.Equal(string.Empty, fv.Parameters[0].Description); + Assert.Equal(typeof(int), fv.Parameters[0].ParameterType); + Assert.Equal(string.Empty, fv.Parameters[1].Description); + Assert.Equal(typeof(int), fv.Parameters[1].ParameterType); + Assert.Equal(string.Empty, fv.ReturnParameter.Description); + Assert.Equal(typeof(void), fv.ReturnParameter.ParameterType); + } + + [Fact] + public void ItSupportsValidNoParameters() + { + // Arrange + static void TestFunctionName() { } + + // Act + var function = KernelFunctionFactory.CreateFromMethod(TestFunctionName, loggerFactory: this._logger.Object); + Assert.NotNull(function); + + KernelFunctionMetadata fv = function.Metadata; + + // Assert + var emptyList = new List(); + + Assert.Equal(emptyList, fv.Parameters); + Assert.Equal(typeof(void), fv.ReturnParameter.ParameterType); + } + + private static void ValidFunctionName() { } + private static async Task ValidFunctionNameAsync() + { + var function = KernelFunctionFactory.CreateFromMethod(ValidFunctionName); + var result = await function.InvokeAsync(new()); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelJsonSchemaTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelJsonSchemaTests.cs new file mode 100644 index 000000000000..cd76005ff91c --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelJsonSchemaTests.cs @@ -0,0 +1,101 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Text; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelJsonSchemaTests +{ + [Fact] + public void ItParsesJsonSchemaSuccessfully() + { + const string ValidJsonSchema = @" +{ + ""$schema"": ""http://json-schema.org/draft-07/schema#"", + ""type"": ""object"", + ""properties"": { + ""title"": { + ""type"": ""string"", + ""description"": ""The title of the book"" + }, + ""author"": { + ""type"": ""string"", + ""description"": ""The name of the author"" + }, + ""year"": { + ""type"": ""integer"", + ""description"": ""The year of publication"", + ""minimum"": 0 + }, + ""genre"": { + ""type"": ""string"", + ""description"": ""The genre of the book"", + ""enum"": [""fiction"", ""non-fiction"", ""biography"", ""poetry"", ""other""] + }, + ""pages"": { + ""type"": ""integer"", + ""description"": ""The number of pages in the book"", + ""minimum"": 1 + }, + ""rating"": { + ""type"": ""number"", + ""description"": ""The average rating of the book"", + ""minimum"": 0, + ""maximum"": 5 + } + }, + ""required"": [""title"", ""author"", ""year"", ""genre"", ""pages"", ""rating""] +}"; + + KernelJsonSchema schema1 = KernelJsonSchema.Parse(ValidJsonSchema); + KernelJsonSchema schema2 = KernelJsonSchema.Parse((ReadOnlySpan)ValidJsonSchema); + KernelJsonSchema schema3 = KernelJsonSchema.Parse(Encoding.UTF8.GetBytes(ValidJsonSchema)); + + string expected = JsonSerializer.Serialize(JsonSerializer.Deserialize(ValidJsonSchema)); // roundtrip through JsonSerializer to normalize whitespace + + foreach (KernelJsonSchema schema in new[] { schema1, schema2, schema3 }) + { + Assert.Equal(expected, JsonSerializer.Serialize(schema.RootElement)); + Assert.Equal(expected, JsonSerializer.Serialize(JsonSerializer.Deserialize(schema.ToString()))); + } + } + + [Fact] + public void ItThrowsOnInvalidJson() + { + const string InvalidJsonSchema = @" +{ + ""$schema"": ""http://json-schema.org/draft-07/schema#"", + ""type"":, + ""properties"": { + ""title"": { + ""type"": ""string"", + ""description"": ""The title of the book"" + }, +}"; + + Assert.Throws(() => KernelJsonSchema.Parse((string)null!)); + + Assert.Throws(() => KernelJsonSchema.Parse(string.Empty)); + Assert.Throws(() => KernelJsonSchema.Parse(ReadOnlySpan.Empty)); + Assert.Throws(() => KernelJsonSchema.Parse(ReadOnlySpan.Empty)); + + Assert.Throws(() => KernelJsonSchema.Parse(InvalidJsonSchema)); + Assert.Throws(() => KernelJsonSchema.Parse((ReadOnlySpan)InvalidJsonSchema)); + Assert.Throws(() => KernelJsonSchema.Parse(Encoding.UTF8.GetBytes(InvalidJsonSchema))); + } + + [Theory] + [InlineData("invalid")] + [InlineData("{ \"type\":\"invalid\" }")] + public void ItThrowsOnInvalidJsonSchema(string invalidSchema) + { + Assert.Throws(() => KernelJsonSchema.Parse(invalidSchema)); + Assert.Throws(() => KernelJsonSchema.Parse((ReadOnlySpan)invalidSchema)); + Assert.Throws(() => KernelJsonSchema.Parse(Encoding.UTF8.GetBytes(invalidSchema))); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelParameterMetadataTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelParameterMetadataTests.cs new file mode 100644 index 000000000000..c0a75d76fb16 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelParameterMetadataTests.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelParameterMetadataTests +{ + [Fact] + public void ItThrowsForInvalidName() + { + Assert.Throws(() => new KernelParameterMetadata((string)null!)); + Assert.Throws(() => new KernelParameterMetadata("")); + Assert.Throws(() => new KernelParameterMetadata(" ")); + Assert.Throws(() => new KernelParameterMetadata("\t\r\v ")); + } + + [Fact] + public void ItCanBeConstructedWithJustName() + { + var m = new KernelParameterMetadata("p"); + Assert.Equal("p", m.Name); + Assert.Empty(m.Description); + Assert.Null(m.ParameterType); + Assert.Null(m.Schema); + Assert.Null(m.DefaultValue); + Assert.False(m.IsRequired); + } + + [Fact] + public void ItRoundtripsArguments() + { + var m = new KernelParameterMetadata("p") { Description = "d", DefaultValue = "v", IsRequired = true, ParameterType = typeof(int), Schema = KernelJsonSchema.Parse("{ \"type\":\"object\" }") }; + Assert.Equal("p", m.Name); + Assert.Equal("d", m.Description); + Assert.Equal("v", m.DefaultValue); + Assert.True(m.IsRequired); + Assert.Equal(typeof(int), m.ParameterType); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"object\" }")), JsonSerializer.Serialize(m.Schema)); + } + + [Fact] + public void ItInfersSchemaFromType() + { + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"integer\" }")), JsonSerializer.Serialize(new KernelParameterMetadata("p") { ParameterType = typeof(int) }.Schema)); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"number\" }")), JsonSerializer.Serialize(new KernelParameterMetadata("p") { ParameterType = typeof(double) }.Schema)); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"string\" }")), JsonSerializer.Serialize(new KernelParameterMetadata("p") { ParameterType = typeof(string) }.Schema)); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"boolean\" }")), JsonSerializer.Serialize(new KernelParameterMetadata("p") { ParameterType = typeof(bool) }.Schema)); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"object\" }")), JsonSerializer.Serialize(new KernelParameterMetadata("p") { ParameterType = typeof(object) }.Schema)); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"array\",\"items\":{\"type\":\"boolean\"}}")), JsonSerializer.Serialize(new KernelParameterMetadata("p") { ParameterType = typeof(bool[]) }.Schema)); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{\"type\":\"object\",\"properties\":{\"Value1\":{\"type\":\"string\"},\"Value2\":{\"type\":\"integer\"},\"Value3\":{\"type\":\"number\"}}}")), JsonSerializer.Serialize(new KernelParameterMetadata("p") { ParameterType = typeof(Example) }.Schema)); + } + + [Fact] + public void ItCantInferSchemaFromUnsupportedType() + { + Assert.Null(new KernelParameterMetadata("p") { ParameterType = typeof(void) }.Schema); + Assert.Null(new KernelParameterMetadata("p") { ParameterType = typeof(int*) }.Schema); + } + + [Fact] + public void ItIncludesDescriptionInSchema() + { + var m = new KernelParameterMetadata("p") { Description = "something neat", ParameterType = typeof(int) }; + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"integer\", \"description\":\"something neat\" }")), JsonSerializer.Serialize(m.Schema)); + } + + [Fact] + public void ItIncludesDefaultValueInSchema() + { + var 
m = new KernelParameterMetadata("p") { DefaultValue = "42", ParameterType = typeof(int) }; + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"integer\", \"description\":\"(default value: 42)\" }")), JsonSerializer.Serialize(m.Schema)); + } + + [Fact] + public void ItIncludesDescriptionAndDefaultValueInSchema() + { + var m = new KernelParameterMetadata("p") { Description = "something neat", DefaultValue = "42", ParameterType = typeof(int) }; + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"integer\", \"description\":\"something neat (default value: 42)\" }")), JsonSerializer.Serialize(m.Schema)); + } + + [Fact] + public void ItCachesInferredSchemas() + { + var m = new KernelParameterMetadata("p") { ParameterType = typeof(Example) }; + Assert.Same(m.Schema, m.Schema); + } + + [Fact] + public void ItCopiesInferredSchemaToCopy() + { + var m = new KernelParameterMetadata("p") { ParameterType = typeof(Example) }; + KernelJsonSchema? schema1 = m.Schema; + Assert.NotNull(schema1); + + m = new KernelParameterMetadata(m); + Assert.Same(schema1, m.Schema); + } + + [Fact] + public void ItInvalidatesSchemaForNewType() + { + var m = new KernelParameterMetadata("p") { ParameterType = typeof(Example) }; + KernelJsonSchema? schema1 = m.Schema; + Assert.NotNull(schema1); + + m = new KernelParameterMetadata(m) { ParameterType = typeof(int) }; + Assert.NotNull(m.Schema); + Assert.NotSame(schema1, m.Schema); + } + + [Fact] + public void ItInvalidatesSchemaForNewDescription() + { + var m = new KernelParameterMetadata("p") { ParameterType = typeof(Example) }; + KernelJsonSchema? schema1 = m.Schema; + Assert.NotNull(schema1); + + m = new KernelParameterMetadata(m) { Description = "something new" }; + Assert.NotNull(m.Schema); + Assert.NotSame(schema1, m.Schema); + } + + [Fact] + public void ItInvalidatesSchemaForNewDefaultValue() + { + var m = new KernelParameterMetadata("p") { ParameterType = typeof(Example) }; + KernelJsonSchema? schema1 = m.Schema; + Assert.NotNull(schema1); + + m = new KernelParameterMetadata(m) { DefaultValue = "42" }; + Assert.NotNull(m.Schema); + Assert.NotSame(schema1, m.Schema); + } + +#pragma warning disable CS0649 // fields never assigned to +#pragma warning disable CA1812 // class never instantiated + internal sealed class Example + { + public string? Value1; + public int Value2; + public double Value3; + } +#pragma warning restore CA1812 +#pragma warning restore CS0649 +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginCollectionTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginCollectionTests.cs new file mode 100644 index 000000000000..6d4ee3ae9fe1 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginCollectionTests.cs @@ -0,0 +1,255 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel; +using Xunit; + +#pragma warning disable xUnit2013 // Do not use equality check to check for collection size. 
+#pragma warning disable xUnit2017 // Do not use Contains() to check if a value exists in a collection + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelPluginCollectionTests +{ + [Fact] + public void ItHasExpectedDefaultValues() + { + KernelPluginCollection c; + + c = new(); + Assert.Equal(0, c.Count); + Assert.NotNull(c.GetEnumerator()); + Assert.False(c.GetEnumerator().MoveNext()); + + c = new(Array.Empty()); + Assert.Equal(0, c.Count); + Assert.NotNull(c.GetEnumerator()); + Assert.False(c.GetEnumerator().MoveNext()); + + c = new(new[] { KernelPluginFactory.CreateFromFunctions("plugin1") }); + Assert.Equal(1, c.Count); + Assert.NotNull(c.GetEnumerator()); + Assert.True(c.Contains("plugin1")); + Assert.False(c.Contains("plugin2")); + + c = new(new[] { KernelPluginFactory.CreateFromFunctions("plugin1"), KernelPluginFactory.CreateFromFunctions("plugin2") }); + Assert.Equal(2, c.Count); + Assert.NotNull(c.GetEnumerator()); + Assert.True(c.Contains("plugin1")); + Assert.True(c.Contains("plugin2")); + Assert.False(c.Contains("plugin3")); + + c = new(new[] { KernelPluginFactory.CreateFromFunctions("plugin1"), KernelPluginFactory.CreateFromFunctions("plugin2") }.Select(p => p)); + Assert.Equal(2, c.Count); + Assert.NotNull(c.GetEnumerator()); + Assert.True(c.Contains("plugin1")); + Assert.True(c.Contains("plugin2")); + Assert.False(c.Contains("plugin3")); + + c = new(c); + Assert.Equal(2, c.Count); + Assert.NotNull(c.GetEnumerator()); + Assert.True(c.Contains("plugin1")); + Assert.True(c.Contains("plugin2")); + Assert.False(c.Contains("plugin3")); + } + + [Fact] + public void ItExposesAddedPlugins() + { + var c = new KernelPluginCollection(); + + DefaultKernelPlugin plugin1 = new("name1", "description1", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"), + }); + DefaultKernelPlugin plugin2 = new("name2", "description2", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"), + }); + + c.Add(plugin1); + Assert.Equal(1, c.Count); + Assert.True(c.Contains(plugin1)); + Assert.True(c.Contains(plugin1.Name)); + Assert.True(c.Contains(plugin1.Name.ToUpperInvariant())); + Assert.Equal(plugin1, c[plugin1.Name]); + Assert.False(c.Contains(plugin2)); + Assert.False(c.Contains(plugin2.Name)); + Assert.False(c.Contains(plugin2.Name.ToUpperInvariant())); + Assert.Equal(new[] { plugin1 }, c.ToArray()); + + c.Add(plugin2); + Assert.Equal(2, c.Count); + Assert.True(c.Contains(plugin1)); + Assert.True(c.Contains(plugin1.Name)); + Assert.True(c.Contains(plugin1.Name.ToUpperInvariant())); + Assert.Equal(plugin1, c[plugin1.Name]); + Assert.True(c.Contains(plugin2)); + Assert.True(c.Contains(plugin2.Name)); + Assert.True(c.Contains(plugin2.Name.ToUpperInvariant())); + Assert.Equal(plugin2, c[plugin2.Name]); + Assert.Equal(new[] { plugin1, plugin2 }, c.OrderBy(f => f.Name, StringComparer.OrdinalIgnoreCase).ToArray()); + + Assert.True(c.Remove(plugin1)); + Assert.False(c.Remove(plugin1)); + Assert.Equal(1, c.Count); + Assert.False(c.Contains(plugin1)); + Assert.False(c.Contains(plugin1.Name)); + Assert.False(c.Contains(plugin1.Name.ToUpperInvariant())); + Assert.True(c.Contains(plugin2)); + Assert.True(c.Contains(plugin2.Name)); + Assert.True(c.Contains(plugin2.Name.ToUpperInvariant())); + Assert.Equal(plugin2, c[plugin2.Name]); + Assert.Equal(new[] { plugin2 }, c.ToArray()); + + Assert.True(c.Remove(plugin2)); + Assert.False(c.Remove(plugin2)); + Assert.Equal(0, c.Count); + 
Assert.False(c.Contains(plugin1)); + Assert.False(c.Contains(plugin1.Name)); + Assert.False(c.Contains(plugin1.Name.ToUpperInvariant())); + Assert.False(c.Contains(plugin2)); + Assert.False(c.Contains(plugin2.Name)); + Assert.False(c.Contains(plugin2.Name.ToUpperInvariant())); + Assert.Equal(Array.Empty(), c.ToArray()); + + c.Add(plugin2); + Assert.Equal(1, c.Count); + c.Clear(); + Assert.Equal(0, c.Count); + } + + [Fact] + public void ItExposesGroupsOfAddedPlugins() + { + var c = new KernelPluginCollection(); + + c.AddRange(new[] { KernelPluginFactory.CreateFromFunctions("name1"), KernelPluginFactory.CreateFromFunctions("name2") }); + Assert.Equal(2, c.Count); + Assert.Equal("name1", c["name1"].Name); + Assert.Equal("name2", c["name2"].Name); + } + + [Fact] + public void ItExposesFunctionMetadataForAllFunctions() + { + var c = new KernelPluginCollection() + { + KernelPluginFactory.CreateFromFunctions("plugin1", "description1", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"), + }), + KernelPluginFactory.CreateFromFunctions("plugin2", "description2", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"), + }) + }; + + IList metadata = c.GetFunctionsMetadata().OrderBy(f => f.Name).ToList(); + + Assert.Equal("plugin1", metadata[0].PluginName); + Assert.Equal("Function1", metadata[0].Name); + + Assert.Equal("plugin1", metadata[1].PluginName); + Assert.Equal("Function2", metadata[1].Name); + + Assert.Equal("plugin2", metadata[2].PluginName); + Assert.Equal("Function2", metadata[2].Name); + + Assert.Equal("plugin2", metadata[3].PluginName); + Assert.Equal("Function3", metadata[3].Name); + } + + [Fact] + public void ItExposesFunctionsInPlugins() + { + DefaultKernelPlugin plugin1 = new("name1", "description1", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"), + }); + DefaultKernelPlugin plugin2 = new("name2", "description2", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"), + }); + + var c = new KernelPluginCollection(new[] { plugin1, plugin2 }); + + Assert.Same(plugin1["Function1"], c.GetFunction("name1", "Function1")); + Assert.Same(plugin1["Function2"], c.GetFunction("name1", "Function2")); + Assert.Same(plugin2["Function3"], c.GetFunction("name2", "Function3")); + Assert.Throws(() => c.GetFunction("name1", "Function0")); + Assert.Throws(() => c.GetFunction("name2", "Function1")); + Assert.Throws(() => c.GetFunction("name3", "Function1")); + + Assert.Same(plugin1["Function1"], c.GetFunction(null, "Function1")); + Assert.Same(plugin1["Function2"], c.GetFunction(null, "Function2")); + Assert.Same(plugin2["Function3"], c.GetFunction(null, "Function3")); + + Assert.True(c.TryGetFunction("name1", "Function1", out KernelFunction? func)); + Assert.Same(plugin1["Function1"], func); + + Assert.False(c.TryGetFunction("name2", "Function1", out func)); + Assert.Null(func); + + Assert.True(c.TryGetFunction(null, "Function3", out func)); + Assert.Same(plugin2["Function3"], func); + } + + [Fact] + public void ItThrowsForInvalidArguments() + { + Assert.Throws(() => new KernelPluginCollection(null!)); + Assert.Throws(() => new KernelPluginCollection(new KernelPlugin[] { null! 
})); + + KernelPluginCollection c = new(); + Assert.Throws(() => c.Add(null!)); + Assert.Throws(() => c.Remove(null!)); + Assert.Throws(() => c.Contains(null!)); + Assert.Throws(() => c[null!]); + Assert.Throws(() => c.TryGetPlugin(null!, out _)); + Assert.Throws(() => ((ICollection)c).CopyTo(null!, 0)); + + Assert.Throws(() => c["Function1"]); + } + + [Fact] + public void ItCopiesToDestinationArrayInCopyTo() + { + KernelPlugin plugin1 = KernelPluginFactory.CreateFromFunctions("plugin1"); + KernelPlugin plugin2 = KernelPluginFactory.CreateFromFunctions("plugin2"); + ICollection c = new KernelPluginCollection(new[] { plugin1, plugin2 }); + + var array = new KernelPlugin[4]; + + c.CopyTo(array, 0); + Assert.Same(plugin1, array[0]); + Assert.Same(plugin2, array[1]); + Assert.Null(array[2]); + Assert.Null(array[3]); + + Array.Clear(array, 0, array.Length); + c.CopyTo(array, 1); + Assert.Same(plugin1, array[1]); + Assert.Same(plugin2, array[2]); + Assert.Null(array[0]); + Assert.Null(array[3]); + + Array.Clear(array, 0, array.Length); + c.CopyTo(array, 2); + Assert.Same(plugin1, array[2]); + Assert.Same(plugin2, array[3]); + Assert.Null(array[0]); + Assert.Null(array[1]); + + Assert.Throws(() => c.CopyTo(array, -1)); + Assert.Throws(() => c.CopyTo(array, 3)); + Assert.Throws(() => c.CopyTo(array, 4)); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs new file mode 100644 index 000000000000..db692168f865 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelPluginTests.cs @@ -0,0 +1,128 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelPluginTests +{ + [Fact] + public void ItRoundTripsCtorArguments() + { + KernelPlugin plugin; + + var functions = new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"), + }; + + plugin = KernelPluginFactory.CreateFromFunctions("name", null, null); + Assert.Equal("name", plugin.Name); + Assert.Equal("", plugin.Description); + Assert.Equal(0, plugin.FunctionCount); + + plugin = KernelPluginFactory.CreateFromFunctions("name", "", functions); + Assert.Equal("name", plugin.Name); + Assert.Equal("", plugin.Description); + Assert.Equal(3, plugin.FunctionCount); + Assert.All(functions, f => Assert.True(plugin.Contains(f))); + + plugin = KernelPluginFactory.CreateFromFunctions("name", "description"); + Assert.Equal("name", plugin.Name); + Assert.Equal("description", plugin.Description); + Assert.Equal(0, plugin.FunctionCount); + + plugin = KernelPluginFactory.CreateFromFunctions("name", "description", functions); + Assert.Equal("name", plugin.Name); + Assert.Equal("description", plugin.Description); + Assert.Equal(3, plugin.FunctionCount); + Assert.All(functions, f => Assert.True(plugin.Contains(f))); + } + + [Fact] + public void ItExposesFunctionsItContains() + { + KernelFunction func1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"); + KernelFunction func2 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"); + + KernelPlugin plugin = KernelPluginFactory.CreateFromFunctions("name", "description", new[] { func1, func2 }); + + foreach (KernelFunction func in new[] { func1, func2 }) + { 
+ Assert.True(plugin.Contains(func.Name)); + Assert.True(plugin.Contains(func)); + + Assert.True(plugin.TryGetFunction(func.Name, out KernelFunction? found)); + Assert.Equal(found, func); + + Assert.Equal(func, plugin[func.Name]); + Assert.Equal(func, plugin[func.Name.ToUpperInvariant()]); + } + + KernelFunction[] actual = plugin.OrderBy(f => f.Name).ToArray(); + Assert.Equal(actual[0], func1); + Assert.Equal(actual[1], func2); + + Assert.Throws(() => plugin["Function3"]); + Assert.False(plugin.TryGetFunction("Function3", out KernelFunction? notFound)); + Assert.Null(notFound); + } + + [Fact] + public void ItContainsAddedFunctions() + { + KernelFunction func1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"); + KernelFunction func2 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"); + + KernelPlugin plugin = KernelPluginFactory.CreateFromFunctions("name", "description", new[] { func1, func2 }); + Assert.Equal(2, plugin.FunctionCount); + + Assert.True(plugin.TryGetFunction(func1.Name, out _)); + Assert.Equal(func1, plugin[func1.Name]); + + Assert.True(plugin.TryGetFunction(func2.Name, out _)); + Assert.Equal(func2, plugin[func2.Name]); + } + + [Fact] + public void ItExposesFunctionMetadataForAllFunctions() + { + Assert.Empty(KernelPluginFactory.CreateFromFunctions("plugin1").GetFunctionsMetadata()); + + IList metadata = KernelPluginFactory.CreateFromFunctions("plugin2", "description1", new[] + { + KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"), + }).GetFunctionsMetadata(); + + Assert.NotNull(metadata); + Assert.Equal(2, metadata.Count); + + Assert.Equal("plugin2", metadata[0].PluginName); + Assert.Equal("Function1", metadata[0].Name); + + Assert.Equal("plugin2", metadata[1].PluginName); + Assert.Equal("Function2", metadata[1].Name); + } + + [Fact] + public void ItThrowsForInvalidArguments() + { + Assert.Throws(() => KernelPluginFactory.CreateFromFunctions(null!)); + Assert.Throws(() => KernelPluginFactory.CreateFromFunctions(null!, "")); + Assert.Throws(() => KernelPluginFactory.CreateFromFunctions(null!, "", Array.Empty())); + Assert.Throws(() => KernelPluginFactory.CreateFromFunctions("name", "", new KernelFunction[] { null! })); + + KernelPlugin plugin = KernelPluginFactory.CreateFromFunctions("name"); + Assert.Throws(() => plugin[null!]); + Assert.Throws(() => plugin.TryGetFunction(null!, out _)); + Assert.Throws(() => plugin.Contains((string)null!)); + Assert.Throws(() => plugin.Contains((KernelFunction)null!)); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelReturnParameterMetadataTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelReturnParameterMetadataTests.cs new file mode 100644 index 000000000000..ef5ac36eb2d5 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelReturnParameterMetadataTests.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; + +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + +namespace SemanticKernel.UnitTests.Functions; + +public class KernelReturnParameterMetadataTests +{ + [Fact] + public void ItRoundtripsArguments() + { + var m = new KernelReturnParameterMetadata { Description = "something", ParameterType = typeof(int), Schema = KernelJsonSchema.Parse("{ \"type\":\"object\" }") }; + Assert.Equal("something", m.Description); + Assert.Equal(typeof(int), m.ParameterType); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"object\" }")), JsonSerializer.Serialize(m.Schema)); + } + + [Fact] + public void ItInfersSchemaFromType() + { + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"integer\" }")), JsonSerializer.Serialize(new KernelReturnParameterMetadata { ParameterType = typeof(int) }.Schema)); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"number\" }")), JsonSerializer.Serialize(new KernelReturnParameterMetadata { ParameterType = typeof(double) }.Schema)); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"string\" }")), JsonSerializer.Serialize(new KernelReturnParameterMetadata { ParameterType = typeof(string) }.Schema)); + } + + [Fact] + public void ItIncludesDescriptionInSchema() + { + var m = new KernelReturnParameterMetadata { Description = "d", ParameterType = typeof(int) }; + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse("{ \"type\":\"integer\", \"description\":\"d\" }")), JsonSerializer.Serialize(m.Schema)); + } + + [Fact] + public void ItCachesInferredSchemas() + { + var m = new KernelReturnParameterMetadata { ParameterType = typeof(KernelParameterMetadataTests.Example) }; + Assert.Same(m.Schema, m.Schema); + } + + [Fact] + public void ItCopiesInferredSchemaToCopy() + { + var m = new KernelReturnParameterMetadata { ParameterType = typeof(KernelParameterMetadataTests.Example) }; + KernelJsonSchema? schema1 = m.Schema; + Assert.NotNull(schema1); + + m = new KernelReturnParameterMetadata(m); + Assert.Same(schema1, m.Schema); + } + + [Fact] + public void ItInvalidatesSchemaForNewType() + { + var m = new KernelReturnParameterMetadata { ParameterType = typeof(KernelParameterMetadataTests.Example) }; + KernelJsonSchema? schema1 = m.Schema; + Assert.NotNull(schema1); + + m = new KernelReturnParameterMetadata(m) { ParameterType = typeof(int) }; + Assert.NotNull(m.Schema); + Assert.NotSame(schema1, m.Schema); + } + + [Fact] + public void ItInvalidatesSchemaForNewDescription() + { + var m = new KernelReturnParameterMetadata { ParameterType = typeof(KernelParameterMetadataTests.Example) }; + KernelJsonSchema?
schema1 = m.Schema; + Assert.NotNull(schema1); + + m = new KernelReturnParameterMetadata(m) { Description = "something new" }; + Assert.NotNull(m.Schema); + Assert.NotSame(schema1, m.Schema); + } + + [Fact] + public void ItRepresentsUnderlyingType() + { + Assert.Equal(typeof(void), KernelFunctionFactory.CreateFromMethod(() => { }).Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(int), KernelFunctionFactory.CreateFromMethod(() => 42).Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(string), KernelFunctionFactory.CreateFromMethod(() => "42").Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(bool), KernelFunctionFactory.CreateFromMethod(() => true).Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(int), KernelFunctionFactory.CreateFromMethod(() => (int?)42).Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(int), KernelFunctionFactory.CreateFromMethod(async () => 42).Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(int), KernelFunctionFactory.CreateFromMethod(async ValueTask () => 42).Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(int), KernelFunctionFactory.CreateFromMethod(async () => (int?)42).Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(int), KernelFunctionFactory.CreateFromMethod(async ValueTask () => (int?)42).Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(string), KernelFunctionFactory.CreateFromMethod(async () => "42").Metadata.ReturnParameter.ParameterType); + Assert.Equal(typeof(string), KernelFunctionFactory.CreateFromMethod(async ValueTask () => "42").Metadata.ReturnParameter.ParameterType); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/MultipleModelTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/MultipleModelTests.cs index fbe791c58a7c..8e26fb850c52 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/MultipleModelTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/MultipleModelTests.cs @@ -2,135 +2,140 @@ using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.TemplateEngine; +using Microsoft.SemanticKernel.TextGeneration; using Moq; using Xunit; namespace SemanticKernel.UnitTests.Functions; + public class MultipleModelTests { [Fact] public async Task ItUsesServiceIdWhenProvidedAsync() { // Arrange - var mockTextCompletion1 = new Mock(); - var mockTextCompletion2 = new Mock(); - var mockCompletionResult = new Mock(); + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); - mockTextCompletion1.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockTextCompletion2.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockCompletionResult.Setup(cr => cr.GetCompletionAsync(It.IsAny())).ReturnsAsync("llmResult"); + var fakeTextContent = new TextContent("llmResult"); + mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + mockTextGeneration2.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); - var kernel = new KernelBuilder() - 
.WithAIService("service1", mockTextCompletion1.Object, false) - .WithAIService("service2", mockTextCompletion2.Object, true) - .Build(); + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + Kernel kernel = builder.Build(); - var templateConfig = new PromptTemplateConfig(); - templateConfig.ModelSettings.Add(new AIRequestSettings() { ServiceId = "service1" }); - var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName"); + var promptConfig = new PromptTemplateConfig(); + promptConfig.Template = "template"; + promptConfig.AddExecutionSettings(new PromptExecutionSettings(), "service1"); + var func = kernel.CreateFunctionFromPrompt(promptConfig); // Act - await kernel.RunAsync(func); + await kernel.InvokeAsync(func); // Assert - mockTextCompletion1.Verify(a => a.GetCompletionsAsync("template", It.IsAny(), It.IsAny()), Times.Once()); - mockTextCompletion2.Verify(a => a.GetCompletionsAsync("template", It.IsAny(), It.IsAny()), Times.Never()); + mockTextGeneration1.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); + mockTextGeneration2.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never()); } [Fact] public async Task ItFailsIfInvalidServiceIdIsProvidedAsync() { // Arrange - var mockTextCompletion1 = new Mock(); - var mockTextCompletion2 = new Mock(); + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); - var kernel = new KernelBuilder() - .WithAIService("service1", mockTextCompletion1.Object, false) - .WithAIService("service2", mockTextCompletion2.Object, true) - .Build(); + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + Kernel kernel = builder.Build(); - var templateConfig = new PromptTemplateConfig(); - templateConfig.ModelSettings.Add(new AIRequestSettings() { ServiceId = "service3" }); - var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName"); + var promptConfig = new PromptTemplateConfig(); + promptConfig.Template = "template"; + promptConfig.AddExecutionSettings(new PromptExecutionSettings(), "service3"); + var func = kernel.CreateFunctionFromPrompt(promptConfig); // Act - var exception = await Assert.ThrowsAsync(() => kernel.RunAsync(func)); + var exception = await Assert.ThrowsAsync(() => kernel.InvokeAsync(func)); // Assert - Assert.Equal("Service of type Microsoft.SemanticKernel.AI.TextCompletion.ITextCompletion and name service3 not registered.", exception.Message); + Assert.Equal("Required service of type Microsoft.SemanticKernel.TextGeneration.ITextGenerationService not registered. 
Expected serviceIds: service3.", exception.Message); } [Theory] - [InlineData(new string[] { "service1" }, 1, new int[] { 1, 0, 0 })] - [InlineData(new string[] { "service2" }, 2, new int[] { 0, 1, 0 })] - [InlineData(new string[] { "service3" }, 0, new int[] { 0, 0, 1 })] - [InlineData(new string[] { "service4", "service1" }, 1, new int[] { 1, 0, 0 })] - public async Task ItUsesServiceIdByOrderAsync(string[] serviceIds, int defaultServiceIndex, int[] callCount) + [InlineData(new string[] { "service1" }, new int[] { 1, 0, 0 })] + [InlineData(new string[] { "service4", "service1" }, new int[] { 1, 0, 0 })] + public async Task ItUsesServiceIdByOrderAsync(string[] serviceIds, int[] callCount) { // Arrange - var mockTextCompletion1 = new Mock(); - var mockTextCompletion2 = new Mock(); - var mockTextCompletion3 = new Mock(); - var mockCompletionResult = new Mock(); - - mockTextCompletion1.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockTextCompletion2.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockTextCompletion3.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockCompletionResult.Setup(cr => cr.GetCompletionAsync(It.IsAny())).ReturnsAsync("llmResult"); - - var kernel = new KernelBuilder() - .WithAIService("service1", mockTextCompletion1.Object, defaultServiceIndex == 0) - .WithAIService("service2", mockTextCompletion2.Object, defaultServiceIndex == 1) - .WithAIService("service3", mockTextCompletion3.Object, defaultServiceIndex == 2) - .Build(); - - var templateConfig = new PromptTemplateConfig(); + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); + var mockTextGeneration3 = new Mock(); + var fakeTextContent = new TextContent("llmResult"); + + mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + mockTextGeneration2.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + mockTextGeneration3.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + builder.Services.AddKeyedSingleton("service3", mockTextGeneration3.Object); + Kernel kernel = builder.Build(); + + var promptConfig = new PromptTemplateConfig(); + promptConfig.Template = "template"; foreach (var serviceId in serviceIds) { - templateConfig.ModelSettings.Add(new AIRequestSettings() { ServiceId = serviceId }); + promptConfig.AddExecutionSettings(new PromptExecutionSettings(), serviceId); } - var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName"); + var func = kernel.CreateFunctionFromPrompt(promptConfig); // Act - await kernel.RunAsync(func); + await kernel.InvokeAsync(func); // Assert - mockTextCompletion1.Verify(a => a.GetCompletionsAsync("template", It.Is(settings => settings.ServiceId == "service1"), It.IsAny()), Times.Exactly(callCount[0])); - mockTextCompletion2.Verify(a => a.GetCompletionsAsync("template", It.Is(settings => settings.ServiceId == "service2"), It.IsAny()), 
Times.Exactly(callCount[1])); - mockTextCompletion3.Verify(a => a.GetCompletionsAsync("template", It.Is(settings => settings.ServiceId == "service3"), It.IsAny()), Times.Exactly(callCount[2])); + mockTextGeneration1.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(callCount[0])); + mockTextGeneration2.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(callCount[1])); + mockTextGeneration3.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(callCount[2])); } [Fact] public async Task ItUsesServiceIdWithJsonPromptTemplateConfigAsync() { // Arrange - var mockTextCompletion1 = new Mock(); - var mockTextCompletion2 = new Mock(); - var mockTextCompletion3 = new Mock(); - var mockCompletionResult = new Mock(); - - mockTextCompletion1.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockTextCompletion2.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockTextCompletion3.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockCompletionResult.Setup(cr => cr.GetCompletionAsync(It.IsAny())).ReturnsAsync("llmResult"); - - var kernel = new KernelBuilder() - .WithAIService("service1", mockTextCompletion1.Object, true) - .WithAIService("service2", mockTextCompletion2.Object, false) - .WithAIService("service3", mockTextCompletion3.Object, false) - .Build(); + var mockTextGeneration1 = new Mock(); + var mockTextGeneration2 = new Mock(); + var mockTextGeneration3 = new Mock(); + var fakeTextContent = new TextContent("llmResult"); + + mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + mockTextGeneration2.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + mockTextGeneration3.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { fakeTextContent }); + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object); + builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object); + builder.Services.AddKeyedSingleton("service3", mockTextGeneration3.Object); + Kernel kernel = builder.Build(); var json = @"{ - ""schema"": 1, + ""template"": ""template"", ""description"": ""Semantic function"", - ""models"": [ - { - ""service_id"": ""service2"", +""input_variables"": + [ + { + ""name"": ""input variable name"", + ""description"": ""input variable description"", + ""default"": ""default value"", + ""is_required"": true + } + ], + ""execution_settings"": { + ""service2"": { ""max_tokens"": 100, ""temperature"": 0.2, ""top_p"": 0.0, @@ -140,8 +145,7 @@ public async Task ItUsesServiceIdWithJsonPromptTemplateConfigAsync() ""\n"" ] }, - { - ""service_id"": ""service3"", + ""service3"": { ""max_tokens"": 100, ""temperature"": 0.4, ""top_p"": 0.0, @@ -151,18 +155,18 @@ public async Task ItUsesServiceIdWithJsonPromptTemplateConfigAsync() ""\n"" ] } - ] + } }"; - var templateConfig = PromptTemplateConfig.FromJson(json); - var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName"); + var promptConfig = 
PromptTemplateConfig.FromJson(json); + var func = kernel.CreateFunctionFromPrompt(promptConfig); // Act - await kernel.RunAsync(func); + await kernel.InvokeAsync(func); // Assert - mockTextCompletion1.Verify(a => a.GetCompletionsAsync("template", It.Is(settings => settings.ServiceId == "service1"), It.IsAny()), Times.Never()); - mockTextCompletion2.Verify(a => a.GetCompletionsAsync("template", It.Is(settings => settings.ServiceId == "service2"), It.IsAny()), Times.Once()); - mockTextCompletion3.Verify(a => a.GetCompletionsAsync("template", It.Is(settings => settings.ServiceId == "service3"), It.IsAny()), Times.Never()); + mockTextGeneration1.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never()); + mockTextGeneration2.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Once()); + mockTextGeneration3.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never()); } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs new file mode 100644 index 000000000000..9ae18b669235 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedAIServiceSelectorTests.cs @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +public class OrderedAIServiceSelectorTests +{ + [Fact] + public void ItThrowsAKernelExceptionForNoServices() + { + // Arrange + var kernel = new Kernel(); + var function = KernelFunctionFactory.CreateFromPrompt("Hello AI"); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + // Assert + Assert.Throws(() => serviceSelector.SelectAIService(kernel, function, new KernelArguments())); + } + + [Fact] + public void ItGetsAIServiceConfigurationForSingleAIService() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new AIService()); + Kernel kernel = builder.Build(); + + var function = kernel.CreateFunctionFromPrompt("Hello AI"); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, new KernelArguments()); + + // Assert + Assert.NotNull(aiService); + Assert.Null(defaultExecutionSettings); + } + + [Fact] + public void ItGetsAIServiceConfigurationForSingleTextGeneration() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new TextGenerationService("model_id_1")); + Kernel kernel = builder.Build(); + + var function = kernel.CreateFunctionFromPrompt("Hello AI"); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, new KernelArguments()); + + // Assert + Assert.NotNull(aiService); + Assert.Null(defaultExecutionSettings); + } + + [Fact] + public void ItGetsAIServiceConfigurationForTextGenerationByServiceId() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new 
TextGenerationService("model_id_1")); + builder.Services.AddKeyedSingleton("service2", new TextGenerationService("model_id_2")); + Kernel kernel = builder.Build(); + + var promptConfig = new PromptTemplateConfig() { Template = "Hello AI" }; + var executionSettings = new PromptExecutionSettings(); + promptConfig.AddExecutionSettings(executionSettings, "service2"); + var function = kernel.CreateFunctionFromPrompt(promptConfig); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, new KernelArguments()); + + // Assert + Assert.Equal(kernel.GetRequiredService("service2"), aiService); + Assert.Equal(executionSettings, defaultExecutionSettings); + } + + [Fact] + public void ItThrowsAKernelExceptionForNotFoundService() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new TextGenerationService("model_id_1")); + builder.Services.AddKeyedSingleton("service2", new TextGenerationService("model_id_2")); + Kernel kernel = builder.Build(); + + var promptConfig = new PromptTemplateConfig() { Template = "Hello AI" }; + promptConfig.AddExecutionSettings(new PromptExecutionSettings(), "service3"); + var function = kernel.CreateFunctionFromPrompt(promptConfig); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + // Assert + Assert.Throws(() => serviceSelector.SelectAIService(kernel, function, new KernelArguments())); + } + + [Fact] + public void ItUsesDefaultServiceForNoExecutionSettings() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new TextGenerationService("model_id_1")); + builder.Services.AddKeyedSingleton("service2", new TextGenerationService("model_id_2")); + Kernel kernel = builder.Build(); + var function = kernel.CreateFunctionFromPrompt("Hello AI"); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, new KernelArguments()); + + // Assert + Assert.Equal(kernel.GetRequiredService("service2"), aiService); + Assert.Null(defaultExecutionSettings); + } + + [Fact] + public void ItUsesDefaultServiceAndSettingsForDefaultExecutionSettings() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new TextGenerationService("model_id_1")); + builder.Services.AddKeyedSingleton("service2", new TextGenerationService("model_id_2")); + Kernel kernel = builder.Build(); + + var executionSettings = new PromptExecutionSettings(); + var function = kernel.CreateFunctionFromPrompt("Hello AI", executionSettings); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, new KernelArguments()); + + // Assert + Assert.Equal(kernel.GetRequiredService("service2"), aiService); + Assert.Equal(executionSettings, defaultExecutionSettings); + } + + [Fact] + public void ItUsesDefaultServiceAndSettingsForDefaultId() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("service1", new TextGenerationService("model_id_1")); + builder.Services.AddKeyedSingleton("service2", new TextGenerationService("model_id_2")); + Kernel kernel = builder.Build(); + + var executionSettings = new PromptExecutionSettings(); + var function = 
kernel.CreateFunctionFromPrompt("Hello AI", executionSettings); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, new KernelArguments()); + + // Assert + Assert.Equal(kernel.GetRequiredService("service2"), aiService); + Assert.Equal(executionSettings, defaultExecutionSettings); + } + + [Theory] + [InlineData(new string[] { "modelid_1" }, "modelid_1")] + [InlineData(new string[] { "modelid_2" }, "modelid_2")] + [InlineData(new string[] { "modelid_3" }, "modelid_3")] + [InlineData(new string[] { "modelid_4", "modelid_1" }, "modelid_1")] + [InlineData(new string[] { "modelid_4", "" }, "modelid_3")] + public void ItGetsAIServiceConfigurationByOrder(string[] serviceIds, string expectedModelId) + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton("modelid_1", new TextGenerationService("modelid_1")); + builder.Services.AddKeyedSingleton("modelid_2", new TextGenerationService("modelid_2")); + builder.Services.AddKeyedSingleton("modelid_3", new TextGenerationService("modelid_3")); + Kernel kernel = builder.Build(); + + var executionSettings = new Dictionary(); + foreach (var serviceId in serviceIds) + { + executionSettings.Add(serviceId, new PromptExecutionSettings() { ModelId = serviceId }); + } + var function = kernel.CreateFunctionFromPrompt(promptConfig: new PromptTemplateConfig() { Template = "Hello AI", ExecutionSettings = executionSettings }); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, new KernelArguments()); + + // Assert + Assert.Equal(kernel.GetRequiredService(expectedModelId), aiService); + if (!string.IsNullOrEmpty(defaultExecutionSettings!.ModelId)) + { + Assert.Equal(expectedModelId, defaultExecutionSettings!.ModelId); + } + } + + [Fact] + public void ItGetsAIServiceConfigurationForTextGenerationByModelId() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddKeyedSingleton(null, new TextGenerationService("model1")); + builder.Services.AddKeyedSingleton(null, new TextGenerationService("model2")); + Kernel kernel = builder.Build(); + + var arguments = new KernelArguments(); + var executionSettings = new PromptExecutionSettings() { ModelId = "model2" }; + var function = kernel.CreateFunctionFromPrompt("Hello AI", executionSettings: executionSettings); + var serviceSelector = new OrderedAIServiceSelector(); + + // Act + (var aiService, var defaultExecutionSettings) = serviceSelector.SelectAIService(kernel, function, arguments); + + // Assert + Assert.NotNull(aiService); + Assert.Equal("model2", aiService.GetModelId()); + Assert.Equal(executionSettings, defaultExecutionSettings); + } + + #region private + private sealed class AIService : IAIService + { + public IReadOnlyDictionary Attributes => new Dictionary(); + } + + private sealed class TextGenerationService : ITextGenerationService + { + public IReadOnlyDictionary Attributes => this._attributes; + + private readonly Dictionary _attributes = new(); + + public TextGenerationService(string modelId) + { + this._attributes.Add("ModelId", modelId); + } + + public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } + + public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } + } + #endregion +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedIAIServiceConfigurationProviderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedIAIServiceConfigurationProviderTests.cs deleted file mode 100644 index c230a440e367..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/OrderedIAIServiceConfigurationProviderTests.cs +++ /dev/null @@ -1,242 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Functions; -using Microsoft.SemanticKernel.Services; -using Xunit; - -namespace SemanticKernel.UnitTests.Functions; -public class OrderedIAIServiceConfigurationProviderTests -{ - [Fact] - public void ItThrowsAnSKExceptionForNoServices() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new List(); - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - // Assert - Assert.Throws(() => configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings)); - } - - [Fact] - public void ItGetsAIServiceConfigurationForSingleAIService() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService(new AIService()); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new List(); - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - (var aiService, var defaultRequestSettings) = configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings); - - // Assert - Assert.NotNull(aiService); - Assert.Null(defaultRequestSettings); - } - - [Fact] - public void ItGetsAIServiceConfigurationForSingleTextCompletion() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService(new TextCompletion()); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new List(); - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - (var aiService, var defaultRequestSettings) = configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings); - - // Assert - Assert.NotNull(aiService); - Assert.Null(defaultRequestSettings); - } - - [Fact] - public void ItAIServiceConfigurationForTextCompletionByServiceId() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService("service1", new TextCompletion()); - serviceCollection.SetService("service2", new TextCompletion()); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new List(); - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - (var aiService, var 
defaultRequestSettings) = configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings); - - // Assert - Assert.NotNull(aiService); - Assert.Null(defaultRequestSettings); - } - - [Fact] - public void ItThrowsAnSKExceptionForNotFoundService() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService("service1", new TextCompletion()); - serviceCollection.SetService("service2", new TextCompletion()); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new List - { - new AIRequestSettings() { ServiceId = "service3" } - }; - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - // Assert - Assert.Throws(() => configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings)); - } - - [Fact] - public void ItUsesDefaultServiceForNullModelSettings() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService("service1", new TextCompletion()); - serviceCollection.SetService("service2", new TextCompletion(), true); - var serviceProvider = serviceCollection.Build(); - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - (var aiService, var defaultRequestSettings) = configurationProvider.SelectAIService(renderedPrompt, serviceProvider, null); - - // Assert - Assert.Equal(serviceProvider.GetService("service2"), aiService); - Assert.Null(defaultRequestSettings); - } - - [Fact] - public void ItUsesDefaultServiceForEmptyModelSettings() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService("service1", new TextCompletion()); - serviceCollection.SetService("service2", new TextCompletion(), true); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new List(); - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - (var aiService, var defaultRequestSettings) = configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings); - - // Assert - Assert.Equal(serviceProvider.GetService("service2"), aiService); - Assert.Null(defaultRequestSettings); - } - - [Fact] - public void ItUsesDefaultServiceAndSettings() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService("service1", new TextCompletion()); - serviceCollection.SetService("service2", new TextCompletion(), true); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new List - { - new AIRequestSettings() - }; - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - (var aiService, var defaultRequestSettings) = configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings); - - // Assert - Assert.Equal(serviceProvider.GetService("service2"), aiService); - Assert.Equal(modelSettings[0], defaultRequestSettings); - } - - [Fact] - public void ItUsesDefaultServiceAndSettingsEmptyServiceId() - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService("service1", new TextCompletion()); - serviceCollection.SetService("service2", new TextCompletion(), true); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new 
List - { - new AIRequestSettings() { ServiceId = "" } - }; - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - (var aiService, var defaultRequestSettings) = configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings); - - // Assert - Assert.Equal(serviceProvider.GetService("service2"), aiService); - Assert.Equal(modelSettings[0], defaultRequestSettings); - } - - [Theory] - [InlineData(new string[] { "service1" }, "service1")] - [InlineData(new string[] { "service2" }, "service2")] - [InlineData(new string[] { "service3" }, "service3")] - [InlineData(new string[] { "service4", "service1" }, "service1")] - public void ItGetsAIServiceConfigurationByOrder(string[] serviceIds, string expectedServiceId) - { - // Arrange - var renderedPrompt = "Hello AI, what can you do for me?"; - var serviceCollection = new AIServiceCollection(); - serviceCollection.SetService("service1", new TextCompletion()); - serviceCollection.SetService("service2", new TextCompletion()); - serviceCollection.SetService("service3", new TextCompletion()); - var serviceProvider = serviceCollection.Build(); - var modelSettings = new List(); - foreach (var serviceId in serviceIds) - { - modelSettings.Add(new AIRequestSettings() { ServiceId = serviceId }); - } - var configurationProvider = new OrderedIAIServiceSelector(); - - // Act - (var aiService, var defaultRequestSettings) = configurationProvider.SelectAIService(renderedPrompt, serviceProvider, modelSettings); - - // Assert - Assert.Equal(serviceProvider.GetService(expectedServiceId), aiService); - Assert.Equal(expectedServiceId, defaultRequestSettings!.ServiceId); - } - - #region private - private sealed class AIService : IAIService - { - } - - private sealed class TextCompletion : ITextCompletion - { - public Task> GetCompletionsAsync(string text, AIRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) - { - throw new NotImplementedException(); - } - - public IAsyncEnumerable GetStreamingCompletionsAsync(string text, AIRequestSettings? requestSettings = null, CancellationToken cancellationToken = default) - { - throw new NotImplementedException(); - } - } - #endregion -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/ParameterViewTypeTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/ParameterViewTypeTests.cs deleted file mode 100644 index 497cf97b63f2..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/ParameterViewTypeTests.cs +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel; -using Xunit; - -namespace SemanticKernel.UnitTests.Functions; - -public class ParameterViewTypeTests -{ - [Theory] - [InlineData("string")] - [InlineData("number")] - [InlineData("object")] - [InlineData("array")] - [InlineData("boolean")] - public void ItCanConvertParameterDataTypeToString(string name) - { - //Arrange - var sut = new ParameterViewType(name); - - //Act - var result = sut.ToString(); - - //Assert - Assert.Equal(name, result); - } - - [Fact] - public void ItCanCreateStringParameterDataType() - { - //Act - var sut = ParameterViewType.String; - - //Assert - Assert.Equal("string", sut.Name); - } - - [Fact] - public void ItCanCreateNumberParameterDataType() - { - //Act - var sut = ParameterViewType.Number; - - //Assert - Assert.Equal("number", sut.Name); - } - - [Fact] - public void ItCanCreateObjectParameterDataType() - { - //Act - var sut = ParameterViewType.Object; - - //Assert - Assert.Equal("object", sut.Name); - } - - [Fact] - public void ItCanArrayParameterDataType() - { - //Act - var sut = ParameterViewType.Array; - - //Assert - Assert.Equal("array", sut.Name); - } - - [Fact] - public void ItCanCreateBooleanParameterDataType() - { - //Act - var sut = ParameterViewType.Boolean; - - //Assert - Assert.Equal("boolean", sut.Name); - } - - [Fact] - public void ItCanCheckTwoParameterDataTypesAreEqual() - { - //Arrange - var sut1 = new ParameterViewType("array"); - var sut2 = new ParameterViewType("array"); - - //Assert - Assert.True(sut1.Equals(sut2)); - } - - [Fact] - public void ItCanCheckTwoParameterDataTypesAreUnequal() - { - //Arrange - var sut1 = new ParameterViewType("array"); - var sut2 = new ParameterViewType("string"); - - //Assert - Assert.False(sut1.Equals(sut2)); - } - - [Fact] - public void ItCanCheckParameterDataTypeIsEqualToAnotherOneRepresentedByObject() - { - //Arrange - var sut1 = new ParameterViewType("array"); - object sut2 = new ParameterViewType("array"); - - //Assert - Assert.True(sut1.Equals(sut2)); - } - - [Fact] - public void ItCanCheckParameterDataTypeIsUnequalToAnotherOneRepresentedByObject() - { - //Arrange - var sut1 = new ParameterViewType("array"); - object sut2 = new ParameterViewType("string"); - - //Assert - Assert.False(sut1.Equals(sut2)); - } - - [Fact] - public void ItCanCheckParameterDataTypeIsUnequalToAnotherType() - { - //Arrange - var sut1 = new ParameterViewType("array"); - var sut2 = "array"; - - //Assert - Assert.False(sut1.Equals(sut2)); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/SKContextTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/SKContextTests.cs deleted file mode 100644 index 0106b73944ed..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/SKContextTests.cs +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Globalization; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.Functions; - -public class SKContextTests -{ - private readonly Mock _functions = new(); - - [Fact] - public void ItHasHelpersForContextVariables() - { - // Arrange - var variables = new ContextVariables(); - var target = new SKContext(new Mock().Object, new Mock().Object, new Mock().Object, variables); - variables.Set("foo1", "bar1"); - - // Act - target.Variables["foo2"] = "bar2"; - target.Variables["INPUT"] = Guid.NewGuid().ToString("N"); - - // Assert - Assert.Equal("bar1", target.Variables["foo1"]); - Assert.Equal("bar1", target.Variables["foo1"]); - Assert.Equal("bar2", target.Variables["foo2"]); - Assert.Equal("bar2", target.Variables["foo2"]); - Assert.Equal(target.Variables["INPUT"], target.Result); - Assert.Equal(target.Variables["INPUT"], target.ToString()); - Assert.Equal(target.Variables["INPUT"], target.Variables.Input); - Assert.Equal(target.Variables["INPUT"], target.Variables.ToString()); - } - - [Fact] - public async Task ItHasHelpersForFunctionCollectionAsync() - { - // Arrange - IDictionary functions = KernelBuilder.Create().ImportFunctions(new Parrot(), "test"); - this._functions.Setup(x => x.GetFunction("func")).Returns(functions["say"]); - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(this._functions.Object); - var target = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(), this._functions.Object); - Assert.NotNull(target.Functions); - - // Act - var say = target.Functions.GetFunction("func"); - - FunctionResult result = await say.InvokeAsync("ciao", kernel.Object); - - // Assert - Assert.Equal("ciao", result.Context.Result); - Assert.Equal("ciao", result.GetValue()); - } - - private (Mock kernelMock, Mock functionRunnerMock, Mock serviceProviderMock, Mock serviceSelectorMock) SetupKernelMock(IReadOnlyFunctionCollection? functions = null) - { - functions ??= new Mock().Object; - - var kernel = new Mock(); - var functionRunner = new Mock(); - var serviceProvider = new Mock(); - var serviceSelector = new Mock(); - - kernel.SetupGet(x => x.Functions).Returns(functions); - kernel.Setup(k => k.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((contextVariables, skills, loggerFactory, culture) => - { - return new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, contextVariables); - }); - - return (kernel, functionRunner, serviceProvider, serviceSelector); - } - - private sealed class Parrot - { - [SKFunction, Description("say something")] - // ReSharper disable once UnusedMember.Local - public string Say(string input) - { - return input; - } - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/SKFunctionTests2.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/SKFunctionTests2.cs deleted file mode 100644 index a0f5e523dcb7..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/SKFunctionTests2.cs +++ /dev/null @@ -1,1058 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Globalization; -using System.Linq; -using System.Reflection; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.Functions; - -public sealed class SKFunctionTests2 -{ - private readonly Mock _logger; - private readonly Mock _functions; - private readonly Mock _kernel; - - private static string s_expected = string.Empty; - private static string s_actual = string.Empty; - - public SKFunctionTests2() - { - this._logger = new Mock(); - this._functions = new Mock(); - this._kernel = new Mock(); - - s_expected = Guid.NewGuid().ToString("D"); - } - - [Fact] - public async Task ItSupportsStaticVoidVoidAsync() - { - // Arrange - static void Test() - { - s_actual = s_expected; - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - } - - [Fact] - public async Task ItSupportsStaticVoidStringAsync() - { - // Arrange - static string Test() - { - s_actual = s_expected; - return s_expected; - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Result); - Assert.Equal(s_expected, result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticVoidTaskStringAsync() - { - // Arrange - static Task Test() - { - s_actual = s_expected; - return Task.FromResult(s_expected); - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Result); - Assert.Equal(s_expected, result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticVoidValueTaskStringAsync() - { - // Arrange - static async ValueTask Test() - { - s_actual = s_expected; - await Task.Delay(1); - return s_expected; - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Result); - Assert.Equal(s_expected, result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticContextVoidAsync() - { - // Arrange - static void Test(SKContext context) - { - s_actual = s_expected; - context.Variables["canary"] = s_expected; - } - - var context = this.MockContext("xy"); - context.Variables["someVar"] = "qz"; - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Variables["canary"]); - } - - [Fact] - public async Task 
ItSupportsStaticContextStringAsync() - { - // Arrange - static string Test(SKContext context) - { - s_actual = context.Variables["someVar"]; - return "abc"; - } - - var context = this.MockContext(""); - context.Variables["someVar"] = s_expected; - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal("abc", context.Result); - Assert.Equal("abc", result.GetValue()); - } - - [Fact] - public async Task ItSupportsInstanceContextStringNullableAsync() - { - // Arrange - int invocationCount = 0; - - string? Test(SKContext context) - { - invocationCount++; - s_actual = context.Variables["someVar"]; - return "abc"; - } - - var context = this.MockContext(""); - context.Variables["someVar"] = s_expected; - - // Act - Func method = Test; - var function = SKFunction.FromNativeMethod(Method(method), method.Target, loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(1, invocationCount); - Assert.Equal(s_expected, s_actual); - Assert.Equal("abc", context.Result); - Assert.Equal("abc", result.GetValue()); - } - - [Fact] - public async Task ItSupportsInstanceContextTaskStringAsync() - { - // Arrange - int invocationCount = 0; - - Task Test(SKContext context) - { - invocationCount++; - s_actual = s_expected; - context.Variables["canary"] = s_expected; - return Task.FromResult(s_expected); - } - - var context = this.MockContext(""); - - // Act - Func> method = Test; - var function = SKFunction.FromNativeMethod(Method(method), method.Target, loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(1, invocationCount); - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_actual, context.Result); - Assert.Equal(s_actual, result.GetValue()); - Assert.Equal(s_expected, context.Variables["canary"]); - } - - [Fact] - public async Task ItSupportsInstanceContextTaskContextAsync() - { - // Arrange - int invocationCount = 0; - - async Task TestAsync(SKContext context) - { - await Task.Delay(0); - invocationCount++; - s_actual = s_expected; - context.Variables.Update("foo"); - context.Variables["canary"] = s_expected; - return context; - } - - var context = this.MockContext(""); - - // Act - Func> method = TestAsync; - var function = SKFunction.FromNativeMethod(Method(method), method.Target, loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(1, invocationCount); - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Variables["canary"]); - Assert.Equal("foo", context.Result); - Assert.Equal("foo", result.GetValue()); - } - - [Fact] - public async Task ItSupportsInstanceStringVoidAsync() - { - // Arrange - int invocationCount = 0; - - void Test(string input) - { - invocationCount++; - s_actual = s_expected + input; - } - - var context = this.MockContext(".blah"); - - // Act - Action method = Test; - var function = SKFunction.FromNativeMethod(Method(method), method.Target, loggerFactory: this._logger.Object); - Assert.NotNull(function); - - await function.InvokeAsync(context); - - // Assert - Assert.Equal(1, invocationCount); - Assert.Equal(s_expected + ".blah", s_actual); - } - - [Fact] - public async Task 
ItSupportsInstanceStringStringAsync() - { - // Arrange - int invocationCount = 0; - - string Test(string input) - { - invocationCount++; - s_actual = s_expected; - return "foo-bar"; - } - - var context = this.MockContext(""); - - // Act - Func method = Test; - var function = SKFunction.FromNativeMethod(Method(method), method.Target, loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(1, invocationCount); - Assert.Equal(s_expected, s_actual); - Assert.Equal("foo-bar", context.Result); - Assert.Equal("foo-bar", result.GetValue()); - } - - [Fact] - public async Task ItSupportsInstanceStringTaskStringAsync() - { - // Arrange - int invocationCount = 0; - - Task Test(string input) - { - invocationCount++; - s_actual = s_expected; - return Task.FromResult("hello there"); - } - - var context = this.MockContext(""); - - // Act - Func> method = Test; - var function = SKFunction.FromNativeMethod(Method(method), method.Target, loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(1, invocationCount); - Assert.Equal(s_expected, s_actual); - Assert.Equal("hello there", context.Result); - Assert.Equal("hello there", result.GetValue()); - } - - [Fact] - public async Task ItSupportsInstanceStringContextVoidAsync() - { - // Arrange - int invocationCount = 0; - - void Test(string input, SKContext context) - { - invocationCount++; - s_actual = s_expected; - context.Variables.Update("x y z"); - context.Variables["canary"] = s_expected; - } - - var context = this.MockContext(""); - - // Act - Action method = Test; - var function = SKFunction.FromNativeMethod(Method(method), method.Target, loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(1, invocationCount); - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Variables["canary"]); - Assert.Equal("x y z", context.Result); - Assert.Null(result.GetValue()); - } - - [Fact] - public async Task ItSupportsInstanceContextStringVoidAsync() - { - // Arrange - int invocationCount = 0; - - void Test(SKContext context, string input) - { - invocationCount++; - s_actual = s_expected; - context.Variables.Update("x y z"); - context.Variables["canary"] = s_expected; - } - - var context = this.MockContext(""); - - // Act - Action method = Test; - var function = SKFunction.FromNativeMethod(Method(method), method.Target, loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(1, invocationCount); - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Variables["canary"]); - Assert.Equal("x y z", context.Result); - Assert.Null(result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticStringContextStringAsync() - { - // Arrange - static string Test(string input, SKContext context) - { - s_actual = s_expected; - context.Variables["canary"] = s_expected; - context.Variables.Update("x y z"); - // This value should overwrite "x y z" - return "new data"; - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, 
context.Variables["canary"]); - Assert.Equal("new data", context.Result); - Assert.Equal("new data", result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticStringContextTaskStringAsync() - { - // Arrange - static Task Test(string input, SKContext context) - { - s_actual = s_expected; - context.Variables["canary"] = s_expected; - context.Variables.Update("x y z"); - // This value should overwrite "x y z" - return Task.FromResult("new data"); - } - - var context = this.MockContext(""); - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Variables["canary"]); - Assert.Equal("new data", context.Result); - Assert.Equal("new data", result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticStringContextTaskContextAsync() - { - // Arrange - static Task Test(string input, SKContext context) - { - s_actual = s_expected; - context.Variables["canary"] = s_expected; - context.Variables.Update("x y z"); - - var newContext = context.Clone(); - newContext.Variables.Clear(); - - // This value should overwrite "x y z". Contexts are merged. - newContext.Variables.Update("new data"); - newContext.Variables["canary2"] = "222"; - - return Task.FromResult(newContext); - } - - var oldContext = this.MockContext(""); - oldContext.Variables["legacy"] = "something"; - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(oldContext); - var newContext = result.Context; - - // Assert - Assert.Equal(s_expected, s_actual); - - Assert.True(oldContext.Variables.ContainsKey("canary")); - Assert.False(oldContext.Variables.ContainsKey("canary2")); - - Assert.False(newContext.Variables.ContainsKey("canary")); - Assert.True(newContext.Variables.ContainsKey("canary2")); - - Assert.Equal(s_expected, oldContext.Variables["canary"]); - Assert.Equal("222", newContext.Variables["canary2"]); - - Assert.True(oldContext.Variables.ContainsKey("legacy")); - Assert.False(newContext.Variables.ContainsKey("legacy")); - - Assert.Equal("x y z", oldContext.Result); - Assert.Equal("new data", newContext.Result); - - Assert.Equal("new data", result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticContextValueTaskContextAsync() - { - // Arrange - static ValueTask Test(string input, SKContext context) - { - // This value should overwrite "x y z". Contexts are merged. 
- var newCx = context.Clone(); - newCx.Variables.Update(input + "abc"); - - return new ValueTask(newCx); - } - - var oldContext = this.MockContext("test"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(oldContext); - - // Assert - Assert.Equal("testabc", result.Context.Variables.Input); - } - - [Fact] - public async Task ItSupportsStaticStringTaskAsync() - { - // Arrange - static Task TestAsync(string input) - { - s_actual = s_expected; - return Task.CompletedTask; - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(TestAsync), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - } - - [Fact] - public async Task ItSupportsStaticStringValueTaskAsync() - { - // Arrange - static ValueTask TestAsync(string input) - { - s_actual = s_expected; - return default; - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(TestAsync), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - } - - [Fact] - public async Task ItSupportsStaticContextTaskAsync() - { - // Arrange - static Task TestAsync(SKContext context) - { - s_actual = s_expected; - context.Variables["canary"] = s_expected; - context.Variables.Update("x y z"); - return Task.CompletedTask; - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(TestAsync), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Variables["canary"]); - Assert.Equal("x y z", context.Result); - Assert.Null(result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticStringContextTaskAsync() - { - // Arrange - static Task TestAsync(string input, SKContext context) - { - s_actual = s_expected; - context.Variables["canary"] = s_expected; - context.Variables.Update(input + "x y z"); - return Task.CompletedTask; - } - - var context = this.MockContext("input:"); - - // Act - var function = SKFunction.FromNativeMethod(Method(TestAsync), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - var result = await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - Assert.Equal(s_expected, context.Variables["canary"]); - Assert.Equal("input:x y z", context.Result); - Assert.Null(result.GetValue()); - } - - [Fact] - public async Task ItSupportsStaticVoidTaskAsync() - { - // Arrange - static Task TestAsync() - { - s_actual = s_expected; - return Task.CompletedTask; - } - - var context = this.MockContext(""); - - // Act - var function = SKFunction.FromNativeMethod(Method(TestAsync), loggerFactory: this._logger.Object); - Assert.NotNull(function); - - await function.InvokeAsync(context); - - // Assert - Assert.Equal(s_expected, s_actual); - } - - [Fact] - public async Task ItSupportsUsingNamedInputValueFromContextAsync() - { - static string Test(string input) => "Result: " + input; - - var context = this.MockContext("input value"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - FunctionResult result = await 
function.InvokeAsync(context); - - // Assert - Assert.Equal("Result: input value", result.Context.Variables.Input); - } - - [Fact] - public async Task ItSupportsUsingNonNamedInputValueFromContextAsync() - { - static string Test(string other) => "Result: " + other; - - var context = this.MockContext("input value"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("Result: input value", result.Context.Variables.Input); - } - - [Fact] - public async Task ItSupportsUsingNonNamedInputValueFromContextEvenWhenThereAreMultipleParametersAsync() - { - static string Test(int something, long orother) => "Result: " + (something + orother); - - var context = this.MockContext("42"); - context.Variables.Set("orother", "8"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("Result: 50", result.Context.Variables.Input); - } - - [Fact] - public async Task ItSupportsPreferringNamedValueOverInputFromContextAsync() - { - static string Test(string other) => "Result: " + other; - - var context = this.MockContext("input value"); - context.Variables.Set("other", "other value"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("Result: other value", result.Context.Variables.Input); - } - - [Fact] - public async Task ItSupportsOverridingNameWithAttributeAsync() - { - static string Test([SKName("input"), Description("description")] string other) => "Result: " + other; - - var context = this.MockContext("input value"); - context.Variables.Set("other", "other value"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("Result: input value", result.Context.Variables.Input); - } - - [Fact] - public async Task ItSupportNullDefaultValuesOverInputAsync() - { - static string Test(string? input = null, string? other = null) => "Result: " + (other is null); - - var context = this.MockContext("input value"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("Result: True", result.Context.Variables.Input); - } - - [Fact] - public async Task ItSupportsConvertingFromManyTypesAsync() - { - static string Test(int a, long b, decimal c, Guid d, DateTimeOffset e, DayOfWeek? 
f) => - $"{a} {b} {c} {d} {e:R} {f}"; - - var context = this.MockContext(""); - context.Variables.Set("a", "1"); - context.Variables.Set("b", "-2"); - context.Variables.Set("c", "1234"); - context.Variables.Set("d", "7e08cc00-1d71-4558-81ed-69929499dea1"); - context.Variables.Set("e", "Thu, 25 May 2023 20:17:30 GMT"); - context.Variables.Set("f", "Monday"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("1 -2 1234 7e08cc00-1d71-4558-81ed-69929499dea1 Thu, 25 May 2023 20:17:30 GMT Monday", result.Context.Variables.Input); - } - - [Fact] - public async Task ItSupportsConvertingFromTypeConverterAttributedTypesAsync() - { - static int Test(MyCustomType mct) => mct.Value * 2; - - var context = this.MockContext(""); - context.Variables.Set("mct", "42"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("84", result.Context.Variables.Input); - } - - [TypeConverter(typeof(MyCustomTypeConverter))] - private sealed class MyCustomType - { - public int Value { get; set; } - } - -#pragma warning disable CA1812 // Instantiated by reflection - private sealed class MyCustomTypeConverter : TypeConverter - { - public override bool CanConvertFrom(ITypeDescriptorContext? context, Type sourceType) => - sourceType == typeof(string); - public override object? ConvertFrom(ITypeDescriptorContext? context, CultureInfo? culture, object value) => - new MyCustomType { Value = int.Parse((string)value, culture) }; - } -#pragma warning restore CA1812 - - [Fact] - public async Task ItSupportsConvertingFromToManyTypesAsync() - { - // Arrange - var context = this.MockContext("1"); - - static async Task AssertResult(Delegate d, SKContext context, string expected) - { - var result = await SKFunction.FromNativeFunction(d, functionName: "Test")!.InvokeAsync(context); - context = result.Context; - - Assert.Equal(expected, context.Variables.Input); - } - - // Act/Assert - await AssertResult((sbyte input) => input * 2, context, "2"); - await AssertResult((byte input) => input * 2, context, "4"); - await AssertResult((short input) => input * 2, context, "8"); - await AssertResult((ushort input) => input * 2, context, "16"); - await AssertResult((int input) => input * 2, context, "32"); - await AssertResult((uint input) => input * 2, context, "64"); - await AssertResult((long input) => input * 2, context, "128"); - await AssertResult((ulong input) => input * 2, context, "256"); - await AssertResult((float input) => input * 2, context, "512"); - await AssertResult((double input) => input * 2, context, "1024"); - await AssertResult((int input) => Task.FromResult(input * 2), context, "2048"); - await AssertResult((long input) => Task.FromResult(input * 2), context, "4096"); - await AssertResult((int input) => ValueTask.FromResult(input * 2), context, "8192"); - await AssertResult((long input) => ValueTask.FromResult(input * 2), context, "16384"); - await AssertResult((long? input) => input!.Value * 2, context, "32768"); - await AssertResult((TimeSpan input) => input * 2, context, "65536.00:00:00"); - await AssertResult((TimeSpan? 
input) => (int?)null, context, ""); - - context.Variables.Update("http://example.com/semantic"); - await AssertResult((Uri input) => new Uri(input, "kernel"), context, "http://example.com/kernel"); - } - - [Fact] - public async Task ItUsesContextCultureForParsingFormattingAsync() - { - // Arrange - var context = this.MockContext(""); - ISKFunction func = SKFunction.FromNativeFunction((double input) => input * 2, functionName: "Test"); - FunctionResult result; - - // Act/Assert - - context.Culture = new CultureInfo("fr-FR"); - context.Variables.Update("12,34"); // tries first to parse with the specified culture - result = await func.InvokeAsync(context); - Assert.Equal("24,68", result.Context.Variables.Input); - - context.Culture = new CultureInfo("fr-FR"); - context.Variables.Update("12.34"); // falls back to invariant culture - result = await func.InvokeAsync(context); - Assert.Equal("24,68", result.Context.Variables.Input); - - context.Culture = new CultureInfo("en-US"); - context.Variables.Update("12.34"); // works with current culture - result = await func.InvokeAsync(context); - Assert.Equal("24.68", result.Context.Variables.Input); - - context.Culture = new CultureInfo("en-US"); - context.Variables.Update("12,34"); // not parsable with current or invariant culture - await Assert.ThrowsAsync(() => func.InvokeAsync(context)); - } - - [Fact] - public async Task ItThrowsWhenItFailsToConvertAnArgumentAsync() - { - static string Test(Guid g) => g.ToString(); - - var context = this.MockContext(""); - context.Variables.Set("g", "7e08cc00-1d71-4558-81ed-69929499dxyz"); - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - Assert.NotNull(function); - - var ex = await Assert.ThrowsAsync(() => function.InvokeAsync(context)); - - //Assert - AssertExtensions.AssertIsArgumentOutOfRange(ex, "g", context.Variables["g"]); - } - - [Fact] - public void ItExposesMetadataFromDelegate() - { - [Description("Concat information")] - static string Test(Guid id, string name, [SKName("old")] int age) => $"{id} {name} {age}"; - - // Act - var function = SKFunction.FromNativeFunction(Test); - - // Assert - Assert.Contains("Test", function.Name, StringComparison.Ordinal); - Assert.Equal("Concat information", function.Description); - Assert.Equal("id", function.Describe().Parameters[0].Name); - Assert.Equal("name", function.Describe().Parameters[1].Name); - Assert.Equal("old", function.Describe().Parameters[2].Name); - } - - [Fact] - public void ItExposesMetadataFromMethodInfo() - { - [Description("Concat information")] - static string Test(Guid id, string name, [SKName("old")] int age) => $"{id} {name} {age}"; - - // Act - var function = SKFunction.FromNativeMethod(Method(Test)); - - // Assert - Assert.Contains("Test", function.Name, StringComparison.Ordinal); - Assert.Equal("Concat information", function.Description); - Assert.Equal("id", function.Describe().Parameters[0].Name); - Assert.Equal("name", function.Describe().Parameters[1].Name); - Assert.Equal("old", function.Describe().Parameters[2].Name); - } - - [Fact] - public async Task ItCanReturnBasicTypesAsync() - { - // Arrange - static int TestInt(int number) => number; - static double TestDouble(double number) => number; - static string TestString(string str) => str; - static bool TestBool(bool flag) => flag; - - var function1 = SKFunction.FromNativeMethod(Method(TestInt)); - var function2 = SKFunction.FromNativeMethod(Method(TestDouble)); - var function3 = SKFunction.FromNativeMethod(Method(TestString)); - var function4 = 
SKFunction.FromNativeMethod(Method(TestBool)); - - // Act - FunctionResult result1 = await function1.InvokeAsync(this.MockContext("42")); - FunctionResult result2 = await function2.InvokeAsync(this.MockContext("3.14")); - FunctionResult result3 = await function3.InvokeAsync(this.MockContext("test-string")); - FunctionResult result4 = await function4.InvokeAsync(this.MockContext("true")); - - // Assert - Assert.Equal(42, result1.GetValue()); - Assert.Equal(3.14, result2.GetValue()); - Assert.Equal("test-string", result3.GetValue()); - Assert.True(result4.GetValue()); - } - - [Fact] - public async Task ItCanReturnComplexTypeAsync() - { - // Arrange - static MyCustomType TestCustomType(MyCustomType instance) => instance; - - var context = this.MockContext(""); - context.Variables.Set("instance", "42"); - - var function = SKFunction.FromNativeMethod(Method(TestCustomType)); - - // Act - FunctionResult result = await function.InvokeAsync(context); - - var actualInstance = result.GetValue(); - - // Assert - Assert.NotNull(actualInstance); - Assert.Equal(42, actualInstance.Value); - } - - [Fact] - public async Task ItCanReturnAsyncEnumerableTypeAsync() - { - // Arrange - static async IAsyncEnumerable TestAsyncEnumerableTypeAsync() - { - yield return 1; - - await Task.Delay(50); - - yield return 2; - - await Task.Delay(50); - - yield return 3; - } - - var function = SKFunction.FromNativeMethod(Method(TestAsyncEnumerableTypeAsync)); - - // Act - FunctionResult result = await function.InvokeAsync(this.MockContext(string.Empty)); - - // Assert - Assert.NotNull(result); - - var asyncEnumerableResult = result.GetValue>(); - - Assert.NotNull(asyncEnumerableResult); - - var assertResult = new List(); - - await foreach (var value in asyncEnumerableResult) - { - assertResult.Add(value); - } - - Assert.True(assertResult.SequenceEqual(new List { 1, 2, 3 })); - } - - private static MethodInfo Method(Delegate method) - { - return method.Method; - } - - private SKContext MockContext(string input) - { - var functionRunner = new Mock(); - var serviceProvider = new Mock(); - var serviceSelector = new Mock(); - - return new SKContext( - functionRunner.Object, - serviceProvider.Object, - serviceSelector.Object, - new ContextVariables(input) - ); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/SKFunctionTests3.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/SKFunctionTests3.cs deleted file mode 100644 index b114016baca6..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/SKFunctionTests3.cs +++ /dev/null @@ -1,394 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.ComponentModel; -using System.Linq; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Xunit; - -namespace SemanticKernel.UnitTests.Functions; - -public sealed class SKFunctionTests3 -{ - [Fact] - public void ItDoesntThrowForValidFunctionsViaDelegate() - { - // Arrange - var pluginInstance = new LocalExamplePlugin(); - MethodInfo[] methods = pluginInstance.GetType() - .GetMethods(BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public | BindingFlags.InvokeMethod) - .Where(m => m.Name is not "GetType" and not "Equals" and not "GetHashCode" and not "ToString") - .ToArray(); - - ISKFunction[] functions = (from method in methods select SKFunction.FromNativeMethod(method, pluginInstance, "plugin")).ToArray(); - - // Act - Assert.Equal(methods.Length, functions.Length); - Assert.All(functions, Assert.NotNull); - } - - [Fact] - public void ItDoesNotThrowForValidFunctionsViaPlugin() - { - // Arrange - var pluginInstance = new LocalExamplePlugin(); - MethodInfo[] methods = pluginInstance.GetType() - .GetMethods(BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public | BindingFlags.InvokeMethod) - .Where(m => m.Name is not "GetType" and not "Equals" and not "GetHashCode" and not "ToString") - .ToArray(); - - ISKFunction[] functions = new KernelBuilder().Build().ImportFunctions(pluginInstance).Select(s => s.Value).ToArray(); - - // Act - Assert.Equal(methods.Length, functions.Length); - Assert.All(functions, f => Assert.NotNull(f)); - } - - [Fact] - public void ItThrowsForInvalidFunctions() - { - // Arrange - var instance = new InvalidPlugin(); - MethodInfo[] methods = instance.GetType() - .GetMethods(BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public | BindingFlags.InvokeMethod) - .Where(m => m.Name is not "GetType" and not "Equals" and not "GetHashCode") - .ToArray(); - - // Act - Assert that no exception occurs - var count = 0; - foreach (var method in methods) - { - try - { - SKFunction.FromNativeMethod(method, instance, "plugin"); - } - catch (SKException) - { - count++; - } - } - - // Assert - Assert.Equal(4, count); - } - - [Fact] - public async Task ItCanImportNativeFunctionsAsync() - { - // Arrange - var context = new KernelBuilder().Build().CreateNewContext(); - context.Variables["done"] = "NO"; - - // Note: the function doesn't have any SK attributes - async Task ExecuteAsync(SKContext contextIn) - { - Assert.Equal("NO", contextIn.Variables["done"]); - contextIn.Variables["canary"] = "YES"; - - await Task.Delay(0); - return contextIn; - } - - // Act - ISKFunction function = SKFunction.FromNativeFunction( - nativeFunction: ExecuteAsync, - parameters: null, - description: "description", - pluginName: "pluginName", - functionName: "functionName"); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("YES", context.Variables["canary"]); - Assert.Equal("YES", result.Context.Variables["canary"]); - } - - [Fact] - public async Task ItCanImportNativeFunctionsWithExternalReferencesAsync() - { - // Arrange - var context = new KernelBuilder().Build().CreateNewContext(); - context.Variables["done"] = "NO"; - - // Note: This is an important edge case that affects the function signature and how delegates - // are handled internally: the function references an external variable and cannot be static. 
- // This scenario is used for gRPC functions. - string variableOutsideTheFunction = "foo"; - - async Task ExecuteAsync(SKContext contextIn) - { - string referenceToExternalVariable = variableOutsideTheFunction; - contextIn.Variables["canary"] = "YES"; - - await Task.Delay(0); - return contextIn; - } - - // Act. Note: this will throw an exception if SKFunction doesn't handle the function type. - ISKFunction function = SKFunction.FromNativeFunction( - nativeFunction: ExecuteAsync, - description: "description", - pluginName: "pluginName", - functionName: "functionName"); - - FunctionResult result = await function.InvokeAsync(context); - - // Assert - Assert.Equal("YES", result.Context.Variables["canary"]); - } - - private sealed class InvalidPlugin - { - [SKFunction] - public void Invalid1([SKName("input"), Description("The x parameter")] string x, [SKName("input"), Description("The y parameter")] string y) - { - } - - [SKFunction] - public void Invalid2(string y, CustomUnknownType n) - { - } - - [SKFunction] - public void Invalid3(SKContext context1, SKContext context2) - { - } - - [SKFunction] - public void Invalid4(CancellationToken ct1, CancellationToken ct2) - { - } - - public struct CustomUnknownType { } - } - - private sealed class LocalExamplePlugin - { - [SKFunction] - public void Type01() - { - } - - [SKFunction] - public string Type02() - { - return ""; - } - - [SKFunction] - public string? Type02Nullable() - { - return null; - } - - [SKFunction] - public async Task Type03Async() - { - await Task.Delay(0); - return ""; - } - - [SKFunction] - public async Task Type03NullableAsync() - { - await Task.Delay(0); - return null; - } - - [SKFunction] - public void Type04(SKContext context) - { - } - - [SKFunction] - public void Type04Nullable(SKContext? context) - { - } - - [SKFunction] - public string Type05(SKContext context) - { - return ""; - } - - [SKFunction] - public string? Type05Nullable(SKContext? context) - { - return null; - } - - [SKFunction] - public async Task Type06Async(SKContext context) - { - await Task.Delay(0); - return ""; - } - - [SKFunction] - public async Task Type07Async(SKContext context) - { - await Task.Delay(0); - return context; - } - - [SKFunction] - public void Type08(string input) - { - } - - [SKFunction] - public void Type08Nullable(string? input) - { - } - - [SKFunction] - public string Type09(string input) - { - return ""; - } - - [SKFunction] - public string? Type09Nullable(string? input = null) - { - return ""; - } - - [SKFunction] - public async Task Type10Async(string input) - { - await Task.Delay(0); - return ""; - } - - [SKFunction] - public async Task Type10NullableAsync(string? input) - { - await Task.Delay(0); - return ""; - } - - [SKFunction] - public void Type11(string input, SKContext context) - { - } - - [SKFunction] - public void Type11Nullable(string? input = null, SKContext? 
context = null) - { - } - - [SKFunction] - public string Type12(string input, SKContext context) - { - return ""; - } - - [SKFunction] - public async Task Type13Async(string input, SKContext context) - { - await Task.Delay(0); - return ""; - } - - [SKFunction] - public async Task Type14Async(string input, SKContext context) - { - await Task.Delay(0); - return context; - } - - [SKFunction] - public async Task Type15Async(string input) - { - await Task.Delay(0); - } - - [SKFunction] - public async Task Type16Async(SKContext context) - { - await Task.Delay(0); - } - - [SKFunction] - public async Task Type17Async(string input, SKContext context) - { - await Task.Delay(0); - } - - [SKFunction] - public async Task Type18Async() - { - await Task.Delay(0); - } - - [SKFunction] - public async ValueTask ReturnsValueTaskAsync() - { - await Task.Delay(0); - } - - [SKFunction] - public async ValueTask ReturnsValueTaskStringAsync() - { - await Task.Delay(0); - return "hello world"; - } - - [SKFunction] - public async ValueTask ReturnsValueTaskContextAsync(SKContext context) - { - await Task.Delay(0); - return context; - } - - [SKFunction] - public string WithPrimitives( - byte a1, - byte? b1, - sbyte c1, - sbyte? d1, - short e1, - short? f1, - ushort g1, - ushort? h1, - int i1, - int? j1, - uint k1, - uint? l1, - long m1, - long? n1, - ulong o1, - ulong? p1, - float q1, - float? r1, - double s1, - double? t1, - decimal u1, - decimal? v1, - char w1, - char? x1, - bool y1, - bool? z1, - DateTime a2, - DateTime? b2, - DateTimeOffset c2, - DateTimeOffset? d2, - TimeSpan e2, - TimeSpan? f2, - Guid g2, - Guid? h2, - DayOfWeek i2, - DayOfWeek? j2, - Uri k2, - string l2) - { - return string.Empty; - } - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/SemanticFunctionTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/SemanticFunctionTests.cs deleted file mode 100644 index d04cc759fc48..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/SemanticFunctionTests.cs +++ /dev/null @@ -1,432 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Linq; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.TemplateEngine; -using Moq; -using Xunit; - -// ReSharper disable StringLiteralTypo - -namespace SemanticKernel.UnitTests.Functions; - -public class SemanticFunctionTests -{ - [Fact] - public void ItProvidesAccessToFunctionsViaFunctionCollection() - { - // Arrange - var factory = new Mock>(); - var kernel = new KernelBuilder() - .WithDefaultAIService(factory.Object) - .Build(); - - kernel.CreateSemanticFunction(promptTemplate: "Tell me a joke", functionName: "joker", pluginName: "jk", description: "Nice fun"); - - // Act & Assert - 3 functions, var name is not case sensitive - Assert.True(kernel.Functions.TryGetFunction("jk", "joker", out _)); - Assert.True(kernel.Functions.TryGetFunction("JK", "JOKER", out _)); - } - - [Theory] - [InlineData(null, "Assistant is a large language model.")] - [InlineData("My Chat Prompt", "My Chat Prompt")] - public async Task ItUsesChatSystemPromptWhenProvidedAsync(string providedSystemChatPrompt, string expectedSystemChatPrompt) - { - // Arrange - var mockTextCompletion = new Mock(); - var mockCompletionResult = new Mock(); - - mockTextCompletion.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockCompletionResult.Setup(cr => cr.GetCompletionAsync(It.IsAny())).ReturnsAsync("llmResult"); - - var kernel = new KernelBuilder() - .WithAIService("x", mockTextCompletion.Object) - .Build(); - - var templateConfig = new PromptTemplateConfig(); - templateConfig.ModelSettings.Add(new OpenAIRequestSettings() - { - ChatSystemPrompt = providedSystemChatPrompt - }); - - var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName"); - - // Act - await kernel.RunAsync(func); - - // Assert - mockTextCompletion.Verify(a => a.GetCompletionsAsync("template", It.Is(c => c.ChatSystemPrompt == expectedSystemChatPrompt), It.IsAny()), Times.Once()); - } - - [Fact] - public void ItAllowsToCreateFunctionsInTheGlobalNamespace() - { - // Arrange - var kernel = new KernelBuilder().Build(); - var templateConfig = new PromptTemplateConfig(); - - // Act - var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName"); - - // Assert - Assert.Equal(FunctionCollection.GlobalFunctionsPluginName, func.PluginName); - } - - [Fact] - public async Task ItUsesDefaultServiceWhenSpecifiedAsync() - { - // Arrange - var mockTextCompletion1 = new Mock(); - var mockTextCompletion2 = new Mock(); - var mockCompletionResult = new Mock(); - - mockTextCompletion1.Setup(c => c.GetCompletionsAsync(It.IsAny(), null, It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockTextCompletion2.Setup(c => c.GetCompletionsAsync(It.IsAny(), null, It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockCompletionResult.Setup(cr => cr.GetCompletionAsync(It.IsAny())).ReturnsAsync("llmResult"); - - var kernel = new KernelBuilder() - .WithAIService("service1", mockTextCompletion1.Object, false) - .WithAIService("service2", mockTextCompletion2.Object, true) - .Build(); - - var templateConfig = new PromptTemplateConfig(); - var 
func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName"); - - // Act - await kernel.RunAsync(func); - - // Assert - mockTextCompletion1.Verify(a => a.GetCompletionsAsync("template", null, It.IsAny()), Times.Never()); - mockTextCompletion2.Verify(a => a.GetCompletionsAsync("template", null, It.IsAny()), Times.Once()); - } - - [Fact] - public async Task ItUsesServiceIdWhenProvidedAsync() - { - // Arrange - var mockTextCompletion1 = new Mock(); - var mockTextCompletion2 = new Mock(); - var mockCompletionResult = new Mock(); - - mockTextCompletion1.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockTextCompletion2.Setup(c => c.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new[] { mockCompletionResult.Object }); - mockCompletionResult.Setup(cr => cr.GetCompletionAsync(It.IsAny())).ReturnsAsync("llmResult"); - - var kernel = new KernelBuilder() - .WithAIService("service1", mockTextCompletion1.Object, false) - .WithAIService("service2", mockTextCompletion2.Object, true) - .Build(); - - var templateConfig = new PromptTemplateConfig(); - templateConfig.ModelSettings.Add(new AIRequestSettings() { ServiceId = "service1" }); - var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName"); - - // Act - await kernel.RunAsync(func); - - // Assert - mockTextCompletion1.Verify(a => a.GetCompletionsAsync("template", It.IsAny(), It.IsAny()), Times.Once()); - mockTextCompletion2.Verify(a => a.GetCompletionsAsync("template", It.IsAny(), It.IsAny()), Times.Never()); - } - - [Fact] - public async Task ItFailsIfInvalidServiceIdIsProvidedAsync() - { - // Arrange - var mockTextCompletion1 = new Mock(); - var mockTextCompletion2 = new Mock(); - - var kernel = new KernelBuilder() - .WithAIService("service1", mockTextCompletion1.Object, false) - .WithAIService("service2", mockTextCompletion2.Object, true) - .Build(); - - var templateConfig = new PromptTemplateConfig(); - templateConfig.ModelSettings.Add(new AIRequestSettings() { ServiceId = "service3" }); - var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName"); - - // Act - var exception = await Assert.ThrowsAsync(() => kernel.RunAsync(func)); - - // Assert - Assert.Equal("Service of type Microsoft.SemanticKernel.AI.TextCompletion.ITextCompletion and name service3 not registered.", exception.Message); - } - - [Theory] - [InlineData(1)] - [InlineData(2)] - public async Task RunAsyncHandlesPreInvocationAsync(int pipelineCount) - { - // Arrange - var (mockTextResult, mockTextCompletion) = this.SetupMocks(); - var sut = new KernelBuilder().WithAIService(null, mockTextCompletion.Object).Build(); - var semanticFunction = sut.CreateSemanticFunction("Write a simple phrase about UnitTests"); - - var invoked = 0; - sut.FunctionInvoking += (sender, e) => - { - invoked++; - }; - List functions = new(); - for (int i = 0; i < pipelineCount; i++) - { - functions.Add(semanticFunction); - } - - // Act - var result = await sut.RunAsync(functions.ToArray()); - - // Assert - Assert.Equal(pipelineCount, invoked); - mockTextCompletion.Verify(m => m.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(pipelineCount)); - } - - [Fact] - public async Task RunAsyncHandlesPreInvocationWasCancelledAsync() - { - // Arrange - var sut = new KernelBuilder().Build(); - var semanticFunction = sut.CreateSemanticFunction("Write a simple phrase about UnitTests"); - var 
input = "Test input"; - var invoked = false; - sut.FunctionInvoking += (sender, e) => - { - invoked = true; - e.Cancel(); - }; - - // Act - var result = await sut.RunAsync(input, semanticFunction); - - // Assert - Assert.True(invoked); - Assert.Null(result.GetValue()); - } - - [Fact] - public async Task RunAsyncHandlesPreInvocationCancelationDontRunSubsequentFunctionsInThePipelineAsync() - { - // Arrange - var (mockTextResult, mockTextCompletion) = this.SetupMocks(); - var sut = new KernelBuilder().WithAIService(null, mockTextCompletion.Object).Build(); - var semanticFunction = sut.CreateSemanticFunction("Write a simple phrase about UnitTests"); - - var invoked = 0; - sut.FunctionInvoking += (sender, e) => - { - invoked++; - e.Cancel(); - }; - - // Act - var result = await sut.RunAsync(semanticFunction, semanticFunction); - - // Assert - Assert.Equal(1, invoked); - mockTextCompletion.Verify(m => m.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); - } - - [Fact] - public async Task RunAsyncPreInvocationCancelationDontTriggerInvokedHandlerAsync() - { - // Arrange - var sut = new KernelBuilder().Build(); - var semanticFunction = sut.CreateSemanticFunction("Write a simple phrase about UnitTests"); - var invoked = 0; - - sut.FunctionInvoking += (sender, e) => - { - e.Cancel(); - }; - - sut.FunctionInvoked += (sender, e) => - { - invoked++; - }; - - // Act - var result = await sut.RunAsync(semanticFunction); - - // Assert - Assert.Equal(0, invoked); - } - - [Fact] - public async Task RunAsyncPreInvocationSkipDontTriggerInvokedHandlerAsync() - { - // Arrange - var (mockTextResult, mockTextCompletion) = this.SetupMocks(); - var sut = new KernelBuilder().WithAIService(null, mockTextCompletion.Object).Build(); - var semanticFunction1 = sut.CreateSemanticFunction("Write one phrase about UnitTests", functionName: "SkipMe"); - var semanticFunction2 = sut.CreateSemanticFunction("Write two phrases about UnitTests", functionName: "DontSkipMe"); - var invoked = 0; - var invoking = 0; - string invokedFunction = string.Empty; - - sut.FunctionInvoking += (sender, e) => - { - invoking++; - if (e.FunctionView.Name == "SkipMe") - { - e.Skip(); - } - }; - - sut.FunctionInvoked += (sender, e) => - { - invokedFunction = e.FunctionView.Name; - invoked++; - }; - - // Act - var result = await sut.RunAsync( - semanticFunction1, - semanticFunction2); - - // Assert - Assert.Equal(2, invoking); - Assert.Equal(1, invoked); - Assert.Equal("DontSkipMe", invokedFunction); - } - - [Theory] - [InlineData(1)] - [InlineData(2)] - public async Task RunAsyncHandlesPostInvocationAsync(int pipelineCount) - { - // Arrange - var (mockTextResult, mockTextCompletion) = this.SetupMocks(); - var sut = new KernelBuilder().WithAIService(null, mockTextCompletion.Object).Build(); - var semanticFunction = sut.CreateSemanticFunction("Write a simple phrase about UnitTests"); - - var invoked = 0; - - sut.FunctionInvoked += (sender, e) => - { - invoked++; - }; - - List functions = new(); - for (int i = 0; i < pipelineCount; i++) - { - functions.Add(semanticFunction); - } - - // Act - var result = await sut.RunAsync(functions.ToArray()); - - // Assert - Assert.Equal(pipelineCount, invoked); - mockTextCompletion.Verify(m => m.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(pipelineCount)); - } - - [Fact] - public async Task RunAsyncChangeVariableInvokingHandlerAsync() - { - var (mockTextResult, mockTextCompletion) = this.SetupMocks(); - var sut = new KernelBuilder().WithAIService(null, 
mockTextCompletion.Object).Build(); - var prompt = "Write a simple phrase about UnitTests {{$input}}"; - var semanticFunction = sut.CreateSemanticFunction(prompt); - - var originalInput = "Importance"; - var newInput = "Problems"; - - sut.FunctionInvoking += (sender, e) => - { - originalInput = newInput; - }; - - // Act - await sut.RunAsync(originalInput, semanticFunction); - - // Assert - Assert.Equal(newInput, originalInput); - } - - [Fact] - public async Task RunAsyncChangeVariableInvokedHandlerAsync() - { - var (mockTextResult, mockTextCompletion) = this.SetupMocks(); - var sut = new KernelBuilder().WithAIService(null, mockTextCompletion.Object).Build(); - var prompt = "Write a simple phrase about UnitTests {{$input}}"; - var semanticFunction = sut.CreateSemanticFunction(prompt); - - var originalInput = "Importance"; - var newInput = "Problems"; - - sut.FunctionInvoked += (sender, e) => - { - originalInput = newInput; - }; - - // Act - await sut.RunAsync(originalInput, semanticFunction); - - // Assert - Assert.Equal(newInput, originalInput); - } - - [Fact] - public async Task ItReturnsFunctionResultsCorrectlyAsync() - { - // Arrange - [SKName("Function1")] - static string Function1() => "Result1"; - - [SKName("Function2")] - static string Function2() => "Result2"; - - const string PluginName = "MyPlugin"; - const string Prompt = "Write a simple phrase about UnitTests"; - - var (mockTextResult, mockTextCompletion) = this.SetupMocks("Result3"); - var kernel = new KernelBuilder().WithAIService(null, mockTextCompletion.Object).Build(); - - var function1 = SKFunction.FromNativeMethod(Method(Function1), pluginName: PluginName); - var function2 = SKFunction.FromNativeMethod(Method(Function2), pluginName: PluginName); - - var function3 = kernel.CreateSemanticFunction(Prompt, functionName: "Function3", pluginName: PluginName); - - // Act - var kernelResult = await kernel.RunAsync(function1, function2, function3); - - // Assert - Assert.NotNull(kernelResult); - Assert.Equal("Result3", kernelResult.GetValue()); - - var functionResult1 = kernelResult.FunctionResults.First(l => l.FunctionName == "Function1" && l.PluginName == PluginName); - var functionResult2 = kernelResult.FunctionResults.First(l => l.FunctionName == "Function2" && l.PluginName == PluginName); - var functionResult3 = kernelResult.FunctionResults.First(l => l.FunctionName == "Function3" && l.PluginName == PluginName); - - Assert.Equal("Result1", functionResult1.GetValue()); - Assert.Equal("Result2", functionResult2.GetValue()); - Assert.Equal("Result3", functionResult3.GetValue()); - } - - private (Mock textResultMock, Mock textCompletionMock) SetupMocks(string? completionResult = null) - { - var mockTextResult = new Mock(); - mockTextResult.Setup(m => m.GetCompletionAsync(It.IsAny())).ReturnsAsync(completionResult ?? 
"LLM Result about UnitTests"); - - var mockTextCompletion = new Mock(); - mockTextCompletion.Setup(m => m.GetCompletionsAsync(It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextResult.Object }); - - return (mockTextResult, mockTextCompletion); - } - - private static MethodInfo Method(Delegate method) - { - return method.Method; - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/HttpMessageHandlerStub.cs b/dotnet/src/SemanticKernel.UnitTests/HttpMessageHandlerStub.cs index e42837c6a602..f36d48d19f42 100644 --- a/dotnet/src/SemanticKernel.UnitTests/HttpMessageHandlerStub.cs +++ b/dotnet/src/SemanticKernel.UnitTests/HttpMessageHandlerStub.cs @@ -3,7 +3,6 @@ using System; using System.Net.Http; using System.Net.Http.Headers; -using System.Net.Mime; using System.Text; using System.Threading; using System.Threading.Tasks; @@ -27,7 +26,7 @@ internal sealed class HttpMessageHandlerStub : DelegatingHandler public HttpMessageHandlerStub() { this.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); - this.ResponseToReturn.Content = new StringContent("{}", Encoding.UTF8, MediaTypeNames.Application.Json); + this.ResponseToReturn.Content = new StringContent("{}", Encoding.UTF8, "application/json"); } protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) @@ -35,7 +34,12 @@ protected override async Task SendAsync(HttpRequestMessage this.Method = request.Method; this.RequestUri = request.RequestUri; this.RequestHeaders = request.Headers; - this.RequestContent = request.Content == null ? null : await request.Content.ReadAsByteArrayAsync(cancellationToken); + if (request.Content is not null) + { +#pragma warning disable CA2016 // Forward the 'CancellationToken' parameter to methods; overload doesn't exist on .NET Framework + this.RequestContent = await request.Content.ReadAsByteArrayAsync(); +#pragma warning restore CA2016 + } this.ContentHeaders = request.Content?.Headers; return await Task.FromResult(this.ResponseToReturn); diff --git a/dotnet/src/SemanticKernel.UnitTests/KernelExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/KernelExtensionsTests.cs new file mode 100644 index 000000000000..b10ffcfdabc0 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/KernelExtensionsTests.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests; + +public class KernelExtensionsTests +{ + [Theory] + [ClassData(typeof(ComplexObjectTestData))] + public async Task InvokeAsyncOfTShouldMatchFunctionResultValueAsync(object? 
expectedValue) + { + var testFunction = KernelFunctionFactory.CreateFromMethod(() => expectedValue, functionName: "Test"); + + var kernel = new Kernel(); + kernel.Plugins.AddFromFunctions("Fake", "Fake functions", new[] { testFunction }); + + var resultValueInvokeSignature2 = await kernel.InvokeAsync(testFunction); + var resultValueInvokeSignature3 = await kernel.InvokeAsync("Fake", "Test"); + + Assert.Equal(expectedValue, resultValueInvokeSignature2); + Assert.Equal(expectedValue, resultValueInvokeSignature3); + } + + public class ComplexObjectTestData : IEnumerable + { + private readonly List _data = new() + { + new object?[] { null }, + new object?[] { 1 }, + new object?[] { "Bogus" }, + new object?[] { DateTime.Now }, + new object?[] { new { Id = 2, Name = "Object2" } } + }; + + public IEnumerator GetEnumerator() => this._data.GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => this.GetEnumerator(); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/KernelTests.cs b/dotnet/src/SemanticKernel.UnitTests/KernelTests.cs index 362985374269..93f59e9c8588 100644 --- a/dotnet/src/SemanticKernel.UnitTests/KernelTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/KernelTests.cs @@ -3,71 +3,66 @@ using System; using System.Collections.Generic; using System.ComponentModel; +using System.Globalization; using System.Linq; -using System.Reflection; +using System.Net.Http; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.AI.TextCompletion; -using Microsoft.SemanticKernel.Events; -using Microsoft.SemanticKernel.Orchestration; +using Microsoft.SemanticKernel.TextGeneration; using Moq; using Xunit; -// ReSharper disable StringLiteralTypo +#pragma warning disable CS0618 // Events are deprecated namespace SemanticKernel.UnitTests; public class KernelTests { + private const string InputParameterName = "input"; + [Fact] public void ItProvidesAccessToFunctionsViaFunctionCollection() { // Arrange - var factory = new Mock>(); - var kernel = new KernelBuilder() - .WithDefaultAIService(factory.Object) - .Build(); - - var nativePlugin = new MyPlugin(); - kernel.ImportFunctions(nativePlugin, "mySk"); + Kernel kernel = new(); + kernel.Plugins.AddFromType("mySk"); // Act & Assert - 3 functions, var name is not case sensitive - Assert.True(kernel.Functions.TryGetFunction("mySk", "sayhello", out _)); - Assert.True(kernel.Functions.TryGetFunction("MYSK", "SayHello", out _)); - Assert.True(kernel.Functions.TryGetFunction("mySk", "ReadFunctionCollectionAsync", out _)); - Assert.True(kernel.Functions.TryGetFunction("MYSK", "ReadFunctionCollectionAsync", out _)); + Assert.NotNull(kernel.Plugins.GetFunction("mySk", "sayhello")); + Assert.NotNull(kernel.Plugins.GetFunction("MYSK", "SayHello")); + Assert.NotNull(kernel.Plugins.GetFunction("mySk", "ReadFunctionCollectionAsync")); + Assert.NotNull(kernel.Plugins.GetFunction("MYSK", "ReadFunctionCollectionAsync")); } [Fact] - public async Task RunAsyncDoesNotRunWhenCancelledAsync() + public async Task InvokeAsyncDoesNotRunWhenCancelledAsync() { // Arrange - var kernel = new KernelBuilder().Build(); - var nativePlugin = new MyPlugin(); - var functions = kernel.ImportFunctions(nativePlugin, "mySk"); + var kernel = new Kernel(); + var functions = kernel.ImportPluginFromType(); using CancellationTokenSource cts = new(); 
cts.Cancel(); // Act - await Assert.ThrowsAsync(() => kernel.RunAsync(cts.Token, functions["GetAnyValue"])); + await Assert.ThrowsAnyAsync(() => kernel.InvokeAsync(functions["GetAnyValue"], cancellationToken: cts.Token)); } [Fact] - public async Task RunAsyncRunsWhenNotCancelledAsync() + public async Task InvokeAsyncRunsWhenNotCancelledAsync() { // Arrange - var kernel = new KernelBuilder().Build(); - var nativePlugin = new MyPlugin(); - kernel.ImportFunctions(nativePlugin, "mySk"); + var kernel = new Kernel(); + kernel.ImportPluginFromType("mySk"); using CancellationTokenSource cts = new(); // Act - KernelResult result = await kernel.RunAsync(cts.Token, kernel.Functions.GetFunction("mySk", "GetAnyValue")); + var result = await kernel.InvokeAsync(kernel.Plugins.GetFunction("mySk", "GetAnyValue"), cancellationToken: cts.Token); // Assert Assert.False(string.IsNullOrEmpty(result.GetValue())); @@ -77,586 +72,652 @@ public async Task RunAsyncRunsWhenNotCancelledAsync() public void ItImportsPluginsNotCaseSensitive() { // Act - IDictionary functions = new KernelBuilder().Build().ImportFunctions(new MyPlugin(), "test"); + KernelPlugin plugin = new Kernel().ImportPluginFromType(); // Assert - Assert.Equal(3, functions.Count); - Assert.True(functions.ContainsKey("GetAnyValue")); - Assert.True(functions.ContainsKey("getanyvalue")); - Assert.True(functions.ContainsKey("GETANYVALUE")); + Assert.Equal(3, plugin.Count()); + Assert.True(plugin.Contains("GetAnyValue")); + Assert.True(plugin.Contains("getanyvalue")); + Assert.True(plugin.Contains("GETANYVALUE")); } [Fact] - public void ItAllowsToImportFunctionsInTheGlobalNamespace() + public void ItAllowsToImportTheSamePluginMultipleTimes() { // Arrange - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); + + // Act - Assert no exception occurs + kernel.ImportPluginFromType(); + kernel.ImportPluginFromType("plugin1"); + kernel.ImportPluginFromType("plugin2"); + kernel.ImportPluginFromType("plugin3"); + } + + [Fact] + public async Task InvokeAsyncHandlesPreInvocationAsync() + { + // Arrange + var kernel = new Kernel(); + int functionInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var handlerInvocations = 0; + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + { + handlerInvocations++; + }; // Act - IDictionary functions = kernel.ImportFunctions(new MyPlugin()); + var result = await kernel.InvokeAsync(function); // Assert - Assert.Equal(3, functions.Count); - Assert.True(kernel.Functions.TryGetFunction("GetAnyValue", out ISKFunction? functionInstance)); - Assert.NotNull(functionInstance); + Assert.Equal(1, functionInvocations); + Assert.Equal(1, handlerInvocations); } [Fact] - public void ItAllowsToImportTheSamePluginMultipleTimes() + public async Task RunStreamingAsyncHandlesPreInvocationAsync() { // Arrange - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); + int functionInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); - // Act - Assert no exception occurs - kernel.ImportFunctions(new MyPlugin()); - kernel.ImportFunctions(new MyPlugin()); - kernel.ImportFunctions(new MyPlugin()); + var handlerInvocations = 0; + kernel.FunctionInvoking += (object? 
sender, FunctionInvokingEventArgs e) => + { + handlerInvocations++; + }; + + // Act + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) { } + + // Assert + Assert.Equal(1, functionInvocations); + Assert.Equal(1, handlerInvocations); } - [Theory] - [InlineData(1)] - [InlineData(2)] - public async Task RunAsyncHandlesPreInvocationAsync(int pipelineCount) + [Fact] + public async Task RunStreamingAsyncHandlesPreInvocationWasCancelledAsync() + { + // Arrange + var kernel = new Kernel(); + int functionInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + var handlerInvocations = 0; + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + { + handlerInvocations++; + e.Cancel = true; + }; + + // Act + IAsyncEnumerable enumerable = kernel.InvokeStreamingAsync(function); + IAsyncEnumerator enumerator = enumerable.GetAsyncEnumerator(); + var e = await Assert.ThrowsAsync(async () => await enumerator.MoveNextAsync()); + + // Assert + Assert.Equal(1, handlerInvocations); + Assert.Equal(0, functionInvocations); + Assert.Same(function, e.Function); + Assert.Same(kernel, e.Kernel); + Assert.Empty(e.Arguments); + } + + [Fact] + public async Task RunStreamingAsyncPreInvocationCancelationDontTriggerInvokedHandlerAsync() { // Arrange - var sut = new KernelBuilder().Build(); - var myPlugin = new Mock(); - var functions = sut.ImportFunctions(myPlugin.Object, "MyPlugin"); + var kernel = new Kernel(); + var functions = kernel.ImportPluginFromType(); var invoked = 0; - sut.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + { + e.Cancel = true; + }; + + kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => { invoked++; }; - List pipeline = new(); - for (int i = 0; i < pipelineCount; i++) + + // Act + IAsyncEnumerable enumerable = kernel.InvokeStreamingAsync(functions["GetAnyValue"]); + IAsyncEnumerator enumerator = enumerable.GetAsyncEnumerator(); + var e = await Assert.ThrowsAsync(async () => await enumerator.MoveNextAsync()); + + // Assert + Assert.Equal(0, invoked); + } + + [Fact] + public async Task InvokeStreamingAsyncDoesNotHandlePostInvocationAsync() + { + // Arrange + var kernel = new Kernel(); + int functionInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); + + int handlerInvocations = 0; + kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => { - pipeline.Add(functions["SayHello"]); - } + handlerInvocations++; + }; // Act - var result = await sut.RunAsync(pipeline.ToArray()); + await foreach (var chunk in kernel.InvokeStreamingAsync(function)) + { + } // Assert - Assert.Equal(pipelineCount, invoked); - myPlugin.Verify(m => m.SayHello(), Times.Exactly(pipelineCount)); + Assert.Equal(1, functionInvocations); + Assert.Equal(0, handlerInvocations); } [Fact] - public async Task RunAsyncHandlesPreInvocationWasCancelledAsync() + public async Task InvokeAsyncHandlesPreInvocationWasCancelledAsync() { // Arrange - var sut = new KernelBuilder().Build(); - var functions = sut.ImportFunctions(new MyPlugin(), "MyPlugin"); + var kernel = new Kernel(); + int functionInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); - var invoked = false; - sut.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + var handlerInvocations = 0; + kernel.FunctionInvoking += (object? 
sender, FunctionInvokingEventArgs e) => { - invoked = true; - e.Cancel(); + handlerInvocations++; + e.Cancel = true; }; // Act - var result = await sut.RunAsync(functions["GetAnyValue"]); + KernelFunctionCanceledException ex = await Assert.ThrowsAsync(() => kernel.InvokeAsync(function)); // Assert - Assert.True(invoked); - Assert.Null(result.GetValue()); + Assert.Equal(1, handlerInvocations); + Assert.Equal(0, functionInvocations); + Assert.Same(function, ex.Function); + Assert.Null(ex.FunctionResult); } [Fact] - public async Task RunAsyncHandlesPreInvocationCancelationDontRunSubsequentFunctionsInThePipelineAsync() + public async Task InvokeAsyncHandlesPreInvocationCancelationDontRunSubsequentFunctionsInThePipelineAsync() { // Arrange - var sut = new KernelBuilder().Build(); - var (mockTextResult, mockTextCompletion) = this.SetupMocks(); - var myPlugin = new Mock(); - var functions = sut.ImportFunctions(myPlugin.Object, "MyPlugin"); + var kernel = new Kernel(); + int functionInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); - var invoked = 0; - sut.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + int handlerInvocations = 0; + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => { - invoked++; - e.Cancel(); + handlerInvocations++; + e.Cancel = true; }; // Act - var result = await sut.RunAsync(functions["GetAnyValue"], functions["SayHello"]); + KernelFunctionCanceledException ex = await Assert.ThrowsAsync(() => kernel.InvokeAsync(function)); // Assert - Assert.Equal(1, invoked); - myPlugin.Verify(m => m.GetAnyValue(), Times.Never); - myPlugin.Verify(m => m.SayHello(), Times.Never); + Assert.Equal(1, handlerInvocations); + Assert.Equal(0, functionInvocations); + Assert.Same(function, ex.Function); + Assert.Null(ex.FunctionResult); } [Fact] - public async Task RunAsyncPreInvocationCancelationDontTriggerInvokedHandlerAsync() + public async Task InvokeAsyncPreInvocationCancelationDontTriggerInvokedHandlerAsync() { // Arrange - var sut = new KernelBuilder().Build(); - var functions = sut.ImportFunctions(new MyPlugin(), "MyPlugin"); + var kernel = new Kernel(); + var functions = kernel.ImportPluginFromType(); var invoked = 0; - sut.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => { - e.Cancel(); + e.Cancel = true; }; - sut.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => + kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => { invoked++; }; // Act - var result = await sut.RunAsync(functions["GetAnyValue"]); + KernelFunctionCanceledException ex = await Assert.ThrowsAsync(() => kernel.InvokeAsync(functions["GetAnyValue"])); // Assert Assert.Equal(0, invoked); + Assert.Same(functions["GetAnyValue"], ex.Function); + Assert.Null(ex.FunctionResult); } [Fact] - public async Task RunAsyncPreInvocationSkipDontTriggerInvokedHandlerAsync() + public async Task InvokeAsyncHandlesPostInvocationAsync() { // Arrange - var sut = new KernelBuilder().Build(); - var (mockTextResult, mockTextCompletion) = this.SetupMocks(); - var myPlugin = new Mock(); - var functions = sut.ImportFunctions(myPlugin.Object, "MyPlugin"); - - var invoked = 0; - var invoking = 0; - string invokedFunction = string.Empty; + var kernel = new Kernel(); + int functionInvocations = 0; + var function = KernelFunctionFactory.CreateFromMethod(() => functionInvocations++); - sut.FunctionInvoking += (object? 
sender, FunctionInvokingEventArgs e) => + int handlerInvocations = 0; + kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => { - invoking++; - if (e.FunctionView.Name == "GetAnyValue") - { - e.Skip(); - } + handlerInvocations++; }; - sut.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => + // Act + var result = await kernel.InvokeAsync(function); + + // Assert + Assert.Equal(1, functionInvocations); + Assert.Equal(1, handlerInvocations); + } + + [Fact] + public async Task InvokeAsyncHandlesPostInvocationWithServicesAsync() + { + // Arrange + var (mockTextResult, mockTextCompletion) = this.SetupMocks(); + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(mockTextCompletion.Object); + Kernel kernel = builder.Build(); + + var function = KernelFunctionFactory.CreateFromPrompt("Write a simple phrase about UnitTests"); + + var invoked = 0; + + kernel.FunctionInvoked += (sender, e) => { - invokedFunction = e.FunctionView.Name; invoked++; }; // Act - var result = await sut.RunAsync(functions["GetAnyValue"], functions["SayHello"]); + var result = await kernel.InvokeAsync(function); // Assert - Assert.Equal(2, invoking); Assert.Equal(1, invoked); - myPlugin.Verify(m => m.GetAnyValue(), Times.Never); - myPlugin.Verify(m => m.SayHello(), Times.Once); + mockTextCompletion.Verify(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(1)); } - [Theory] - [InlineData(1)] - [InlineData(2)] - public async Task RunAsyncHandlesPostInvocationAsync(int pipelineCount) + [Fact] + public async Task InvokeAsyncHandlesPostInvocationAndCancellationExceptionContainsResultAsync() { // Arrange - var sut = new KernelBuilder().Build(); - var myPlugin = new Mock(); - var functions = sut.ImportFunctions(myPlugin.Object, "MyPlugin"); + var kernel = new Kernel(); + object result = 42; + var function = KernelFunctionFactory.CreateFromMethod(() => result); + var args = new KernelArguments() { { "a", "b" } }; - var invoked = 0; - sut.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => + kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => { - invoked++; + e.Cancel = true; }; - List pipeline = new(); - for (int i = 0; i < pipelineCount; i++) + // Act + KernelFunctionCanceledException ex = await Assert.ThrowsAsync(() => kernel.InvokeAsync(function, args)); + + // Assert + Assert.Same(kernel, ex.Kernel); + Assert.Same(function, ex.Function); + Assert.Same(args, ex.Arguments); + Assert.NotNull(ex.FunctionResult); + Assert.Same(result, ex.FunctionResult.GetValue()); + } + + [Fact] + public async Task InvokeAsyncHandlesPostInvocationAndCancellationExceptionContainsModifiedResultAsync() + { + // Arrange + var kernel = new Kernel(); + object result = 42; + object newResult = 84; + var function = KernelFunctionFactory.CreateFromMethod(() => result); + var args = new KernelArguments() { { "a", "b" } }; + + kernel.FunctionInvoked += (object? 
sender, FunctionInvokedEventArgs e) => { - pipeline.Add(functions["GetAnyValue"]); - } + e.SetResultValue(newResult); + e.Cancel = true; + }; // Act - var result = await sut.RunAsync(pipeline.ToArray()); + KernelFunctionCanceledException ex = await Assert.ThrowsAsync(() => kernel.InvokeAsync(function, args)); // Assert - Assert.Equal(pipelineCount, invoked); - myPlugin.Verify(m => m.GetAnyValue(), Times.Exactly(pipelineCount)); + Assert.Same(kernel, ex.Kernel); + Assert.Same(function, ex.Function); + Assert.Same(args, ex.Arguments); + Assert.NotNull(ex.FunctionResult); + Assert.Same(newResult, ex.FunctionResult.GetValue()); } [Fact] - public async Task RunAsyncChangeVariableInvokingHandlerAsync() + public async Task InvokeAsyncChangeVariableInvokingHandlerAsync() { - var sut = new KernelBuilder().Build(); - var myPlugin = new Mock(); - var functions = sut.ImportFunctions(myPlugin.Object, "MyPlugin"); + var kernel = new Kernel(); + var function = KernelFunctionFactory.CreateFromMethod((string originalInput) => originalInput); var originalInput = "Importance"; var newInput = "Problems"; - sut.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => { - originalInput = newInput; + e.Arguments["originalInput"] = newInput; }; // Act - await sut.RunAsync(originalInput, functions["GetAnyValue"]); + var result = await kernel.InvokeAsync(function, new() { ["originalInput"] = originalInput }); // Assert - Assert.Equal(newInput, originalInput); + Assert.Equal(newInput, result.GetValue()); } [Fact] - public async Task RunAsyncChangeVariableInvokedHandlerAsync() + public async Task InvokeAsyncChangeVariableInvokedHandlerAsync() { - var sut = new KernelBuilder().Build(); - var myPlugin = new Mock(); - var functions = sut.ImportFunctions(myPlugin.Object, "MyPlugin"); + var kernel = new Kernel(); + var function = KernelFunctionFactory.CreateFromMethod(() => { }); var originalInput = "Importance"; var newInput = "Problems"; - sut.FunctionInvoked += (object? sender, FunctionInvokedEventArgs e) => + kernel.FunctionInvoked += (object? 
sender, FunctionInvokedEventArgs e) => { - originalInput = newInput; + e.SetResultValue(newInput); }; // Act - await sut.RunAsync(originalInput, functions["GetAnyValue"]); + var result = await kernel.InvokeAsync(function, new() { [InputParameterName] = originalInput }); // Assert - Assert.Equal(newInput, originalInput); + Assert.Equal(newInput, result.GetValue()); } [Fact] public async Task ItReturnsFunctionResultsCorrectlyAsync() { // Arrange - [SKName("Function1")] - static string Function1() => "Result1"; - - [SKName("Function2")] - static string Function2() => "Result2"; + var kernel = new Kernel(); - const string PluginName = "MyPlugin"; - - var kernel = new KernelBuilder().Build(); - - var function1 = SKFunction.FromNativeMethod(Method(Function1), pluginName: PluginName); - var function2 = SKFunction.FromNativeMethod(Method(Function2), pluginName: PluginName); + var function = KernelFunctionFactory.CreateFromMethod(() => "Result", "Function1"); // Act - var kernelResult = await kernel.RunAsync(function1, function2); - var functionResult1 = kernelResult.FunctionResults.First(l => l.FunctionName == "Function1" && l.PluginName == PluginName); - var functionResult2 = kernelResult.FunctionResults.First(l => l.FunctionName == "Function2" && l.PluginName == PluginName); + var result = await kernel.InvokeAsync(function); // Assert - Assert.NotNull(kernelResult); - Assert.Equal("Result2", kernelResult.GetValue()); - Assert.Equal("Result1", functionResult1.GetValue()); - Assert.Equal("Result2", functionResult2.GetValue()); + Assert.NotNull(result); + Assert.Equal("Result", result.GetValue()); } [Fact] public async Task ItReturnsChangedResultsFromFunctionInvokedEventsAsync() { - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); // Arrange - [SKName("Function1")] - static string Function1() => "Result1"; - var function1 = SKFunction.FromNativeMethod(Method(Function1), pluginName: "MyPlugin"); + var function1 = KernelFunctionFactory.CreateFromMethod(() => "Result1", "Function1"); const string ExpectedValue = "new result"; kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs args) => { - args.SKContext.Variables.Update(ExpectedValue); + args.SetResultValue(ExpectedValue); }; // Act - var kernelResult = await kernel.RunAsync(function1); + var result = await kernel.InvokeAsync(function1); // Assert - Assert.NotNull(kernelResult); - Assert.Equal(ExpectedValue, kernelResult.GetValue()); - Assert.Equal(ExpectedValue, kernelResult.FunctionResults.Single().GetValue()); - Assert.Equal(ExpectedValue, kernelResult.FunctionResults.Single().Context.Result); + Assert.NotNull(result); + Assert.Equal(ExpectedValue, result.GetValue()); } [Fact] public async Task ItReturnsChangedResultsFromFunctionInvokingEventsAsync() { // Arrange - var kernel = new KernelBuilder().Build(); + var kernel = new Kernel(); - [SKName("Function1")] - static string Function1(SKContext context) => context.Variables["injected variable"]; - var function1 = SKFunction.FromNativeMethod(Method(Function1), pluginName: "MyPlugin"); + var function1 = KernelFunctionFactory.CreateFromMethod((string injectedVariable) => injectedVariable, "Function1"); const string ExpectedValue = "injected value"; kernel.FunctionInvoking += (object? 
sender, FunctionInvokingEventArgs args) => { - args.SKContext.Variables["injected variable"] = ExpectedValue; + args.Arguments["injectedVariable"] = ExpectedValue; }; // Act - var kernelResult = await kernel.RunAsync(function1); + var result = await kernel.InvokeAsync(function1); // Assert - Assert.NotNull(kernelResult); - Assert.Equal(ExpectedValue, kernelResult.GetValue()); - Assert.Equal(ExpectedValue, kernelResult.FunctionResults.Single().GetValue()); - Assert.Equal(ExpectedValue, kernelResult.FunctionResults.Single().Context.Result); + Assert.NotNull(result); + Assert.Equal(ExpectedValue, result.GetValue()); } - [Theory] - [InlineData("Function1", 5)] - [InlineData("Function2", 1)] - public async Task ItRepeatsFunctionInvokedEventsAsync(string retryFunction, int numberOfRepeats) + [Fact] + public async Task ItCanFindAndRunFunctionAsync() { - // Arrange - var kernel = new KernelBuilder().Build(); + //Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "fake result", "function"); - [SKName("Function1")] - static string Function1(SKContext context) => "Result1"; - var function1 = SKFunction.FromNativeMethod(Method(Function1), pluginName: "MyPlugin"); + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("plugin", new[] { function }); - [SKName("Function2")] - static string Function2(SKContext context) => "Result2"; - var function2 = SKFunction.FromNativeMethod(Method(Function2), pluginName: "MyPlugin"); + //Act + var result = await kernel.InvokeAsync("plugin", "function"); - int numberOfInvocations = 0; - int repeatCount = 0; + //Assert + Assert.NotNull(result); + Assert.Equal("fake result", result.GetValue()); + } - kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs args) => - { - if (args.FunctionView.Name == retryFunction && repeatCount < numberOfRepeats) - { - args.Repeat(); - repeatCount++; - } + [Fact] + public void ItShouldBePossibleToSetAndGetCultureAssociatedWithKernel() + { + //Arrange + var kernel = new Kernel(); - numberOfInvocations++; - }; + var culture = CultureInfo.GetCultureInfo(28); - // Act - var kernelResult = await kernel.RunAsync(function1, function2); + //Act + kernel.Culture = culture; - // Assert - Assert.NotNull(kernelResult); - Assert.Equal(2 + numberOfRepeats, kernelResult.FunctionResults.Count); - Assert.Equal(2 + numberOfRepeats, numberOfInvocations); + //Assert + Assert.Equal(culture, kernel.Culture); } - [Theory] - [InlineData("Function1", "Result2 Result3")] - [InlineData("Function2", "Result1 Result3")] - [InlineData("Function3", "Result1 Result2")] - public async Task ItSkipsFunctionsFromFunctionInvokingEventsAsync(string skipFunction, string expectedResult) + [Fact] + public void ItDefaultsLoggerFactoryToNullLoggerFactory() { - // Arrange - [SKName("Function1")] - static string Function1(string input) => input + " Result1"; - - [SKName("Function2")] - static string Function2(string input) => input + " Result2"; + //Arrange + var kernel = new Kernel(); - [SKName("Function3")] - static string Function3(string input) => input + " Result3"; - - const string PluginName = "MyPlugin"; - - var kernel = new KernelBuilder().Build(); + //Assert + Assert.Same(NullLoggerFactory.Instance, kernel.LoggerFactory); + } - var function1 = SKFunction.FromNativeMethod(Method(Function1), pluginName: PluginName); - var function2 = SKFunction.FromNativeMethod(Method(Function2), pluginName: PluginName); - var function3 = SKFunction.FromNativeMethod(Method(Function3), pluginName: PluginName); + [Fact] + public void 
ItDefaultsDataToEmptyDictionary() + { + //Arrange + var kernel = new Kernel(); - const int ExpectedInvocations = 2; + //Assert + Assert.Empty(kernel.Data); + } - int numberOfInvocations = 0; + [Fact] + public void ItDefaultsPluginsToEmptyCollection() + { + //Arrange + var kernel = new Kernel(); - kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs args) => - { - if (args.FunctionView.Name == skipFunction) - { - args.Skip(); - } - }; + //Assert + Assert.Empty(kernel.Plugins); + } - kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs args) => - { - numberOfInvocations++; - }; + [Fact] + public void InvariantCultureShouldBeReturnedIfNoCultureWasAssociatedWithKernel() + { + //Arrange + var kernel = new Kernel(); - // Act - var kernelResult = await kernel.RunAsync(string.Empty, function1, function2, function3); + //Act + var culture = kernel.Culture; - // Assert - Assert.NotNull(kernelResult); - Assert.Equal(expectedResult, kernelResult.GetValue()!.Trim()); - Assert.Equal(ExpectedInvocations, numberOfInvocations); - Assert.Equal(ExpectedInvocations, kernelResult.FunctionResults.Count); + //Assert + Assert.Same(CultureInfo.InvariantCulture, culture); } - [Theory] - [InlineData(1, 0, 0)] - [InlineData(2, 0, 0)] - [InlineData(2, 1, 1)] - [InlineData(5, 2, 2)] - public async Task ItCancelsPipelineFromFunctionInvokingEventsAsync(int numberOfFunctions, int functionCancelIndex, int expectedInvocations) + [Fact] + public void ItDeepClonesAllRelevantStateInClone() { - List functions = new(); - const string PluginName = "MyPlugin"; + // Kernel with all properties set + var serviceSelector = new Mock(); + var loggerFactory = new Mock(); + var serviceProvider = new ServiceCollection() + .AddSingleton(serviceSelector.Object) +#pragma warning disable CA2000 // Dispose objects before losing scope + .AddSingleton(new HttpClient()) +#pragma warning restore CA2000 + .AddSingleton(loggerFactory.Object) + .AddSingleton(new MyFunctionFilter()) + .AddSingleton(new MyPromptFilter()) + .BuildServiceProvider(); + var plugin = KernelPluginFactory.CreateFromFunctions("plugin1"); + var plugins = new KernelPluginCollection() { plugin }; + Kernel kernel1 = new(serviceProvider, plugins); + kernel1.Data["key"] = "value"; + + // Clone and validate it + Kernel kernel2 = kernel1.Clone(); + Assert.Same(kernel1.Services, kernel2.Services); + Assert.Same(kernel1.Culture, kernel2.Culture); + Assert.NotSame(kernel1.Data, kernel2.Data); + Assert.Equal(kernel1.Data.Count, kernel2.Data.Count); + Assert.Equal(kernel1.Data["key"], kernel2.Data["key"]); + Assert.NotSame(kernel1.Plugins, kernel2.Plugins); + Assert.Equal(kernel1.Plugins, kernel2.Plugins); + this.AssertFilters(kernel1, kernel2); + + // Minimally configured kernel + Kernel kernel3 = new(); + + // Clone and validate it + Kernel kernel4 = kernel3.Clone(); + Assert.Same(kernel3.Services, kernel4.Services); + Assert.NotSame(kernel3.Data, kernel4.Data); + Assert.Empty(kernel4.Data); + Assert.NotSame(kernel1.Plugins, kernel2.Plugins); + Assert.Empty(kernel4.Plugins); + this.AssertFilters(kernel3, kernel4); + } + [Fact] + public async Task InvokeStreamingAsyncCallsConnectorStreamingApiAsync() + { // Arrange - [SKName("Function1")] - static string Function1() => "Result1"; - functions.Add(SKFunction.FromNativeMethod(Method(Function1), pluginName: PluginName)); - - [SKName("Function2")] - static string Function2() => "Result2"; - functions.Add(SKFunction.FromNativeMethod(Method(Function2), pluginName: PluginName)); - - [SKName("Function3")] - static 
string Function3() => "Result3"; - functions.Add(SKFunction.FromNativeMethod(Method(Function3), pluginName: PluginName)); - - [SKName("Function4")] - static string Function4() => "Result4"; - functions.Add(SKFunction.FromNativeMethod(Method(Function4), pluginName: PluginName)); - - [SKName("Function5")] - static string Function5() => "Result5"; - functions.Add(SKFunction.FromNativeMethod(Method(Function5), pluginName: PluginName)); - - var kernel = new KernelBuilder().Build(); - - int numberOfInvocations = 0; - - kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs args) => - { - if (args.FunctionView.Name == functions[functionCancelIndex].Name) - { - args.Cancel(); - } - }; - - kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs args) => - { - numberOfInvocations++; - }; - + var mockTextCompletion = this.SetupStreamingMocks( + new StreamingTextContent("chunk1"), + new StreamingTextContent("chunk2")); + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(mockTextCompletion.Object); + Kernel kernel = builder.Build(); + var prompt = "Write a simple phrase about UnitTests {{$input}}"; + var sut = KernelFunctionFactory.CreateFromPrompt(prompt); + var variables = new KernelArguments() { [InputParameterName] = "importance" }; + + var chunkCount = 0; // Act - var kernelResult = await kernel.RunAsync(functions.Take(numberOfFunctions).ToArray()); + await foreach (var chunk in sut.InvokeStreamingAsync(kernel, variables)) + { + chunkCount++; + } // Assert - Assert.NotNull(kernelResult); - - // Kernel result is the same as the last invoked function - Assert.Equal(expectedInvocations, numberOfInvocations); - Assert.Equal(expectedInvocations, kernelResult.FunctionResults.Count); + Assert.Equal(2, chunkCount); + mockTextCompletion.Verify(m => m.GetStreamingTextContentsAsync(It.IsIn("Write a simple phrase about UnitTests importance"), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(1)); } - [Theory] - [InlineData(1, 0, 1)] - [InlineData(2, 0, 1)] - [InlineData(2, 1, 2)] - [InlineData(5, 2, 3)] - public async Task ItCancelsPipelineFromFunctionInvokedEventsAsync(int numberOfFunctions, int functionCancelIndex, int expectedInvocations) + private (TextContent mockTextContent, Mock textCompletionMock) SetupMocks(string? completionResult = null) { - List functions = new(); - const string PluginName = "MyPlugin"; - - // Arrange - [SKName("Function1")] - static string Function1() => "Result1"; - functions.Add(SKFunction.FromNativeMethod(Method(Function1), pluginName: PluginName)); + var mockTextContent = new TextContent(completionResult ?? 
"LLM Result about UnitTests"); - [SKName("Function2")] - static string Function2() => "Result2"; - functions.Add(SKFunction.FromNativeMethod(Method(Function2), pluginName: PluginName)); + var mockTextCompletion = new Mock(); + mockTextCompletion.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent }); + return (mockTextContent, mockTextCompletion); + } - [SKName("Function3")] - static string Function3() => "Result3"; - functions.Add(SKFunction.FromNativeMethod(Method(Function3), pluginName: PluginName)); + private Mock SetupStreamingMocks(params StreamingTextContent[] streamingContents) + { + var mockTextCompletion = new Mock(); + mockTextCompletion.Setup(m => m.GetStreamingTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).Returns(streamingContents.ToAsyncEnumerable()); - [SKName("Function4")] - static string Function4() => "Result4"; - functions.Add(SKFunction.FromNativeMethod(Method(Function4), pluginName: PluginName)); + return mockTextCompletion; + } - [SKName("Function5")] - static string Function5() => "Result5"; - functions.Add(SKFunction.FromNativeMethod(Method(Function5), pluginName: PluginName)); + private void AssertFilters(Kernel kernel1, Kernel kernel2) + { + var functionFilters1 = kernel1.GetAllServices().ToArray(); + var promptFilters1 = kernel1.GetAllServices().ToArray(); - var kernel = new KernelBuilder().Build(); + var functionFilters2 = kernel2.GetAllServices().ToArray(); + var promptFilters2 = kernel2.GetAllServices().ToArray(); - int numberOfInvocations = 0; + Assert.Equal(functionFilters1.Length, functionFilters2.Length); - kernel.FunctionInvoked += (object? sender, FunctionInvokedEventArgs args) => + for (var i = 0; i < functionFilters1.Length; i++) { - numberOfInvocations++; - if (args.FunctionView.Name == functions[functionCancelIndex].Name) - { - args.Cancel(); - } - }; - - // Act - var kernelResult = await kernel.RunAsync(functions.Take(numberOfFunctions).ToArray()); + Assert.Same(functionFilters1[i], functionFilters2[i]); + } - // Assert - Assert.NotNull(kernelResult); + Assert.Equal(promptFilters1.Length, promptFilters2.Length); - // Kernel result is the same as the last invoked function - Assert.Equal($"Result{functionCancelIndex + 1}", kernelResult.GetValue()); - Assert.Equal(expectedInvocations, numberOfInvocations); + for (var i = 0; i < promptFilters1.Length; i++) + { + Assert.Same(promptFilters1[i], promptFilters2[i]); + } } public class MyPlugin { - [SKFunction, Description("Return any value.")] + [KernelFunction, Description("Return any value.")] public virtual string GetAnyValue() { return Guid.NewGuid().ToString(); } - [SKFunction, Description("Just say hello")] + [KernelFunction, Description("Just say hello")] public virtual void SayHello() { Console.WriteLine("Hello folks!"); } - [SKFunction, Description("Export info."), SKName("ReadFunctionCollectionAsync")] - public async Task ReadFunctionCollectionAsync(SKContext context) + [KernelFunction("ReadFunctionCollectionAsync"), Description("Export info.")] + public async Task ReadFunctionCollectionAsync(Kernel kernel) { await Task.Delay(0); - - if (context.Functions == null) - { - Assert.Fail("Functions collection is missing"); - } - - foreach (var function in context.Functions.GetFunctionViews()) - { - context.Variables[$"{function.PluginName}.{function.Name}"] = function.Description; - } - - return context; + Assert.NotNull(kernel.Plugins); } } - private (Mock textResultMock, Mock textCompletionMock) 
SetupMocks(string? completionResult = null) + private sealed class MyFunctionFilter : IFunctionFilter { - var mockTextResult = new Mock<ITextResult>(); - mockTextResult.Setup(m => m.GetCompletionAsync(It.IsAny<CancellationToken>())).ReturnsAsync(completionResult ?? "LLM Result about UnitTests"); + public void OnFunctionInvoked(FunctionInvokedContext context) + { } - var mockTextCompletion = new Mock<ITextCompletion>(); - mockTextCompletion.Setup(m => m.GetCompletionsAsync(It.IsAny<string>(), It.IsAny<AIRequestSettings>(), It.IsAny<CancellationToken>())).ReturnsAsync(new List<ITextResult> { mockTextResult.Object }); - - return (mockTextResult, mockTextCompletion); + public void OnFunctionInvoking(FunctionInvokingContext context) + { } } - private static MethodInfo Method(Delegate method) + private sealed class MyPromptFilter : IPromptFilter { - return method.Method; + public void OnPromptRendered(PromptRenderedContext context) + { } + + public void OnPromptRendering(PromptRenderingContext context) + { } } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Memory/MemoryRecordTests.cs b/dotnet/src/SemanticKernel.UnitTests/Memory/MemoryRecordTests.cs index d1200af829c6..b6dafc228a5e 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Memory/MemoryRecordTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Memory/MemoryRecordTests.cs @@ -173,8 +173,10 @@ public void ItCanBeSerialized() // Act string serializedRecord = JsonSerializer.Serialize(memoryRecord); - jsonString = jsonString.Replace("\n", string.Empty, StringComparison.Ordinal); - jsonString = jsonString.Replace(" ", string.Empty, StringComparison.Ordinal); +#pragma warning disable CA1307 // Specify StringComparison for clarity; overload not available on .NET Framework + jsonString = jsonString.Replace("\n", string.Empty); + jsonString = jsonString.Replace(" ", string.Empty); +#pragma warning restore CA1307 // Assert Assert.Equal(jsonString, serializedRecord); @@ -204,8 +206,10 @@ public void ItsMetadataCanBeSerialized() // Act string serializedMetadata = memoryRecord.GetSerializedMetadata(); - jsonString = jsonString.Replace("\n", string.Empty, StringComparison.Ordinal); - jsonString = jsonString.Replace(" ", string.Empty, StringComparison.Ordinal); +#pragma warning disable CA1307 // Specify StringComparison for clarity; overload not available on .NET Framework + jsonString = jsonString.Replace("\n", string.Empty); + jsonString = jsonString.Replace(" ", string.Empty); +#pragma warning restore CA1307 // Assert Assert.Equal(jsonString, serializedMetadata); diff --git a/dotnet/src/SemanticKernel.UnitTests/Orchestration/ContextVariablesConverterTests.cs b/dotnet/src/SemanticKernel.UnitTests/Orchestration/ContextVariablesConverterTests.cs deleted file mode 100644 index 327669744c1d..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Orchestration/ContextVariablesConverterTests.cs +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Text.Json; -using Microsoft.SemanticKernel.Orchestration; -using Xunit; - -namespace SemanticKernel.UnitTests.Orchestration; - -/// -/// Unit tests of <see cref="ContextVariablesConverter"/>.
-/// -public class ContextVariablesConverterTests -{ - [Fact] - public void ReadFromJsonSucceeds() - { - // Arrange - string json = /*lang=json,strict*/ @"[{""Key"":""a"", ""Value"":""b""}]"; - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - - // Act - var result = JsonSerializer.Deserialize(json, options); - - // Assert - Assert.Equal("b", result!["a"]); - Assert.Throws(() => result!["INPUT"]); - Assert.Equal(string.Empty, result!.Input); - } - - [Fact] - public void ReadFromJsonWrongTypeThrows() - { - // Arrange - string json = /*lang=json,strict*/ @"[{""Key"":""a"", ""Value"":""b""}]"; - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - - // Act and Assert - Assert.Throws(() => JsonSerializer.Deserialize>(json, options)); - } - - [Fact] - public void ReadFromJsonSucceedsWithInput() - { - // Arrange - string json = /*lang=json,strict*/ @"[{""Key"":""INPUT"", ""Value"":""c""}, {""Key"":""a"", ""Value"":""b""}]"; - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - - // Act - var result = JsonSerializer.Deserialize(json, options); - - // Assert - Assert.Equal("b", result!["a"]); - Assert.Equal("c", result!["INPUT"]); - } - - // input value - // params key/value - [Theory] - [InlineData("", new[] { "a", "b" }, new[] - { - /*lang=json,strict*/ @"{""Key"":""INPUT"",""Value"":""""}", /*lang=json,strict*/ @"{""Key"":""a"",""Value"":""b""}" - })] - [InlineData("c", new[] { "a", "b" }, new[] - { - /*lang=json,strict*/ @"{""Key"":""INPUT"",""Value"":""c""}", /*lang=json,strict*/ @"{""Key"":""a"",""Value"":""b""}" - })] - [InlineData("c", new[] { "a", "b", "d", "e" }, new[] - { - /*lang=json,strict*/ @"{""Key"":""INPUT"",""Value"":""c""}", /*lang=json,strict*/ @"{""Key"":""a"",""Value"":""b""}", /*lang=json,strict*/ - @"{""Key"":""d"",""Value"":""e""}" - })] - public void WriteToJsonSucceeds(string inputValue, IList contextToSet, IList expectedJson) - { - // Arrange - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - var context = new ContextVariables(); - if (inputValue != null) - { - context.Update(inputValue); - } - - for (int i = 0; i < contextToSet.Count; i += 2) - { - context.Set(contextToSet[i], contextToSet[i + 1]); - } - - // Act - string result = JsonSerializer.Serialize(context, options); - - // Assert - foreach (string key in expectedJson) - { - Assert.Contains(key, result, StringComparison.Ordinal); - } - } - - [Fact] - public void WriteToJsonSucceedsAfterClearing() - { - // Arrange - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - var context = new ContextVariables(); - context.Set("a", "b"); - context.Set("INPUT", "c"); - context.Set("d", "e"); - context.Set("f", "ThingToBeCleared"); - context.Set("f", null); - context.Set("g", string.Empty); - - // Act - string result = JsonSerializer.Serialize(context, options); - - // Assert - Assert.Contains( /*lang=json,strict*/ @"{""Key"":""INPUT"",""Value"":""c""}", result, StringComparison.Ordinal); - Assert.Contains( /*lang=json,strict*/ @"{""Key"":""a"",""Value"":""b""}", result, StringComparison.Ordinal); - Assert.Contains( /*lang=json,strict*/ @"{""Key"":""d"",""Value"":""e""}", result, StringComparison.Ordinal); - Assert.DoesNotContain(@"""Key"":""f""", result, StringComparison.Ordinal); - Assert.Contains( /*lang=json,strict*/ @"{""Key"":""g"",""Value"":""""}", result, 
StringComparison.Ordinal); - } - - // Error Tests - [Fact] - public void ReadFromJsonReturnsNullWithNull() - { - // Arrange - string json = /*lang=json,strict*/ "null"; - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - - // Act - var result = JsonSerializer.Deserialize(json, options); - - // Assert - Assert.Null(result); - } - - [Fact] - public void ReadFromJsonReturnsDefaultWithEmpty() - { - // Arrange - string json = /*lang=json,strict*/ "[]"; - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - - // Act - var result = JsonSerializer.Deserialize(json, options); - - // Assert - Assert.NotNull(result); - Assert.Throws(() => result!["INPUT"]); - Assert.Equal(string.Empty, result!.Input); - } - - [Fact] - public void ReadFromJsonThrowsWithInvalidJson() - { - // Arrange -#pragma warning disable JSON001 // Invalid JSON pattern - string json = /*lang=json,strict*/ @"[{""Key"":""a"", ""Value"":""b"""; -#pragma warning restore JSON001 // Invalid JSON pattern - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - - // Act & Assert - Assert.Throws(() => JsonSerializer.Deserialize(json, options)); - } - - [Fact] - public void ReadFromJsonThrowsWithInvalidJson2() - { - // Arrange - string json = /*lang=json,strict*/ @"[{""Keys"":""a"", ""Value"":""b""}]"; - var options = new JsonSerializerOptions(); - options.Converters.Add(new ContextVariablesConverter()); - - // Act & Assert - Assert.Throws(() => JsonSerializer.Deserialize(json, options)); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Orchestration/ContextVariablesTests.cs b/dotnet/src/SemanticKernel.UnitTests/Orchestration/ContextVariablesTests.cs deleted file mode 100644 index e8d775888482..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Orchestration/ContextVariablesTests.cs +++ /dev/null @@ -1,281 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using Microsoft.SemanticKernel.Orchestration; -using Xunit; - -namespace SemanticKernel.UnitTests.Orchestration; - -/// -/// Unit tests of . 
-/// -public class ContextVariablesTests -{ - [Fact] - public void EnumerationOfContextVariableVariablesSucceeds() - { - // Arrange - string firstName = Guid.NewGuid().ToString(); - string firstValue = Guid.NewGuid().ToString(); - string secondName = Guid.NewGuid().ToString(); - string secondValue = Guid.NewGuid().ToString(); - - // Act - ContextVariables target = new(); - target.Set(firstName, firstValue); - target.Set(secondName, secondValue); - - // Assert - var items = target.ToArray(); - - Assert.Single(items.Where(i => i.Key == firstName && i.Value == firstValue)); - Assert.Single(items.Where(i => i.Key == secondName && i.Value == secondValue)); - } - - [Fact] - public void IndexGetAfterIndexSetSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyValue = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target[anyName] = anyValue; - - // Assert - Assert.Equal(anyValue, target[anyName]); - } - - [Fact] - public void IndexGetWithoutSetThrowsKeyNotFoundException() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act,Assert - Assert.Throws(() => target[anyName]); - } - - [Fact] - public void IndexSetAfterIndexSetSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyValue = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target[anyName] = anyValue; - target[anyName] = anyValue; - - // Assert - Assert.Equal(anyValue, target[anyName]); - } - - [Fact] - public void IndexSetWithoutGetSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyValue = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target[anyName] = anyValue; - - // Assert - Assert.Equal(anyValue, target[anyName]); - } - - [Fact] - public void SetAfterIndexSetSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target[anyName] = anyContent; - target.Set(anyName, anyContent); - - // Assert - Assert.True(target.TryGetValue(anyName, out string? _)); - } - - [Fact] - public void SetAfterSetSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target.Set(anyName, anyContent); - target.Set(anyName, anyContent); - - // Assert - Assert.True(target.TryGetValue(anyName, out string? _)); - } - - [Fact] - public void SetBeforeIndexSetSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target.Set(anyName, anyContent); - target[anyName] = anyContent; - - // Assert - Assert.True(target.TryGetValue(anyName, out string? _)); - } - - [Fact] - public void SetBeforeSetSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target.Set(anyName, anyContent); - target.Set(anyName, anyContent); - - // Assert - Assert.True(target.TryGetValue(anyName, out string? 
_)); - } - - [Fact] - public void SetWithoutGetSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target.Set(anyName, anyContent); - - // Assert - Assert.True(target.TryGetValue(anyName, out string? _)); - } - - [Fact] - public void SetWithoutLabelSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target.Set(anyName, anyContent); - - // Assert - Assert.True(target.TryGetValue(anyName, out string? _)); - } - - [Fact] - public void SetWithNullValueErasesSucceeds() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - target.Set(anyName, anyContent); - - // Assert - AssertContextVariable(target, anyName, anyContent); - - // Act - Erase entry - target.Set(anyName, null); - - // Assert - Should have been erased - Assert.False(target.TryGetValue(anyName, out string? _)); - } - - [Fact] - public void GetWithStringSucceeds() - { - // Arrange - string mainContent = Guid.NewGuid().ToString(); - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - ContextVariables target = new(mainContent); - - // Act - target.Set(anyName, anyContent); - - // Assert - Assert.True(target.TryGetValue(anyName, out string? value)); - Assert.Equal(anyContent, value); - Assert.Equal(mainContent, target.Input); - } - - [Fact] - public void GetNameThatDoesNotExistReturnsFalse() - { - // Arrange - string anyName = Guid.NewGuid().ToString(); - ContextVariables target = new(); - - // Act - var exists = target.TryGetValue(anyName, out string? value); - - // Assert - Assert.False(exists); - Assert.Null(value); - } - - [Fact] - public void UpdateOriginalDoesNotAffectClonedSucceeds() - { - // Arrange - string mainContent = Guid.NewGuid().ToString(); - string anyName = Guid.NewGuid().ToString(); - string anyContent = Guid.NewGuid().ToString(); - string someOtherMainContent = Guid.NewGuid().ToString(); - string someOtherContent = Guid.NewGuid().ToString(); - - ContextVariables original = new(mainContent); - original.Set(anyName, anyContent); - - // Act - // Clone original into target - ContextVariables target = original.Clone(); - // Update original - original.Update(someOtherMainContent); - original.Set(anyName, someOtherContent); - - // Assert - // Target should be the same as the original before the update - AssertContextVariable(target, ContextVariables.MainKey, mainContent); - AssertContextVariable(target, anyName, anyContent); - // Original should have been updated - AssertContextVariable(original, ContextVariables.MainKey, someOtherMainContent); - AssertContextVariable(original, anyName, someOtherContent); - } - - private static void AssertContextVariable(ContextVariables variables, string name, string expectedValue) - { - var exists = variables.TryGetValue(name, out string? 
value); - - // Assert the variable exists - Assert.True(exists); - // Assert the value matches - Assert.NotNull(value); - Assert.Equal(expectedValue, value); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Orchestration/FunctionResultTests.cs b/dotnet/src/SemanticKernel.UnitTests/Orchestration/FunctionResultTests.cs deleted file mode 100644 index 949d9fda28d1..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Orchestration/FunctionResultTests.cs +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.Orchestration; - -/// -/// Unit tests of . -/// -public class FunctionResultTests -{ - private readonly Mock _functionRunner = new(); - private readonly Mock _serviceProvider = new(); - private readonly Mock _serviceSelector = new(); - - private SKContext CreateContext() - { - return new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object); - } - - [Fact] - public void TryGetMetadataValueReturnsTrueWhenKeyExists() - { - // Arrange - string key = Guid.NewGuid().ToString(); - string value = Guid.NewGuid().ToString(); - FunctionResult target = new("functionName", "pluginName", this.CreateContext()); - - // Act - target.Metadata.Add(key, value); - - // Assert - Assert.True(target.TryGetMetadataValue(key, out string result)); - Assert.Equal(value, result); - } - - [Fact] - public void TryGetMetadataValueReturnsFalseWhenKeyDoesNotExist() - { - // Arrange - string key = Guid.NewGuid().ToString(); - FunctionResult target = new("functionName", "pluginName", this.CreateContext()); - - // Act,Assert - Assert.False(target.TryGetMetadataValue(key, out string result)); - Assert.Null(result); - } - - [Fact] - public void TryGetMetadataValueReturnsFalseWhenKeyExistsButTypeDoesNotMatch() - { - // Arrange - string key = Guid.NewGuid().ToString(); - int value = 42; - FunctionResult target = new("functionName", "pluginName", this.CreateContext()); - - // Act - target.Metadata.Add(key, value); - - // Assert - Assert.False(target.TryGetMetadataValue(key, out string result)); - Assert.Null(result); - } - - [Fact] - public void GetValueReturnsValueWhenValueIsNotNull() - { - // Arrange - string value = Guid.NewGuid().ToString(); - FunctionResult target = new("functionName", "pluginName", this.CreateContext(), value); - - // Act,Assert - Assert.Equal(value, target.GetValue()); - } - - [Fact] - public void GetValueReturnsNullWhenValueIsNull() - { - // Arrange - FunctionResult target = new("functionName", "pluginName", this.CreateContext(), null); - - // Act,Assert - Assert.Null(target.GetValue()); - } - - [Fact] - public void GetValueThrowsWhenValueIsNotNullButTypeDoesNotMatch() - { - // Arrange - int value = 42; - FunctionResult target = new("functionName", "pluginName", this.CreateContext(), value); - - // Act,Assert - Assert.Throws(() => target.GetValue()); - } - - [Fact] - public void ConstructorSetsProperties() - { - // Arrange - string functionName = Guid.NewGuid().ToString(); - string pluginName = Guid.NewGuid().ToString(); - SKContext context = this.CreateContext(); - - // Act - FunctionResult target = new(functionName, pluginName, context); - - // Assert - Assert.Equal(functionName, target.FunctionName); - Assert.Equal(pluginName, target.PluginName); - Assert.Equal(context, target.Context); - } - - [Fact] - public void 
ConstructorSetsPropertiesAndValue() - { - // Arrange - string functionName = Guid.NewGuid().ToString(); - string pluginName = Guid.NewGuid().ToString(); - SKContext context = this.CreateContext(); - string value = Guid.NewGuid().ToString(); - - // Act - FunctionResult target = new(functionName, pluginName, context, value); - - // Assert - Assert.Equal(functionName, target.FunctionName); - Assert.Equal(pluginName, target.PluginName); - Assert.Equal(context, target.Context); - Assert.Equal(value, target.Value); - } - - [Fact] - public void ToStringWorksCorrectly() - { - // Arrange - string value = Guid.NewGuid().ToString(); - FunctionResult target = new("functionName", "pluginName", this.CreateContext(), value); - - // Act and Assert - Assert.Equal(value, target.ToString()); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Orchestration/KernelResultTests.cs b/dotnet/src/SemanticKernel.UnitTests/Orchestration/KernelResultTests.cs deleted file mode 100644 index 2181cd6e8ab0..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Orchestration/KernelResultTests.cs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.Orchestration; - -/// -/// Unit tests for class. -/// -public class KernelResultTests -{ - private readonly Mock _functionRunner = new(); - private readonly Mock _serviceProvider = new(); - private readonly Mock _serviceSelector = new(); - private readonly SKContext _context; - - public KernelResultTests() - { - this._context = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object); - } - - [Fact] - public void ItReturnsCorrectValuesFromFunctionResults() - { - // Arrange - var functionResults = new List - { - new("function1", "plugin1", this._context, "value1"), - new("function2", "plugin2", this._context, "value2"), - }; - - // Act - var kernelResult = KernelResult.FromFunctionResults("value2", functionResults); - var actualFunctionResults = kernelResult.FunctionResults.ToList(); - - // Assert - Assert.Equal("value2", kernelResult.GetValue()); - Assert.Equal(functionResults.Count, actualFunctionResults.Count); - - for (var i = 0; i < functionResults.Count; i++) - { - this.AssertFunctionResult(functionResults[i], actualFunctionResults[i]); - } - } - - [Fact] - public void ToStringWorksCorrectly() - { - // Arrange - var kernelResult = KernelResult.FromFunctionResults("value", Array.Empty()); - - // Act and Assert - Assert.Equal("value", kernelResult.ToString()); - } - - private void AssertFunctionResult(FunctionResult expected, FunctionResult actual) - { - Assert.Equal(expected.FunctionName, actual.FunctionName); - Assert.Equal(expected.PluginName, actual.PluginName); - Assert.Equal(expected.Context, actual.Context); - Assert.Equal(expected.Value, actual.Value); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Planning/PlanSerializationTests.cs b/dotnet/src/SemanticKernel.UnitTests/Planning/PlanSerializationTests.cs deleted file mode 100644 index b1c3bcb934e6..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Planning/PlanSerializationTests.cs +++ /dev/null @@ -1,622 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Globalization; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Planning; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.Planning; - -public sealed class PlanSerializationTests -{ - private readonly Mock _functionRunner = new(); - private readonly Mock _serviceProvider = new(); - private readonly Mock _serviceSelector = new(); - - [Fact] - public void CanSerializePlan() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var expectedSteps = "\"steps\":[]"; - var plan = new Plan(goal); - - // Act - var serializedPlan = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan); - Assert.NotEmpty(serializedPlan); - Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); - Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void CanSerializePlanWithGoalAndSteps() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var expectedSteps = "\"steps\":[{"; - var plan = new Plan(goal, new Mock().Object, new Mock().Object); - - // Act - var serializedPlan = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan); - Assert.NotEmpty(serializedPlan); - Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); - Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void CanSerializePlanWithGoalAndSubPlans() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var expectedSteps = "\"steps\":[{"; - var plan = new Plan(goal, new Plan("Write a poem or joke"), new Plan("Send it in an e-mail to Kai")); - - // Act - var serializedPlan = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan); - Assert.NotEmpty(serializedPlan); - Assert.Contains($"\"description\":\"{goal}\"", serializedPlan, StringComparison.OrdinalIgnoreCase); - Assert.Contains("\"description\":\"Write a poem or joke\"", serializedPlan, StringComparison.OrdinalIgnoreCase); - Assert.Contains("\"description\":\"Send it in an e-mail to Kai\"", serializedPlan, StringComparison.OrdinalIgnoreCase); - Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void CanSerializePlanWithPlanStep() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var stepOutput = "Output: The input was: "; - var expectedSteps = "\"steps\":[{"; - var plan = new Plan(goal); - - // Arrange Mocks - var functions = new Mock(); - - var returnContext = new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, new ContextVariables(stepOutput)); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - plan.AddSteps(new Plan(mockFunction.Object)); - - // Act - var serializedPlan = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan); - Assert.NotEmpty(serializedPlan); - Assert.Contains(goal, serializedPlan, 
StringComparison.OrdinalIgnoreCase); - Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void CanSerializePlanWithFunctionStep() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var stepOutput = "Output: The input was: "; - var expectedSteps = "\"steps\":[{"; - var plan = new Plan(goal); - - // Arrange - var functions = new Mock(); - - var returnContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - plan.AddSteps(mockFunction.Object); - - // Act - var serializedPlan = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan); - Assert.NotEmpty(serializedPlan); - Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); - Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void CanSerializePlanWithFunctionSteps() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var stepOutput = "Output: The input was: "; - var expectedSteps = "\"steps\":[{"; - var plan = new Plan(goal); - - // Arrange - var functions = new Mock(); - - var returnContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - plan.AddSteps(mockFunction.Object, mockFunction.Object); - - // Act - var serializedPlan = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan); - Assert.NotEmpty(serializedPlan); - Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); - Assert.Contains(expectedSteps, serializedPlan, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void CanSerializePlanWithStepsAndFunction() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var stepOutput = "Output: The input was: "; - var expectedSteps = "\"steps\":[{"; - var plan = new Plan(goal); - - // Arrange - var functions = new Mock(); - - var returnContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - plan.AddSteps(new Plan(mockFunction.Object), mockFunction.Object); - - // Act - var serializedPlan = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan); - Assert.NotEmpty(serializedPlan); - Assert.Contains(goal, serializedPlan, StringComparison.OrdinalIgnoreCase); - Assert.Contains(expectedSteps, serializedPlan, 
StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void CanSerializePlanWithSteps() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var functions = new Mock(); - - var returnContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - plan.AddSteps(new Plan(mockFunction.Object), new Plan(mockFunction.Object)); - - // Act - var serializedPlan = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan); - Assert.NotEmpty(serializedPlan); - } - - [Fact] - public async Task CanStepAndSerializePlanWithStepsAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var kernel = new Mock(); - var functions = new Mock(); - var functionRunner = new Mock(); - kernel.SetupGet(x => x.Functions).Returns(functions.Object); - - kernel.Setup(k => k.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((contextVariables, functions, loggerFactory, culture) => - { - return new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, contextVariables, functions); - }); - - var returnContext = new SKContext(functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - this._functionRunner.Setup(k => k.RunAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((function, variables, ct) => - { - var c = new SKContext(new Mock().Object, this._serviceProvider.Object, this._serviceSelector.Object, variables); - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input); - var functionResult = new FunctionResult(function.Name, function.PluginName, returnContext); - return Task.FromResult(functionResult); - }); - - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - plan.AddSteps(mockFunction.Object, mockFunction.Object); - - var serializedPlan1 = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan1); - Assert.NotEmpty(serializedPlan1); - Assert.Contains("\"next_step_index\":0", serializedPlan1, StringComparison.OrdinalIgnoreCase); - - var result = await kernel.Object.StepAsync(planInput, plan); - - // Act - var serializedPlan2 = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan2); - Assert.NotEmpty(serializedPlan2); - Assert.NotEqual(serializedPlan1, serializedPlan2); - Assert.Contains("\"next_step_index\":1", serializedPlan2, StringComparison.OrdinalIgnoreCase); - - result = await kernel.Object.StepAsync(result); - var serializedPlan3 = plan.ToJson(); - - // Assert - 
Assert.NotNull(serializedPlan3); - Assert.NotEmpty(serializedPlan3); - Assert.NotEqual(serializedPlan1, serializedPlan3); - Assert.NotEqual(serializedPlan2, serializedPlan3); - Assert.Contains("\"next_step_index\":2", serializedPlan3, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task CanStepAndSerializePlanWithStepsAndContextAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var kernel = new Mock(); - var functions = new Mock(); - - kernel.SetupGet(x => x.Functions).Returns(functions.Object); - - kernel.Setup(k => k.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((contextVariables, functions, loggerFactory, culture) => - { - return new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, contextVariables); - }); - - var returnContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - { - c.Variables.TryGetValue("variables", out string? v); - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input + v); - }) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - mockFunction.Setup(x => x.Describe()).Returns(new FunctionView("functionName", "pluginName")); - - this._functionRunner.Setup(k => k.RunAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((function, variables, ct) => - { - var c = new SKContext(new Mock().Object, this._serviceProvider.Object, this._serviceSelector.Object, variables); - c.Variables.TryGetValue("variables", out string? 
v); - - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input + v); - var functionResult = new FunctionResult(function.Name, function.PluginName, returnContext); - - return Task.FromResult(functionResult); - }); - - mockFunction.Setup(x => x.Describe()).Returns(new FunctionView("functionName", "pluginName") - { - Parameters = new ParameterView[] - { - new("variables") - } - }); - - plan.AddSteps(mockFunction.Object, mockFunction.Object); - - var cv = new ContextVariables(planInput); - cv.Set("variables", "foo"); - plan = await kernel.Object.StepAsync(cv, plan); - - // Act - var serializedPlan1 = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan1); - Assert.NotEmpty(serializedPlan1); - Assert.Contains("\"next_step_index\":1", serializedPlan1, StringComparison.OrdinalIgnoreCase); - - // Act - cv.Set("variables", "bar"); - cv.Update(string.Empty); - plan = await kernel.Object.StepAsync(cv, plan); - - // Assert - Assert.NotNull(plan); - Assert.Equal($"{stepOutput}{planInput}foo{stepOutput}{planInput}foobar", plan.State.ToString()); - this._functionRunner.Verify(x => x.RunAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(2)); - - // Act - var serializedPlan2 = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan2); - Assert.NotEmpty(serializedPlan2); - Assert.NotEqual(serializedPlan1, serializedPlan2); - Assert.Contains("\"next_step_index\":2", serializedPlan2, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task CanStepAndSerializeAndDeserializePlanWithStepsAndContextAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var kernel = new Mock(); - var functions = new Mock(); - - kernel.SetupGet(x => x.Functions).Returns(functions.Object); - - var returnContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables(stepOutput) - ); - - kernel.Setup(k => k.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((contextVariables, functions, loggerFactory, culture) => - { - return new SKContext(this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, contextVariables); - }); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - { - c.Variables.TryGetValue("variables", out string? v); - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input + v); - }) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - mockFunction.Setup(x => x.Describe()).Returns(new FunctionView("functionName", "pluginName") - { - Parameters = new ParameterView[] - { - new("variables") - } - }); - - ISKFunction? 
outFunc = mockFunction.Object; - functions.Setup(x => x.TryGetFunction(It.IsAny(), out outFunc)).Returns(true); - functions.Setup(x => x.TryGetFunction(It.IsAny(), It.IsAny(), out outFunc)).Returns(true); - functions.Setup(x => x.GetFunction(It.IsAny(), It.IsAny())).Returns(mockFunction.Object); - - plan.AddSteps(mockFunction.Object, mockFunction.Object); - - this._functionRunner.Setup(k => k.RunAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((function, variables, ct) => - { - var c = new SKContext(new Mock().Object, this._serviceProvider.Object, this._serviceSelector.Object, variables); - c.Variables.TryGetValue("variables", out string? v); - - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input + v); - var functionResult = new FunctionResult(function.Name, function.PluginName, returnContext); - return Task.FromResult(functionResult); - }); - - var serializedPlan = plan.ToJson(); - - var cv = new ContextVariables(planInput); - cv.Set("variables", "foo"); - plan = await kernel.Object.StepAsync(cv, plan); - - // Act - var serializedPlan1 = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan1); - Assert.NotEmpty(serializedPlan1); - Assert.NotEqual(serializedPlan, serializedPlan1); - Assert.Contains("\"next_step_index\":1", serializedPlan1, StringComparison.OrdinalIgnoreCase); - - // Act - cv.Set("variables", "bar"); - cv.Update(string.Empty); - - var nextContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables() - ); - plan = Plan.FromJson(serializedPlan1, functions.Object); - plan = await kernel.Object.StepAsync(cv, plan); - - // Assert - Assert.NotNull(plan); - Assert.Equal($"{stepOutput}{planInput}foo{stepOutput}{planInput}foobar", plan.State.ToString()); - this._functionRunner.Verify(x => x.RunAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(2)); - - // Act - var serializedPlan2 = plan.ToJson(); - - // Assert - Assert.NotNull(serializedPlan2); - Assert.NotEmpty(serializedPlan2); - Assert.NotEqual(serializedPlan1, serializedPlan2); - Assert.Contains("\"next_step_index\":2", serializedPlan2, StringComparison.OrdinalIgnoreCase); - } - - [Theory] - [InlineData(false)] - [InlineData(true)] - public void CanDeserializePlan(bool requireFunctions) - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var functions = new Mock(); - - var returnContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - if (requireFunctions) - { - mockFunction.Setup(x => x.Name).Returns(string.Empty); - ISKFunction? 
outFunc = mockFunction.Object; - functions.Setup(x => x.TryGetFunction(It.IsAny(), out outFunc)).Returns(true); - functions.Setup(x => x.TryGetFunction(It.IsAny(), It.IsAny(), out outFunc)).Returns(true); - functions.Setup(x => x.GetFunction(It.IsAny(), It.IsAny())).Returns(mockFunction.Object); - } - - plan.AddSteps(new Plan("Step1", mockFunction.Object), mockFunction.Object); - - // Act - var serializedPlan = plan.ToJson(); - var deserializedPlan = Plan.FromJson(serializedPlan, functions.Object, requireFunctions); - - // Assert - Assert.NotNull(deserializedPlan); - Assert.Equal(goal, deserializedPlan.Description); - - Assert.Equal(string.Join(",", plan.Outputs), - string.Join(",", deserializedPlan.Outputs)); - Assert.Equal(string.Join(",", plan.Parameters.Select(kv => $"{kv.Key}:{kv.Value}")), - string.Join(",", deserializedPlan.Parameters.Select(kv => $"{kv.Key}:{kv.Value}"))); - Assert.Equal(string.Join(",", plan.State.Select(kv => $"{kv.Key}:{kv.Value}")), - string.Join(",", deserializedPlan.State.Select(kv => $"{kv.Key}:{kv.Value}"))); - - Assert.Equal(plan.Steps[0].Name, deserializedPlan.Steps[0].Name); - Assert.Equal(plan.Steps[1].Name, deserializedPlan.Steps[1].Name); - } - - [Theory] - [InlineData(false)] - [InlineData(true)] - public void DeserializeWithMissingFunctions(bool requireFunctions) - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var kernel = new Mock(); - var functions = new Mock(); - - var returnContext = new SKContext( - this._functionRunner.Object, this._serviceProvider.Object, this._serviceSelector.Object, - new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - - plan.AddSteps(new Plan("Step1", mockFunction.Object), mockFunction.Object); - - var serializedPlan = plan.ToJson(); - - if (requireFunctions) - { - // Act + Assert - Assert.Throws(() => Plan.FromJson(serializedPlan, functions.Object)); - } - else - { - // Act - var deserializedPlan = Plan.FromJson(serializedPlan, functions.Object, requireFunctions); - - // Assert - Assert.NotNull(deserializedPlan); - Assert.Equal(goal, deserializedPlan.Description); - - Assert.Equal(string.Join(",", plan.Outputs), - string.Join(",", deserializedPlan.Outputs)); - Assert.Equal(string.Join(",", plan.Parameters.Select(kv => $"{kv.Key}:{kv.Value}")), - string.Join(",", deserializedPlan.Parameters.Select(kv => $"{kv.Key}:{kv.Value}"))); - Assert.Equal(string.Join(",", plan.State.Select(kv => $"{kv.Key}:{kv.Value}")), - string.Join(",", deserializedPlan.State.Select(kv => $"{kv.Key}:{kv.Value}"))); - - Assert.Equal(plan.Steps[0].Name, deserializedPlan.Steps[0].Name); - Assert.Equal(plan.Steps[1].Name, deserializedPlan.Steps[1].Name); - } - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Planning/PlanTests.cs b/dotnet/src/SemanticKernel.UnitTests/Planning/PlanTests.cs deleted file mode 100644 index ca7b2afa5535..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Planning/PlanTests.cs +++ /dev/null @@ -1,857 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Globalization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AI; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.Planning; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.UnitTests.Planning; - -public sealed class PlanTests -{ - [Fact] - public Task CanCreatePlanAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - // Act - var plan = new Plan(goal); - - // Assert - Assert.Equal(goal, plan.Description); - Assert.Empty(plan.Steps); - return Task.CompletedTask; - } - - [Fact] - public async Task CanExecutePlanAsync() - { - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var plan = new Plan(goal); - - // Act - var result = await plan.InvokeAsync("Some input", kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal("Some input", result.Context.Result); - Assert.Null(result.GetValue()); - } - - [Fact] - public async Task CanExecutePlanWithContextAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var plan = new Plan(goal); - var functionRunner = new Mock(); - var serviceProvider = new Mock(); - var serviceSelector = new Mock(); - - var context = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables("Some input")); - - // Act - var result = await plan.InvokeAsync(context); - - // Assert - Assert.NotNull(result); - Assert.Equal("Some input", result.Context.Result); - Assert.Null(result.GetValue()); - - plan = new Plan(goal); - // Act - context.Variables.Update("other input"); - result = await plan.InvokeAsync(context); - // Assert - Assert.NotNull(result); - Assert.Equal("other input", result.Context.Result); - Assert.Null(result.GetValue()); - } - - [Fact] - public async Task CanExecutePlanWithPlanStepAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(stepOutput)); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - plan.AddSteps(new Plan(mockFunction.Object)); - - // Act - var result = await plan.InvokeAsync(planInput, kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal($"{stepOutput}{planInput}", result.Context.Result); - Assert.Equal($"{stepOutput}{planInput}", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - } - - [Fact] - public async Task CanExecutePlanWithFunctionStepAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to 
Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(stepOutput)); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - plan.AddSteps(mockFunction.Object); - - // Act - var result = await plan.InvokeAsync(planInput, kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal($"{stepOutput}{planInput}", result.Context.Result); - Assert.Equal($"{stepOutput}{planInput}", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - } - - [Fact] - public async Task CanExecutePlanWithFunctionStepsAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(stepOutput)); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - plan.AddSteps(mockFunction.Object, mockFunction.Object); - - // Act - var result = await plan.InvokeAsync(planInput, kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal($"{stepOutput}{planInput}{stepOutput}{planInput}", result.Context.Result); - Assert.Equal($"{stepOutput}{planInput}{stepOutput}{planInput}", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Exactly(2)); - } - - [Fact] - public async Task CanExecutePlanWithStepsAndFunctionAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(stepOutput)); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - 
plan.AddSteps(new Plan(mockFunction.Object), mockFunction.Object); - - // Act - var result = await plan.InvokeAsync(planInput, kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal($"{stepOutput}{planInput}{stepOutput}{planInput}", result.Context.Result); - Assert.Equal($"{stepOutput}{planInput}{stepOutput}{planInput}", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Exactly(2)); - } - - [Fact] - public async Task CanExecutePlanWithStepsAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(stepOutput)); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - plan.AddSteps(new Plan(mockFunction.Object), new Plan(mockFunction.Object)); - - // Act - var result = await plan.InvokeAsync(planInput, kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal($"{stepOutput}{planInput}{stepOutput}{planInput}", result.Context.Result); - Assert.Equal($"{stepOutput}{planInput}{stepOutput}{planInput}", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Exactly(2)); - } - - [Fact] - public async Task CanStepPlanWithStepsAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(stepOutput) - ); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - plan.AddSteps(mockFunction.Object, mockFunction.Object); - - // Act - var result = await kernel.Object.StepAsync(planInput, plan); - - // Assert - Assert.NotNull(result); - Assert.Equal($"{stepOutput}{planInput}", result.State.ToString()); - - // Act - result = await kernel.Object.StepAsync(result); - - // Assert - Assert.NotNull(result); - Assert.Equal($"{stepOutput}{planInput}{stepOutput}{planInput}", result.State.ToString()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Exactly(2)); - } - - [Fact] - public async Task CanStepPlanWithStepsAndContextAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input 
was: "; - var plan = new Plan(goal); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(stepOutput)); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - { - c.Variables.TryGetValue("variables", out string? v); - returnContext.Variables.Update(returnContext.Variables.Input + c.Variables.Input + v); - }) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext))); - mockFunction.Setup(x => x.Describe()).Returns(new FunctionView("functionName", "pluginName", "description") - { - Parameters = new ParameterView[] { new("variables") } - }); - - plan.AddSteps(mockFunction.Object, mockFunction.Object); - - // Act - var cv = new ContextVariables(planInput); - cv.Set("variables", "foo"); - plan = await kernel.Object.StepAsync(cv, plan); - - // Assert - Assert.NotNull(plan); - Assert.Equal($"{stepOutput}{planInput}foo", plan.State.ToString()); - - // Act - cv.Set("variables", "bar"); - cv.Update(string.Empty); - plan = await kernel.Object.StepAsync(cv, plan); - - // Assert - Assert.NotNull(plan); - Assert.Equal($"{stepOutput}{planInput}foo{stepOutput}{planInput}foobar", plan.State.ToString()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Exactly(2)); - } - - [Fact] - public async Task StepExceptionIsThrownAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var stepOutput = "Output: The input was: "; - var plan = new Plan(goal); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, new ContextVariables(stepOutput)); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Throws(new ArgumentException("Error message")); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - plan.AddSteps(mockFunction.Object, mockFunction.Object); - - // Act - var cv = new ContextVariables(planInput); - await Assert.ThrowsAsync(async () => await kernel.Object.StepAsync(cv, plan)); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - } - - [Fact] - public async Task PlanStepExceptionIsThrownAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var planInput = "Some input"; - var plan = new Plan(goal); - - // Arrange - var logger = new Mock(); - var functions = new Mock(); - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Throws(new ArgumentException("Error message")); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - plan.AddSteps(new Plan(mockFunction.Object), new Plan(mockFunction.Object)); - - // Act - var cv = new ContextVariables(planInput); - await Assert.ThrowsAsync(async () => await kernel.Object.StepAsync(cv, plan)); - mockFunction.Verify(x => 
x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - } - - [Fact] - public async Task CanExecutePlanWithTreeStepsAsync() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var plan = new Plan(goal); - var subPlan = new Plan("Write a poem or joke"); - - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - - var childFunction1 = new Mock(); - childFunction1.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update("Child 1 output!" + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("child1", "pluginName", returnContext, returnContext.Result))); - childFunction1.Setup(x => x.Describe()).Returns(() => new FunctionView("child1", "pluginName")); - - var childFunction2 = new Mock(); - childFunction2.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update("Child 2 is happy about " + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("child2", "pluginName", returnContext, returnContext.Result))); - childFunction2.Setup(x => x.Describe()).Returns(() => new FunctionView("child2", "pluginName")); - - var childFunction3 = new Mock(); - childFunction3.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update("Child 3 heard " + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("child3", "pluginName", returnContext, returnContext.Result))); - childFunction3.Setup(x => x.Describe()).Returns(() => new FunctionView("child3", "pluginName")); - - var nodeFunction1 = new Mock(); - nodeFunction1.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update(c.Variables.Input + " - this just happened.")) - .Returns(() => Task.FromResult(new FunctionResult("node1", "pluginName", returnContext, returnContext.Result))); - nodeFunction1.Setup(x => x.Describe()).Returns(() => new FunctionView("node1", "pluginName")); - - subPlan.AddSteps(childFunction1.Object, childFunction2.Object, childFunction3.Object); - plan.AddSteps(subPlan); - plan.AddSteps(nodeFunction1.Object); - - // Act - while (plan.HasNextStep) - { - plan = await kernel.Object.StepAsync(plan); - } - - // Assert - Assert.NotNull(plan); - Assert.Equal("Child 3 heard Child 2 is happy about Child 1 output!Write a poem or joke - this just happened.", plan.State.ToString()); - nodeFunction1.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - childFunction1.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - childFunction2.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - childFunction3.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - } - - [Fact] - public void CanCreatePlanWithGoalAndSteps() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var plan = new Plan(goal, new Mock().Object, new Mock().Object); - - // Assert - Assert.NotNull(plan); - Assert.Equal(goal, plan.Description); - Assert.Equal(2, plan.Steps.Count); - } - - [Fact] - public void CanCreatePlanWithGoalAndSubPlans() - { - // Arrange - var goal = "Write a poem or joke and send it in an e-mail to Kai."; - var plan = new Plan(goal, new Plan("Write a poem or joke"), new Plan("Send it in an 
e-mail to Kai")); - - // Assert - Assert.NotNull(plan); - Assert.Equal(goal, plan.Description); - Assert.Equal(2, plan.Steps.Count); - } - - [Fact] - public async Task CanExecutePlanWithOneStepAndStateAsync() - { - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update("Here is a poem about " + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - var plan = new Plan(mockFunction.Object); - plan.State.Set("input", "Cleopatra"); - - // Act - var result = await plan.InvokeAsync(kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal("Here is a poem about Cleopatra", result.Context.Result); - Assert.Equal("Here is a poem about Cleopatra", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - } - - [Fact] - public async Task CanExecutePlanWithStateAsync() - { - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - functionRunner.Setup(k => k.RunAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(async (function, variables, ct) => - { - var c = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, variables); - var functionResult = await function.InvokeAsync(c, cancellationToken: ct); - return functionResult; - }); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - { - c.Variables.TryGetValue("type", out string? t); - returnContext.Variables.Update($"Here is a {t} about " + c.Variables.Input); - }) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - var planStep = new Plan(mockFunction.Object); - planStep.Parameters.Set("type", string.Empty); - var plan = new Plan(string.Empty); - plan.AddSteps(planStep); - plan.State.Set("input", "Cleopatra"); - plan.State.Set("type", "poem"); - - // Act - var result = await plan.InvokeAsync(kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal("Here is a poem about Cleopatra", result.Context.Result); - Assert.Equal("Here is a poem about Cleopatra", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - } - - [Fact] - public async Task CanExecutePlanWithCustomContextAsync() - { - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - { - c.Variables.TryGetValue("type", out string? 
t); - returnContext.Variables.Update($"Here is a {t} about " + c.Variables.Input); - }) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - var plan = new Plan(mockFunction.Object); - plan.State.Set("input", "Cleopatra"); - plan.State.Set("type", "poem"); - - // Act - var result = await plan.InvokeAsync(kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal("Here is a poem about Cleopatra", result.Context.Result); - Assert.Equal("Here is a poem about Cleopatra", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - - plan = new Plan(mockFunction.Object); - plan.State.Set("input", "Cleopatra"); - plan.State.Set("type", "poem"); - - var contextOverride = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - contextOverride.Variables.Set("type", "joke"); - contextOverride.Variables.Update("Medusa"); - - // Act - result = await plan.InvokeAsync(contextOverride); - - // Assert - Assert.NotNull(result); - Assert.Equal("Here is a joke about Medusa", result.Context.Result); - Assert.Equal("Here is a joke about Medusa", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Exactly(2)); - } - - [Fact] - public async Task CanExecutePlanWithCustomStateAsync() - { - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - - var mockFunction = new Mock(); - mockFunction.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - { - c.Variables.TryGetValue("type", out string? 
t); - returnContext.Variables.Update($"Here is a {t} about " + c.Variables.Input); - }) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - mockFunction.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - var planStep = new Plan(mockFunction.Object); - planStep.Parameters.Set("type", string.Empty); - var plan = new Plan("A plan"); - plan.State.Set("input", "Medusa"); - plan.State.Set("type", "joke"); - plan.AddSteps(planStep); - - // Act - var result = await plan.InvokeAsync(kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal("Here is a joke about Medusa", result.Context.Result); - Assert.Equal("Here is a joke about Medusa", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Once); - - planStep = new Plan(mockFunction.Object); - plan = new Plan("A plan"); - planStep.Parameters.Set("input", "Medusa"); - planStep.Parameters.Set("type", "joke"); - plan.State.Set("input", "Cleopatra"); // state input will not override parameter - plan.State.Set("type", "poem"); - plan.AddSteps(planStep); - - // Act - result = await plan.InvokeAsync(kernel.Object); - - // Assert - Assert.NotNull(result); - Assert.Equal("Here is a poem about Medusa", result.Context.Result); - Assert.Equal("Here is a poem about Medusa", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Exactly(2)); - - planStep = new Plan(mockFunction.Object); - plan = new Plan("A plan"); - planStep.Parameters.Set("input", "Cleopatra"); - planStep.Parameters.Set("type", "poem"); - plan.AddSteps(planStep); - var contextOverride = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - contextOverride.Variables.Set("type", "joke"); - contextOverride.Variables.Update("Medusa"); // context input will not override parameters - - // Act - result = await plan.InvokeAsync(contextOverride); - - // Assert - Assert.NotNull(result); - Assert.Equal("Here is a joke about Cleopatra", result.Context.Result); - Assert.Equal("Here is a joke about Cleopatra", result.GetValue()); - mockFunction.Verify(x => x.InvokeAsync(It.IsAny(), null, It.IsAny()), Times.Exactly(3)); - } - - [Fact] - public async Task CanExecutePlanWithJoinedResultAsync() - { - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - functionRunner.Setup(k => k.RunAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(async (function, variables, ct) => - { - var c = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, variables); - var functionResult = await function.InvokeAsync(c, cancellationToken: ct); - return functionResult; - }); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - - var outlineMock = new Mock(); - outlineMock.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update($"Here is a {c.Variables["chapterCount"]} chapter outline about " + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("outline", "pluginName", returnContext, returnContext.Result))); - outlineMock.Setup(x => x.Describe()).Returns(() => new FunctionView("outline", "pluginName")); - - var elementAtIndexMock = new Mock(); - elementAtIndexMock.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - { - 
returnContext.Variables.Update($"Outline section #{c.Variables["index"]} of {c.Variables["count"]}: " + c.Variables.Input); - }) - .Returns(() => Task.FromResult(new FunctionResult("elementAt", "pluginName", returnContext, returnContext.Result))); - elementAtIndexMock.Setup(x => x.Describe()).Returns(() => new FunctionView("elementAt", "pluginName")); - - var novelChapterMock = new Mock(); - novelChapterMock.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - { - returnContext.Variables.Update( - $"Chapter #{c.Variables["chapterIndex"]}: {c.Variables.Input}\nTheme:{c.Variables["theme"]}\nPreviously:{c.Variables["previousChapter"]}"); - }) - .Returns(() => Task.FromResult(new FunctionResult("novelChapter", "pluginName", returnContext, returnContext.Result))); - novelChapterMock.Setup(x => x.Describe()).Returns(() => new FunctionView("novelChapter", "pluginName")); - - var plan = new Plan("A plan with steps that alternate appending to the plan result."); - - // Steps: - // - WriterPlugin.NovelOutline chapterCount='3' INPUT='A group of kids in a club called 'The Thinking Caps' that solve mysteries and puzzles using their creativity and logic.' endMarker='' => OUTLINE - // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='0' => CHAPTER_1_SYNOPSIS - // - WriterPlugin.NovelChapter chapterIndex='1' previousChapter='' INPUT='$CHAPTER_1_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_1 - // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='1' => CHAPTER_2_SYNOPSIS - // - WriterPlugin.NovelChapter chapterIndex='2' previousChapter='$CHAPTER_1_SYNOPSIS' INPUT='$CHAPTER_2_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_2 - // - MiscPlugin.ElementAtIndex count='3' INPUT='$OUTLINE' index='2' => CHAPTER_3_SYNOPSIS - // - WriterPlugin.NovelChapter chapterIndex='3' previousChapter='$CHAPTER_2_SYNOPSIS' INPUT='$CHAPTER_3_SYNOPSIS' theme='Children's mystery' => RESULT__CHAPTER_3 - var planStep = new Plan(outlineMock.Object); - planStep.Parameters.Set("input", - "NovelOutline function input."); - planStep.Parameters.Set("chapterCount", "3"); - planStep.Outputs.Add("OUTLINE"); - plan.AddSteps(planStep); - - planStep = new Plan(elementAtIndexMock.Object); - planStep.Parameters.Set("count", "3"); - planStep.Parameters.Set("INPUT", "$OUTLINE"); - planStep.Parameters.Set("index", "0"); - planStep.Outputs.Add("CHAPTER_1_SYNOPSIS"); - plan.AddSteps(planStep); - - planStep = new Plan(novelChapterMock.Object); - planStep.Parameters.Set("chapterIndex", "1"); - planStep.Parameters.Set("previousChapter", " "); - planStep.Parameters.Set("INPUT", "$CHAPTER_1_SYNOPSIS"); - planStep.Parameters.Set("theme", "Children's mystery"); - planStep.Outputs.Add("RESULT__CHAPTER_1"); - plan.Outputs.Add("RESULT__CHAPTER_1"); - plan.AddSteps(planStep); - - planStep = new Plan(elementAtIndexMock.Object); - planStep.Parameters.Set("count", "3"); - planStep.Parameters.Set("INPUT", "$OUTLINE"); - planStep.Parameters.Set("index", "1"); - planStep.Outputs.Add("CHAPTER_2_SYNOPSIS"); - plan.AddSteps(planStep); - - planStep = new Plan(novelChapterMock.Object); - planStep.Parameters.Set("chapterIndex", "2"); - planStep.Parameters.Set("previousChapter", "$CHAPTER_1_SYNOPSIS"); - planStep.Parameters.Set("INPUT", "$CHAPTER_2_SYNOPSIS"); - planStep.Parameters.Set("theme", "Children's mystery"); - planStep.Outputs.Add("RESULT__CHAPTER_2"); - plan.Outputs.Add("RESULT__CHAPTER_2"); - plan.AddSteps(planStep); - - planStep = new Plan(elementAtIndexMock.Object); - 
planStep.Parameters.Set("count", "3"); - planStep.Parameters.Set("INPUT", "$OUTLINE"); - planStep.Parameters.Set("index", "2"); - planStep.Outputs.Add("CHAPTER_3_SYNOPSIS"); - plan.AddSteps(planStep); - - planStep = new Plan(novelChapterMock.Object); - planStep.Parameters.Set("chapterIndex", "3"); - planStep.Parameters.Set("previousChapter", "$CHAPTER_2_SYNOPSIS"); - planStep.Parameters.Set("INPUT", "$CHAPTER_3_SYNOPSIS"); - planStep.Parameters.Set("theme", "Children's mystery"); - planStep.Outputs.Add("CHAPTER_3"); - plan.Outputs.Add("CHAPTER_3"); - plan.AddSteps(planStep); - - // Act - var result = await plan.InvokeAsync(kernel.Object); - - var expected = - @"Chapter #1: Outline section #0 of 3: Here is a 3 chapter outline about NovelOutline function input. -Theme:Children's mystery -Previously: -Chapter #2: Outline section #1 of 3: Here is a 3 chapter outline about NovelOutline function input. -Theme:Children's mystery -Previously:Outline section #0 of 3: Here is a 3 chapter outline about NovelOutline function input. -Chapter #3: Outline section #2 of 3: Here is a 3 chapter outline about NovelOutline function input. -Theme:Children's mystery -Previously:Outline section #1 of 3: Here is a 3 chapter outline about NovelOutline function input."; - - // Assert - Assert.Equal(expected, result.GetValue()); - Assert.Equal(expected, result.Context.Result); - Assert.True(result.TryGetMetadataValue("RESULT__CHAPTER_1", out var chapter1)); - Assert.True(result.TryGetMetadataValue("RESULT__CHAPTER_2", out var chapter2)); - Assert.True(result.TryGetMetadataValue("CHAPTER_3", out var chapter3)); - Assert.False(result.TryGetMetadataValue("CHAPTER_3_SYNOPSIS", out var chapter3Synopsis)); - } - - [Fact] - public async Task CanExecutePlanWithExpandedAsync() - { - // Arrange - var (kernel, functionRunner, serviceProvider, serviceSelector) = this.SetupKernelMock(); - - var returnContext = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object); - - var functionMock = new Mock(); - functionMock.Setup(x => x.InvokeAsync(It.IsAny(), null, It.IsAny())) - .Callback((c, s, ct) => - returnContext.Variables.Update($"Here is a payload '{c.Variables["payload"]}' for " + c.Variables.Input)) - .Returns(() => Task.FromResult(new FunctionResult("functionName", "pluginName", returnContext, returnContext.Result))); - functionMock.Setup(x => x.Describe()).Returns(() => new FunctionView("functionName", "pluginName")); - - var plan = new Plan("A plan with steps that have variables with a $ in them but not associated with an output"); - - var planStep = new Plan(functionMock.Object); - planStep.Parameters.Set("input", - "Function input."); - planStep.Parameters.Set("payload", @"{""prop"":""value"", ""$prop"": 3, ""prop2"": ""my name is $pop and $var""}"); - plan.AddSteps(planStep); - plan.State.Set("var", "foobar"); - - // Act - var result = await plan.InvokeAsync(kernel.Object); - - var expected = - @"Here is a payload '{""prop"":""value"", ""$prop"": 3, ""prop2"": ""my name is $pop and foobar""}' for Function input."; - - // Assert - Assert.Equal(expected, result.Context.Result); - Assert.Equal(expected, result.GetValue()); - } - - private (Mock kernelMock, Mock functionRunnerMock, Mock serviceProviderMock, Mock serviceSelectorMock) SetupKernelMock(IFunctionCollection? 
functions = null) - { - functions ??= new Mock().Object; - - var kernel = new Mock(); - var functionRunner = new Mock(); - var serviceProvider = new Mock(); - var serviceSelector = new Mock(); - - kernel.SetupGet(x => x.Functions).Returns(functions); - kernel.Setup(k => k.CreateNewContext(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((contextVariables, skills, loggerFactory, culture) => - { - return new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, contextVariables, functions); - }); - - functionRunner.Setup(k => k.RunAsync(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(async (function, variables, ct) => - { - var c = new SKContext(functionRunner.Object, serviceProvider.Object, serviceSelector.Object, variables); - var functionResult = await function.InvokeAsync(c, cancellationToken: ct); - return functionResult; - }); - - return (kernel, functionRunner, serviceProvider, serviceSelector); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs b/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs new file mode 100644 index 000000000000..57cff6cc3917 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Xunit; + +namespace SemanticKernel.UnitTests.Prompt; + +public sealed class ChatPromptParserTests +{ + [Theory] + [InlineData("This is plain prompt")] + [InlineData("")] + [InlineData("This is invalidchat prompt")] + public void ItReturnsNullChatHistoryWhenPromptIsPlainTextOrInvalid(string prompt) + { + // Act + var result = ChatPromptParser.TryParse(prompt, out var chatHistory); + + // Assert + Assert.False(result); + Assert.Null(chatHistory); + } + + [Fact] + public void ItReturnsChatHistoryWithValidRolesWhenPromptIsValid() + { + // Arrange + string prompt = GetSimpleValidPrompt(); + + // Act + bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); + + // Assert + Assert.True(result); + Assert.NotNull(chatHistory); + + Assert.Collection(chatHistory, + c => Assert.Equal(AuthorRole.System, c.Role), + c => Assert.Equal(AuthorRole.User, c.Role), + c => Assert.Equal(AuthorRole.Assistant, c.Role), + c => Assert.Equal(AuthorRole.System, c.Role), + c => Assert.Equal(AuthorRole.User, c.Role)); + } + + [Fact] + public void ItReturnsChatHistoryWithValidContentWhenSimplePrompt() + { + // Arrange + string prompt = GetSimpleValidPrompt(); + + // Act + bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); + + // Assert + Assert.True(result); + Assert.NotNull(chatHistory); + + Assert.Collection(chatHistory, + c => Assert.Equal("Test with role in double quotes and content in new line.", c.Content), + c => Assert.Equal("Test with role in single quotes and content in the same line.", c.Content), + c => Assert.Equal(""" + Test with multiline content. + Second line. + """, c.Content), + c => Assert.Equal("Test line with tab.", c.Content), + c => Assert.Equal("Hello, I'm a user.", c.Content)); + } + + [Fact] + public void ItReturnsChatHistoryWithValidContentItemsWhenNestedPrompt() + { + // Arrange + string prompt = GetNestedItemsValidPrompt(); + + // Act + bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); + + // Assert + Assert.True(result); + Assert.NotNull(chatHistory); + + Assert.Collection(chatHistory, + c => Assert.Equal("Hi how are you?", c.Content), + c => Assert.Equal(""" + Test with multiline content. + Second line.
+ """, c.Content), + c => Assert.True(((TextContent)c.Items![0]).Text!.Equals("explain image", StringComparison.Ordinal) + && ((ImageContent)c.Items![1]).Uri!.AbsoluteUri == "https://fake-link-to-image/")); + } + + private static string GetSimpleValidPrompt() + { + return + """ + + + Test with role in double quotes and content in new line. + + + Test with role in single quotes and content in the same line. + + + Test with multiline content. + Second line. + + + + Test line with tab. + + + + Hello, I'm a user. + + + """; + } + + private static string GetNestedItemsValidPrompt() + { + return + """ + + Hi how are you? + + + Test with multiline content. + Second line. + + + + explain image + https://fake-link-to-image/ + + + """; + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Prompt/XmlPromptParserTests.cs b/dotnet/src/SemanticKernel.UnitTests/Prompt/XmlPromptParserTests.cs new file mode 100644 index 000000000000..95f99b8b6648 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Prompt/XmlPromptParserTests.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Prompt; + +/// +/// Unit tests for class. +/// +public class XmlPromptParserTests +{ + [Theory] + [InlineData("This is plain prompt")] + [InlineData("")] + public void ItReturnsNullListWhenPromptIsPlainText(string prompt) + { + // Act + var result = XmlPromptParser.TryParse(prompt, out var nodes); + + // Assert + Assert.False(result); + Assert.Null(nodes); + } + + [Fact] + public void ItReturnsPromptNodesWhenPromptHasXmlFormat() + { + // Arrange + const string Prompt = @" + +Test with role in double quotes and content in new line. + + +Test with role in single quotes and content in the same line. + + +Test with multiline content. +Second line. + + + + Test line with tab. + + + + +"; + + var expectedNodes = new List + { + new("message") { Attributes = { { "role", "system" } }, Content = "Test with role in double quotes and content in new line." }, + new("message") { Attributes = { { "role", "user" } }, Content = "Test with role in single quotes and content in the same line." }, + new("message") { Attributes = { { "role", "assistant" } }, Content = "Test with multiline content.\nSecond line." }, + new("message") { Attributes = { { "role", "system" } }, Content = "Test line with tab." 
}, - new("message") - { - Attributes = { { "role", "user" } }, - ChildNodes = new List { new("audio") { Attributes = { { "src", "https://fake-link-to-audio" } } } } - }, - }; - - // Act - var result = XmlPromptParser.TryParse(Prompt, out var actualNodes); - - // Assert - Assert.True(result); - Assert.NotNull(actualNodes); - - for (var i = 0; i < actualNodes.Count; i++) - { - this.AssertPromptNode(expectedNodes[i], actualNodes[i]); - } - } - - private void AssertPromptNode(PromptNode expectedNode, PromptNode actualNode) - { - Assert.Equal(expectedNode.TagName, actualNode.TagName); - Assert.Equal(expectedNode.Content, actualNode.Content); - - var attributeKeys = expectedNode.Attributes.Keys.ToArray(); - - for (var i = 0; i < attributeKeys.Length; i++) - { - var key = attributeKeys[i]; - Assert.Equal(expectedNode.Attributes[key], actualNode.Attributes[key]); - } - - for (var i = 0; i < expectedNode.ChildNodes.Count; i++) - { - this.AssertPromptNode(expectedNode.ChildNodes[i], actualNode.ChildNodes[i]); - } - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/AggregatorPromptTemplateFactoryTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/AggregatorPromptTemplateFactoryTests.cs new file mode 100644 index 000000000000..a4bea7ac0f48 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/AggregatorPromptTemplateFactoryTests.cs @@ -0,0 +1,142 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Xunit; + +#pragma warning disable CS8767 // Nullability of reference types in type of parameter doesn't match implicitly implemented member (possibly because of nullability attributes). + +namespace SemanticKernel.UnitTests.PromptTemplate; + +public sealed class AggregatorPromptTemplateFactoryTests +{ + [Fact] + public void ItCreatesMyPromptTemplates() + { + // Arrange + var templateString = "{{$input}}"; + var promptModel1 = new PromptTemplateConfig() { TemplateFormat = "my-format-1", Template = templateString }; + var promptModel2 = new PromptTemplateConfig() { TemplateFormat = "my-format-2", Template = templateString }; + var target = new AggregatorPromptTemplateFactory(new MyPromptTemplateFactory1(), new MyPromptTemplateFactory2()); + + // Act + var result1 = target.Create(promptModel1); + var result2 = target.Create(promptModel2); + + // Assert + Assert.NotNull(result1); + Assert.True(result1 is MyPromptTemplate1); + Assert.NotNull(result2); + Assert.True(result2 is MyPromptTemplate2); + } + + [Fact] + public void ItThrowsExceptionForUnknownPromptTemplateFormat() + { + // Arrange + var templateString = "{{$input}}"; + var promptConfig = new PromptTemplateConfig() { TemplateFormat = "unknown-format", Template = templateString }; + var target = new AggregatorPromptTemplateFactory(new MyPromptTemplateFactory1(), new MyPromptTemplateFactory2()); + + // Act + // Assert + Assert.Throws(() => target.Create(promptConfig)); + } + + [Fact] + public void ItCreatesPromptFunctionsUsingCorrectFactory() + { + // Arrange + var templateString = "{{$input}}"; + var kernel = new Kernel(); + var factory1 = new MyPromptTemplateFactory1(); + var factory2 = new MyPromptTemplateFactory2(); + var target = new AggregatorPromptTemplateFactory(factory1, factory2); + + // Act + var function1 = kernel.CreateFunctionFromPrompt(templateString, templateFormat: "my-format-1", promptTemplateFactory: target); + var function2 = kernel.CreateFunctionFromPrompt(templateString,
templateFormat: "my-format-1", promptTemplateFactory: target); + + // Assert + Assert.NotNull(function1); + Assert.NotNull(function2); + } + + [Fact] + public void ItThrowsExceptionCreatePromptFunctionWithoutFormat() + { + // Arrange + var templateString = "{{$input}}"; + var kernel = new Kernel(); + var factory1 = new MyPromptTemplateFactory1(); + + // Act & Assert + var result = Assert.Throws(() => kernel.CreateFunctionFromPrompt(templateString, promptTemplateFactory: factory1)); + Assert.Equal("templateFormat", result.ParamName); + Assert.Equal("Template format is required when providing a promptTemplateFactory (Parameter 'templateFormat')", result.Message); + } + + #region private + private sealed class MyPromptTemplateFactory1 : IPromptTemplateFactory + { + public bool TryCreate(PromptTemplateConfig templateConfig, out IPromptTemplate? result) + { + if (templateConfig.TemplateFormat.Equals("my-format-1", StringComparison.Ordinal)) + { + result = new MyPromptTemplate1(templateConfig); + return true; + } + + result = null; + return false; + } + } + + private sealed class MyPromptTemplate1 : IPromptTemplate + { + private readonly PromptTemplateConfig _promptModel; + + public MyPromptTemplate1(PromptTemplateConfig promptConfig) + { + this._promptModel = promptConfig; + } + + public Task RenderAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default) + { + return Task.FromResult(this._promptModel.Template); + } + } + + private sealed class MyPromptTemplateFactory2 : IPromptTemplateFactory + { + public bool TryCreate(PromptTemplateConfig templateConfig, out IPromptTemplate? result) + { + if (templateConfig.TemplateFormat.Equals("my-format-2", StringComparison.Ordinal)) + { + result = new MyPromptTemplate2(templateConfig); + return true; + } + + result = null; + return false; + } + } + + private sealed class MyPromptTemplate2 : IPromptTemplate + { + private readonly PromptTemplateConfig _promptModel; + + public MyPromptTemplate2(PromptTemplateConfig promptConfig) + { + this._promptModel = promptConfig; + } + + public Task RenderAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default) + { + return Task.FromResult(this._promptModel.Template); + } + } + #endregion +} diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/KernelPromptTemplateFactoryTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/KernelPromptTemplateFactoryTests.cs new file mode 100644 index 000000000000..2494ab1effb1 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/KernelPromptTemplateFactoryTests.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.PromptTemplate; + +public sealed class KernelPromptTemplateFactoryTests +{ + [Fact] + public void ItCreatesBasicPromptTemplateByDefault() + { + // Arrange + var templateString = "{{$input}}"; + var target = new KernelPromptTemplateFactory(); + + // Act + var result = target.Create(new PromptTemplateConfig(templateString)); + + // Assert + Assert.NotNull(result); + Assert.True(result is KernelPromptTemplate); + } + + [Fact] + public void ItCreatesBasicPromptTemplate() + { + // Arrange + var templateString = "{{$input}}"; + var target = new KernelPromptTemplateFactory(); + + // Act + var result = target.Create(new PromptTemplateConfig(templateString) { TemplateFormat = "semantic-kernel" }); + + // Assert + Assert.NotNull(result); + Assert.True(result is KernelPromptTemplate); + } + + [Fact] + public void ItThrowsExceptionForUnknowPromptTemplateFormat() + { + // Arrange + var templateString = "{{$input}}"; + var target = new KernelPromptTemplateFactory(); + + // Act + // Assert + Assert.Throws(() => target.Create(new PromptTemplateConfig(templateString) { TemplateFormat = "unknown-format" })); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/KernelPromptTemplateTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/KernelPromptTemplateTests.cs new file mode 100644 index 000000000000..4a36104b7d3e --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/KernelPromptTemplateTests.cs @@ -0,0 +1,520 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TemplateEngine; +using SemanticKernel.UnitTests.XunitHelpers; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.UnitTests.PromptTemplate; + +public sealed class KernelPromptTemplateTests +{ + private const string InputParameterName = "input"; + private const string DateFormat = "M/d/yyyy"; + private readonly KernelPromptTemplateFactory _factory; + private readonly KernelArguments _arguments; + private readonly ITestOutputHelper _logger; + private readonly Kernel _kernel; + + public KernelPromptTemplateTests(ITestOutputHelper testOutputHelper) + { + this._logger = testOutputHelper; + this._factory = new KernelPromptTemplateFactory(TestConsoleLogger.LoggerFactory); + this._arguments = new KernelArguments() { [InputParameterName] = Guid.NewGuid().ToString("X") }; + this._kernel = new Kernel(); + } + + [Fact] + public void ItAddsMissingVariables() + { + // Arrange + var template = "This {{$x11}} {{$a}}{{$missing}} test template {{p.bar $b}} and {{p.foo c='literal \"c\"' d = $d}} and {{p.baz ename=$e}}"; + var promptTemplateConfig = new PromptTemplateConfig(template); + + // Act + var target = (KernelPromptTemplate)this._factory.Create(promptTemplateConfig); + + // Assert + Assert.Equal(6, promptTemplateConfig.InputVariables.Count); + Assert.Equal("x11", promptTemplateConfig.InputVariables[0].Name); + Assert.Equal("a", promptTemplateConfig.InputVariables[1].Name); + Assert.Equal("missing", promptTemplateConfig.InputVariables[2].Name); + Assert.Equal("b", promptTemplateConfig.InputVariables[3].Name); + Assert.Equal("d", promptTemplateConfig.InputVariables[4].Name); + Assert.Equal("e", promptTemplateConfig.InputVariables[5].Name); + } + + [Fact] + public void ItAllowsSameVariableInMultiplePositions() + { + 
// Arrange + var template = "This {{$a}} {{$a}} and {{p.bar $a}} and {{p.baz a=$a}}"; + var promptTemplateConfig = new PromptTemplateConfig(template); + + // Act + var target = (KernelPromptTemplate)this._factory.Create(promptTemplateConfig); + + // Assert + Assert.Single(promptTemplateConfig.InputVariables); + Assert.Equal("a", promptTemplateConfig.InputVariables[0].Name); + } + + [Fact] + public void ItAllowsSameVariableInMultiplePositionsCaseInsensitive() + { + // Arrange + var template = "{{$a}} {{$A}} and {{p.bar $a}} and {{p.baz A=$a}}"; + var promptTemplateConfig = new PromptTemplateConfig(template); + + // Act + var target = (KernelPromptTemplate)this._factory.Create(promptTemplateConfig); + + // Assert + Assert.Single(promptTemplateConfig.InputVariables); + Assert.Equal("a", promptTemplateConfig.InputVariables[0].Name); + } + + [Fact] + public void ItDoesNotDuplicateExistingParameters() + { + // Arrange + var template = "This {{$A}} and {{p.bar $B}} and {{p.baz C=$C}}"; + var promptTemplateConfig = new PromptTemplateConfig(template); + promptTemplateConfig.InputVariables.Add(new InputVariable { Name = "a" }); + promptTemplateConfig.InputVariables.Add(new InputVariable { Name = "b" }); + promptTemplateConfig.InputVariables.Add(new InputVariable { Name = "c" }); + + // Act + var target = (KernelPromptTemplate)this._factory.Create(promptTemplateConfig); + + // Assert + Assert.Equal(3, promptTemplateConfig.InputVariables.Count); + Assert.Equal("a", promptTemplateConfig.InputVariables[0].Name); + Assert.Equal("b", promptTemplateConfig.InputVariables[1].Name); + Assert.Equal("c", promptTemplateConfig.InputVariables[2].Name); + } + + [Fact] + public async Task ItRendersVariablesValuesAndFunctionsAsync() + { + // Arrange + var template = "This {{$x11}} {{$a}}{{$missing}} test template {{p.bar $b}} and {{p.food c='literal \"c\"' d = $d}}"; + + this._kernel.ImportPluginFromFunctions("p", new[] + { + KernelFunctionFactory.CreateFromMethod((string input) => "with function that accepts " + input, "bar"), + KernelFunctionFactory.CreateFromMethod((string c, string d) => "another one with " + c + d, "food"), + }); + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Arrange + this._arguments["x11"] = "is"; + this._arguments["a"] = "a"; + this._arguments["b"] = "the positional argument 'input'"; + this._arguments["d"] = " and 'd'"; + + // Act + var renderedPrompt = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("This is a test template with function that accepts the positional argument 'input' and another one with literal \"c\" and 'd'", renderedPrompt); + } + + [Fact] + public async Task ItThrowsExceptionIfTemplateReferencesFunctionThatIsNotRegisteredAsync() + { + // Arrange + var template = "This is a test template that references not registered function {{foo}}"; + + //No plugins/functions are registered with the API - this._kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions(...)); + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act and assert + await Assert.ThrowsAsync(async () => await target.RenderAsync(this._kernel, this._arguments)); + } + + [Fact] + public async Task ItInsertsEmptyStringIfNoArgumentProvidedForVariableAsync() + { + // Arrange + var template = "This is a test template that references variable that does not have argument {{$foo}}."; + + var target = (KernelPromptTemplate)this._factory.Create(new 
PromptTemplateConfig(template)); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.NotNull(result); + Assert.Equal("This is a test template that references variable that does not have argument .", result); + } + + [Fact] + public async Task ItInsertsEmptyStringIfNullArgumentProvidedForVariableAsync() + { + // Arrange + var template = "This is a test template that references variable that have null argument{{$foo}}."; + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + this._arguments["foo"] = null; + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.NotNull(result); + Assert.Equal("This is a test template that references variable that have null argument.", result); + } + + [Fact] + public async Task ItCallsMethodWithNullAsArgumentIfNoArgumentProvidedForMethodParameterAsync() + { + // Arrange + string? canary = string.Empty; //It's empty here and not null because the method will be called with a null string as argument + + void Foo(string input) + { + canary = input; + } + + this._kernel.ImportPluginFromFunctions("p", new[] { KernelFunctionFactory.CreateFromMethod(Foo, "bar") }); + + var template = "This is a test template that references variable that does not have argument. {{p.bar $foo}}."; + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act + await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Null(canary); + } + + [Fact] + public async Task ItCallsMethodWithNullAsArgumentIfNullArgumentProvidedForMethodParameterAsync() + { + // Arrange + string? canary = string.Empty; //It's empty here and not null because the method will be called with a null string as argument + + void Foo(string input) + { + canary = input; + } + + this._kernel.ImportPluginFromFunctions("p", new[] { KernelFunctionFactory.CreateFromMethod(Foo, "bar") }); + + var template = "This is a test template that references variable that have null argument{{p.bar $foo}}."; + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + this._arguments["foo"] = null; + + // Act + await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Null(canary); + } + + [Fact] + public async Task ItRendersPromptWithEmptyStringForVariableAndCallsMethodWithNullArgumentIfNullArgumentProvidedAsArgumentAsync() + { + // Arrange + string? canary = string.Empty; //It's empty here and not null because the method will be called with a null string as argument + + void Foo(string input) + { + canary = input; + } + + this._kernel.ImportPluginFromFunctions("p", new[] { KernelFunctionFactory.CreateFromMethod(Foo, "bar") }); + + var template = "This is a test template that {{$zoo}}references variables that have null arguments{{p.bar $foo}}."; + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + this._arguments["zoo"] = null; + this._arguments["foo"] = null; + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Null(canary); + Assert.NotNull(result); + Assert.Equal("This is a test template that references variables that have null arguments.", result); + } + + [Fact] + public async Task ItRendersPromptWithEmptyStringForVariableAndCallsMethodWithNullArgumentIfNoArgumentProvidedAsArgumentAsync() + { + // Arrange + string? 
canary = string.Empty; //It's empty here and not null because the method will be called with a null string as argument + + void Foo(string input) + { + canary = input; + } + + this._kernel.ImportPluginFromFunctions("p", new[] { KernelFunctionFactory.CreateFromMethod(Foo, "bar") }); + + var template = "This is a test template that {{$zoo}}references variables that do not have arguments{{p.bar $foo}}."; + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Null(canary); + Assert.NotNull(result); + Assert.Equal("This is a test template that references variables that do not have arguments.", result); + } + + [Fact] + public async Task ItRendersCodeUsingInputAsync() + { + // Arrange + string MyFunctionAsync(string input) + { + this._logger.WriteLine("MyFunction call received, input: {0}", input); + return $"F({input})"; + } + + var func = KernelFunctionFactory.CreateFromMethod(MyFunctionAsync, "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { func }); + + this._arguments[InputParameterName] = "INPUT-BAR"; + + var template = "foo-{{plugin.function}}-baz"; + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("foo-F(INPUT-BAR)-baz", result); + } + + [Fact] + public async Task ItRendersCodeUsingVariablesAsync() + { + // Arrange + string MyFunctionAsync(string input) + { + this._logger.WriteLine("MyFunction call received, input: {0}", input); + return $"F({input})"; + } + + var func = KernelFunctionFactory.CreateFromMethod(MyFunctionAsync, "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { func }); + + this._arguments["myVar"] = "BAR"; + var template = "foo-{{plugin.function $myVar}}-baz"; + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("foo-F(BAR)-baz", result); + } + + [Fact] + public async Task ItRendersCodeUsingNamedVariablesAsync() + { + // Arrange + string MyFunctionAsync( + [Description("Name")] string input, + [Description("Age")] int age, + [Description("Slogan")] string slogan, + [Description("Date")] DateTime date) + { + var dateStr = date.ToString(DateFormat, CultureInfo.InvariantCulture); + this._logger.WriteLine("MyFunction call received, name: {0}, age: {1}, slogan: {2}, date: {3}", input, age, slogan, date); + return $"[{dateStr}] {input} ({age}): \"{slogan}\""; + } + + var func = KernelFunctionFactory.CreateFromMethod(MyFunctionAsync, "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { func }); + + this._arguments[InputParameterName] = "Mario"; + this._arguments["someDate"] = "2023-08-25T00:00:00"; + + var template = "foo-{{plugin.function input=$input age='42' slogan='Let\\'s-a go!' 
date=$someDate}}-baz"; + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("foo-[8/25/2023] Mario (42): \"Let's-a go!\"-baz", result); + } + + [Fact] + public void ItHandlesSyntaxErrors() + { + // Arrange + this._arguments[InputParameterName] = "Mario"; + this._arguments["someDate"] = "2023-08-25T00:00:00"; + var template = "foo-{{function input=$input age=42 slogan='Let\\'s-a go!' date=$someDate}}-baz"; + + // Act + var result = Assert.Throws(() => this._factory.Create(new PromptTemplateConfig(template))); + + // Assert + Assert.Equal($"Named argument values need to be prefixed with a quote or {Symbols.VarPrefix}.", result.Message); + } + + [Fact] + public async Task ItRendersCodeUsingImplicitInputAndNamedVariablesAsync() + { + // Arrange + string MyFunctionAsync( + [Description("Input")] string input, + [Description("Age")] int age, + [Description("Slogan")] string slogan, + [Description("Date")] DateTime date) + { + this._logger.WriteLine("MyFunction call received, name: {0}, age: {1}, slogan: {2}, date: {3}", input, age, slogan, date); + var dateStr = date.ToString(DateFormat, CultureInfo.InvariantCulture); + return $"[{dateStr}] {input} ({age}): \"{slogan}\""; + } + + KernelFunction func = KernelFunctionFactory.CreateFromMethod(MyFunctionAsync, "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { func }); + + this._arguments[InputParameterName] = "Mario"; + this._arguments["someDate"] = "2023-08-25T00:00:00"; + + var template = "foo-{{plugin.function $input age='42' slogan='Let\\'s-a go!' date=$someDate}}-baz"; + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("foo-[8/25/2023] Mario (42): \"Let's-a go!\"-baz", result); + } + + [Fact] + public async Task ItRendersAsyncCodeUsingImmutableVariablesAsync() + { + // Arrange + var template = "{{func1}} {{func2}} {{func3 $myVar}}"; + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + this._arguments[InputParameterName] = "A"; + this._arguments["myVar"] = "C"; + + string MyFunction1Async(string input) + { + return input; + } + string MyFunction2Async(string input) + { + return "B"; + } + string MyFunction3Async(string myVar) + { + return myVar; + } + + var functions = new List() + { + KernelFunctionFactory.CreateFromMethod(MyFunction1Async, "func1"), + KernelFunctionFactory.CreateFromMethod(MyFunction2Async, "func2"), + KernelFunctionFactory.CreateFromMethod(MyFunction3Async, "func3") + }; + + this._kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("plugin", "description", functions)); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("A B C", result); + } + + [Fact] + public async Task ItRendersAsyncCodeUsingVariablesAsync() + { + // Arrange + Task MyFunctionAsync(string input) + { + // Input value should be "BAR" because the variable $myVar is passed in + this._logger.WriteLine("MyFunction call received, input: {0}", input); + return Task.FromResult(input); + } + + KernelFunction func = KernelFunctionFactory.CreateFromMethod(MyFunctionAsync, "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { func }); + + this._arguments["myVar"] = "BAR"; + + var template = "foo-{{plugin.function 
$myVar}}-baz"; + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act + var result = await target.RenderAsync(this._kernel, this._arguments); + + // Assert + Assert.Equal("foo-BAR-baz", result); + } + + [Fact] + public async Task RenderVarValuesFunctionWithDiffArgTypesAsync() + { + // Arrange + int expected_i = 42; + double expected_d = 36.6; + string expected_s = "test"; + Guid expected_g = new("7ac656b1-c917-41c8-9ff5-e8f0eb51fbac"); + DateTime expected_dt = DateTime.ParseExact("05.12.2023 17:52", "dd.MM.yyyy HH:mm", CultureInfo.InvariantCulture); + DayOfWeek expected_e = DayOfWeek.Monday; + + KernelFunction func = KernelFunctionFactory.CreateFromMethod((string input, Guid g) => + { + Assert.Equal(expected_s, input); + Assert.Equal(expected_g, g); + + return $"string:{input}, Guid:{g}"; + }, + "f"); + + this._kernel.Culture = new CultureInfo("fr-FR"); //In French culture, a comma is used as a decimal separator, and a slash is used as a date separator. See the Assert below. + this._kernel.ImportPluginFromFunctions("p", new[] { func }); + + var template = "int:{{$i}}, double:{{$d}}, {{p.f $s g=$g}}, DateTime:{{$dt}}, enum:{{$e}}"; + + var target = (KernelPromptTemplate)this._factory.Create(new PromptTemplateConfig(template)); + + // Act + var result = await target.RenderAsync(this._kernel, new() + { + ["i"] = expected_i, + ["d"] = expected_d, + ["s"] = expected_s, + ["g"] = expected_g, + ["dt"] = expected_dt, + ["e"] = expected_e, + }); + + // Assert + Assert.Equal("int:42, double:36,6, string:test, Guid:7ac656b1-c917-41c8-9ff5-e8f0eb51fbac, DateTime:05/12/2023 17:52, enum:Monday", result); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs new file mode 100644 index 000000000000..964f368acb33 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs @@ -0,0 +1,269 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.UnitTests.PromptTemplate; + +public class PromptTemplateConfigTests +{ + [Fact] + public void DeserializingDoNotExpectChatSystemPromptToExist() + { + // Arrange + string configPayload = @"{ + ""max_tokens"": 60, + ""temperature"": 0.5, + ""top_p"": 0.0, + ""presence_penalty"": 0.0, + ""frequency_penalty"": 0.0 + }"; + + // Act + var settings = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(settings); + Assert.NotNull(settings.ChatSystemPrompt); + Assert.Equal("Assistant is a large language model.", settings.ChatSystemPrompt); + } + + [Fact] + public void DeserializingExpectChatSystemPromptToExists() + { + // Arrange + string configPayload = @"{ + ""max_tokens"": 60, + ""temperature"": 0.5, + ""top_p"": 0.0, + ""presence_penalty"": 0.0, + ""frequency_penalty"": 0.0, + ""chat_system_prompt"": ""I am a prompt"" + }"; + + // Act + var settings = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(settings); + Assert.NotNull(settings.ChatSystemPrompt); + Assert.Equal("I am a prompt", settings.ChatSystemPrompt); + } + + [Fact] + public void DeserializingExpectMultipleModels() + { + // Arrange + string configPayload = @" +{ + ""schema"": 1, + ""description"": """", + ""execution_settings"": + { + ""service1"": { + ""model_id"": ""gpt-4"", + ""max_tokens"": 200, + ""temperature"": 0.2, + ""top_p"": 0.0, + ""presence_penalty"": 0.0, + ""frequency_penalty"": 0.0, + ""stop_sequences"": + [ + ""Human"", + ""AI"" + ] + }, + ""service2"": { + ""model_id"": ""gpt-3.5_turbo"", + ""max_tokens"": 256, + ""temperature"": 0.3, + ""top_p"": 0.0, + ""presence_penalty"": 0.0, + ""frequency_penalty"": 0.0, + ""stop_sequences"": + [ + ""Human"", + ""AI"" + ] + } + } +}"; + + // Act + var promptTemplateConfig = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.NotNull(promptTemplateConfig.ExecutionSettings); + Assert.Equal(2, promptTemplateConfig.ExecutionSettings.Count); + } + + [Fact] + public void DeserializingExpectCompletion() + { + // Arrange + string configPayload = @" +{ + ""schema"": 1, + ""description"": """", + ""execution_settings"": + { + ""default"": { + ""model_id"": ""gpt-4"", + ""max_tokens"": 200, + ""temperature"": 0.2, + ""top_p"": 0.0, + ""presence_penalty"": 0.0, + ""frequency_penalty"": 0.0, + ""stop_sequences"": + [ + ""Human"", + ""AI"" + ] + } + } +}"; + + // Act + var promptTemplateConfig = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.NotNull(promptTemplateConfig.DefaultExecutionSettings); + Assert.Equal("gpt-4", promptTemplateConfig.DefaultExecutionSettings?.ModelId); + } + + [Fact] + public void DeserializingExpectInputVariables() + { + // Arrange + string configPayload = @" +{ + ""description"": ""function description"", + ""input_variables"": + [ + { + ""name"": ""input variable name"", + ""description"": ""input variable description"", + ""default"": ""default value"", + ""is_required"": true + } + ] +}"; + + // Act + var promptTemplateConfig = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.NotNull(promptTemplateConfig.InputVariables); + Assert.Single(promptTemplateConfig.InputVariables); + Assert.Equal("input variable name", promptTemplateConfig.InputVariables[0].Name); + Assert.Equal("input 
variable description", promptTemplateConfig.InputVariables[0].Description); + Assert.Equal("default value", promptTemplateConfig.InputVariables[0].Default?.ToString()); + Assert.True(promptTemplateConfig.InputVariables[0].IsRequired); + } + + [Fact] + public void DeserializingExpectOutputVariable() + { + // Arrange + string configPayload = @" +{ + ""description"": ""function description"", + ""output_variable"": + { + ""description"": ""output variable description"" + } +}"; + + // Act + var promptTemplateConfig = JsonSerializer.Deserialize(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.NotNull(promptTemplateConfig.OutputVariable); + Assert.Equal("output variable description", promptTemplateConfig.OutputVariable.Description); + } + + [Fact] + public void ItShouldDeserializeConfigWithDefaultValueOfStringType() + { + // Arrange + static string CreateJson(object defaultValue) + { + var obj = new + { + description = "function description", + input_variables = new[] + { + new + { + name = "name", + description = "description", + @default = defaultValue, + isRequired = true + } + } + }; + + return JsonSerializer.Serialize(obj); + } + + // string + var json = CreateJson((string)"123"); + var config = PromptTemplateConfig.FromJson(json); + + Assert.NotNull(config?.InputVariables); + Assert.Equal("123", config.InputVariables[0].Default?.ToString()); + } + + [Fact] + // This test checks that the logic of imposing a temporary limitation on the default value being a string is in place and works as expected. + public void ItShouldThrowExceptionWhenDeserializingConfigWithDefaultValueOtherThanString() + { + // Arrange + static string CreateJson(object defaultValue) + { + var obj = new + { + description = "function description", + input_variables = new[] + { + new + { + name = "name", + description = "description", + @default = defaultValue, + isRequired = true + } + } + }; + + return JsonSerializer.Serialize(obj); + } + + // int + var json = CreateJson((int)1); + Assert.Throws(() => PromptTemplateConfig.FromJson(json)); + + // double + json = CreateJson((double)1.1); + Assert.Throws(() => PromptTemplateConfig.FromJson(json)); + + // bool + json = CreateJson((bool)true); + Assert.Throws(() => PromptTemplateConfig.FromJson(json)); + + // array + json = CreateJson(new[] { "1", "2", "3" }); + Assert.Throws(() => PromptTemplateConfig.FromJson(json)); + + // object + json = CreateJson(new { p1 = "v1" }); + Assert.Throws(() => PromptTemplateConfig.FromJson(json)); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Reliability/NullHttpRetryHandlerTests.cs b/dotnet/src/SemanticKernel.UnitTests/Reliability/NullHttpRetryHandlerTests.cs deleted file mode 100644 index c6f6fee99501..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Reliability/NullHttpRetryHandlerTests.cs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Reliability; -using Moq; -using Moq.Protected; -using Xunit; - -namespace SemanticKernel.UnitTests.Reliability; - -public class NullHttpRetryHandlerTests -{ - [Fact] - public async Task ItDoesNotRetryOnExceptionAsync() - { - // Arrange - using var retry = new NullHttpRetryHandler(); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.TooManyRequests); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.TooManyRequests, response.StatusCode); - } - - [Fact] - public async Task NoExceptionNoRetryAsync() - { - // Arrange - using var retry = new NullHttpRetryHandler(); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.OK); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, CancellationToken.None); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - } - - [Fact] - public async Task TaskCanceledExceptionThrownOnCancellationTokenAsync() - { - // Arrange - using var retry = new NullHttpRetryHandler(); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.TooManyRequests); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - using var cancellationTokenSource = new CancellationTokenSource(); - cancellationTokenSource.Cancel(); - - // Act - await Assert.ThrowsAsync(async () => - await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, cancellationTokenSource.Token)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - } - - [Fact] - public async Task ItDoestExecuteOnFalseCancellationTokenAsync() - { - // Arrange - using var retry = new NullHttpRetryHandler(); - using var mockResponse = new HttpResponseMessage(HttpStatusCode.TooManyRequests); - using var testContent = new StringContent("test"); - var mockHandler = GetHttpMessageHandlerMock(mockResponse); - retry.InnerHandler = mockHandler.Object; - using var httpClient = new HttpClient(retry); - - // Act - var response = await httpClient.PostAsync(new Uri("https://www.microsoft.com"), testContent, new CancellationToken(false)); - - // Assert - mockHandler.Protected() - .Verify>("SendAsync", Times.Once(), ItExpr.IsAny(), ItExpr.IsAny()); - Assert.Equal(HttpStatusCode.TooManyRequests, response.StatusCode); - } - - private static Mock GetHttpMessageHandlerMock(HttpResponseMessage mockResponse) - { - var mockHandler = new Mock(); - mockHandler.Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(mockResponse); - return 
mockHandler; - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj index 7bb88e3fa440..7d5e0d56ed52 100644 --- a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj +++ b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj @@ -3,14 +3,19 @@ SemanticKernel.UnitTests SemanticKernel.UnitTests - net6.0 + net6.0 LatestMajor true false - CA2007,VSTHRD111 + 12 + CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0002,SKEXP0003,SKEXP0004,SKEXP0055 - + + + + + @@ -25,12 +30,19 @@ all + + + - + + + + + diff --git a/dotnet/src/SemanticKernel.UnitTests/Services/ServiceRegistryTests.cs b/dotnet/src/SemanticKernel.UnitTests/Services/ServiceRegistryTests.cs deleted file mode 100644 index 346d402bbccc..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Services/ServiceRegistryTests.cs +++ /dev/null @@ -1,219 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Services; -using Xunit; - -namespace SemanticKernel.UnitTests.Services; - -/// -/// Unit tests of . -/// -public class ServiceRegistryTests -{ - [Fact] - public void ItCanSetAndRetrieveServiceInstance() - { - // Arrange - var services = new AIServiceCollection(); - var service = new TestService(); - - // Act - services.SetService(service); - var provider = services.Build(); - var result = provider.GetService(); - - // Assert - Assert.Same(service, result); - } - - [Fact] - public void ItCanSetAndRetrieveServiceInstanceWithName() - { - // Arrange - var services = new AIServiceCollection(); - var service1 = new TestService(); - var service2 = new TestService(); - - // Act - services.SetService("foo", service1); - services.SetService("bar", service2); - var provider = services.Build(); - - // Assert - Assert.Same(service1, provider.GetService("foo")); - Assert.Same(service2, provider.GetService("bar")); - } - - [Fact] - public void ItCanSetAndRetrieveServiceFactory() - { - // Arrange - var services = new AIServiceCollection(); - var service = new TestService(); - - // Act - services.SetService(() => service); - var provider = services.Build(); - - // Assert - Assert.Same(service, provider.GetService()); - } - - [Fact] - public void ItCanSetAndRetrieveServiceFactoryWithName() - { - // Arrange - var services = new AIServiceCollection(); - var service1 = new TestService(); - var service2 = new TestService(); - - // Act - services.SetService("foo", () => service1); - services.SetService("bar", () => service2); - var provider = services.Build(); - - // Assert - Assert.Same(service1, provider.GetService("foo")); - Assert.Same(service2, provider.GetService("bar")); - } - - [Fact] - public void ItCanSetAndRetrieveServiceFactoryWithServiceProvider() - { - // Arrange - var services = new AIServiceCollection(); - var service = new TestService(); - - // Act - services.SetService(() => service); - var provider = services.Build(); - - // Assert - Assert.Same(service, provider.GetService()); - } - - [Fact] - public void ItCanSetAndRetrieveServiceFactoryWithServiceProviderAndName() - { - // Arrange - var services = new AIServiceCollection(); - var service1 = new TestService(); - var service2 = new TestService(); - - // Act - services.SetService("foo", () => service1); - services.SetService("bar", () => service2); - var provider = services.Build(); - - // Assert - Assert.Same(service1, provider.GetService("foo")); - Assert.Same(service2, provider.GetService("bar")); - } - - [Fact] - public void 
ItCanSetDefaultService() - { - // Arrange - var services = new AIServiceCollection(); - var service1 = new TestService(); - var service2 = new TestService(); - - // Act - services.SetService("foo", service1); - services.SetService("bar", service2, setAsDefault: true); - var provider = services.Build(); - - // Assert - Assert.Same(service2, provider.GetService()); - } - - [Fact] - public void ItCanSetDefaultServiceFactory() - { - // Arrange - var services = new AIServiceCollection(); - var service1 = new TestService(); - var service2 = new TestService(); - - // Act - services.SetService("foo", () => service1); - services.SetService("bar", () => service2, setAsDefault: true); - var provider = services.Build(); - - // Assert - Assert.Same(service2, provider.GetService()); - } - - [Fact] - public void ItCanSetDefaultServiceFactoryWithServiceProvider() - { - // Arrange - var services = new AIServiceCollection(); - var service1 = new TestService(); - var service2 = new TestService(); - - // Act - services.SetService("foo", () => service1); - services.SetService("bar", () => service2, setAsDefault: true); - var provider = services.Build(); - - // Assert - Assert.Same(service2, provider.GetService()); - } - - [Fact] - public void ItCanTryGetService() - { - // Arrange - var services = new AIServiceCollection(); - var service = new TestService(); - services.SetService(service); - var provider = services.Build(); - - // Act - var result = provider.TryGetService(out IAIService? retrieved); - - // Assert - Assert.True(result); - Assert.Same(service, retrieved); - } - - [Fact] - public void ItCanTryGetServiceWithName() - { - // Arrange - var services = new AIServiceCollection(); - var service = new TestService(); - services.SetService("foo", service); - var provider = services.Build(); - - // Act - var result = provider.TryGetService("foo", out IAIService? retrieved); - - // Assert - Assert.True(result); - Assert.Same(service, retrieved); - } - - [Fact] - public void ItReturnsFalseIfTryGetServiceWithInvalidName() - { - // Arrange - var services = new AIServiceCollection(); - var service = new TestService(); - services.SetService("foo", service); - var provider = services.Build(); - - // Act - var result = provider.TryGetService("bar", out IAIService? retrieved); - - // Assert - Assert.False(result); - Assert.Null(retrieved); - } - - // A test service implementation - private sealed class TestService : IAIService - { - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/CodeBlockTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/CodeBlockTests.cs new file mode 100644 index 000000000000..e9beab7c851a --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/CodeBlockTests.cs @@ -0,0 +1,500 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TemplateEngine; +using Microsoft.SemanticKernel.TextGeneration; +using Moq; +using Xunit; + +namespace SemanticKernel.UnitTests.TemplateEngine; + +public class CodeBlockTests +{ + private readonly Kernel _kernel = new(); + + [Fact] + public async Task ItThrowsIfAFunctionDoesntExistAsync() + { + // Arrange + var target = new CodeBlock("functionName"); + + // Act & Assert + await Assert.ThrowsAsync(async () => await target.RenderCodeAsync(this._kernel)); + } + + [Fact] + public async Task ItThrowsIfAFunctionCallThrowsAsync() + { + // Arrange + static void method() => throw new FormatException("error"); + var function = KernelFunctionFactory.CreateFromMethod(method, "function", "description"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { function }); + + var target = new CodeBlock("plugin.function"); + + // Act & Assert + await Assert.ThrowsAsync(async () => await target.RenderCodeAsync(this._kernel)); + } + + [Fact] + public void ItHasTheCorrectType() + { + // Act + var target = new CodeBlock(""); + + // Assert + Assert.Equal(BlockTypes.Code, target.Type); + } + + [Fact] + public void ItTrimsSpaces() + { + // Act + Assert + Assert.Equal("aa", new CodeBlock(" aa ").Content); + } + + [Fact] + public void ItChecksValidityOfInternalBlocks() + { + // Arrange + var validBlock1 = new FunctionIdBlock("x"); + var validBlock2 = new ValBlock("''"); + var invalidBlock = new VarBlock(""); + + // Act + var codeBlock1 = new CodeBlock(new List { validBlock1, validBlock2 }, ""); + var codeBlock2 = new CodeBlock(new List { validBlock1, invalidBlock }, ""); + + // Assert + Assert.True(codeBlock1.IsValid(out _)); + Assert.False(codeBlock2.IsValid(out _)); + } + + [Fact] + public void ItRequiresAValidFunctionCall() + { + // Arrange + var funcId = new FunctionIdBlock("funcName"); + var valBlock = new ValBlock("'value'"); + var varBlock = new VarBlock("$var"); + var namedArgBlock = new NamedArgBlock("varName='foo'"); + + // Act + var codeBlock1 = new CodeBlock(new List { funcId, valBlock }, ""); + var codeBlock2 = new CodeBlock(new List { funcId, varBlock }, ""); + var codeBlock3 = new CodeBlock(new List { funcId, funcId }, ""); + var codeBlock4 = new CodeBlock(new List { funcId, varBlock, varBlock }, ""); + var codeBlock5 = new CodeBlock(new List { funcId, varBlock, namedArgBlock }, ""); + var codeBlock6 = new CodeBlock(new List { varBlock, valBlock }, ""); + var codeBlock7 = new CodeBlock(new List { namedArgBlock }, ""); + + // Assert + Assert.True(codeBlock1.IsValid(out _)); + Assert.True(codeBlock2.IsValid(out _)); + + // Assert - Can't pass a function to a function + Assert.False(codeBlock3.IsValid(out var errorMessage3)); + Assert.Equal("The first arg of a function must be a quoted string, variable or named argument", errorMessage3); + + // Assert - Can't pass more than one unnamed param + Assert.False(codeBlock4.IsValid(out var errorMessage4)); + Assert.Equal("Functions only support named arguments after the first argument. 
Argument 2 is not named.", errorMessage4); + + // Assert - Can pass one unnamed param and named args + Assert.True(codeBlock5.IsValid(out var errorMessage5)); + Assert.Empty(errorMessage5); + + // Assert - Can't use > 1 block if not a function call + Assert.False(codeBlock6.IsValid(out var errorMessage6)); + Assert.Equal("Unexpected second token found: 'value'", errorMessage6); + + // Assert - Can't use a named argument without a function block + Assert.False(codeBlock7.IsValid(out var errorMessage7)); + Assert.Equal("Unexpected named argument found. Expected function name first.", errorMessage7); + } + + [Fact] + public async Task ItRendersCodeBlockConsistingOfJustAVarBlock1Async() + { + // Arrange + var arguments = new KernelArguments { ["varName"] = "foo" }; + + // Act + var codeBlock = new CodeBlock("$varName"); + var result = await codeBlock.RenderCodeAsync(this._kernel, arguments); + + // Assert + Assert.Equal("foo", result); + } + + [Fact] + public async Task ItRendersCodeBlockConsistingOfJustAVarBlock2Async() + { + // Arrange + var arguments = new KernelArguments { ["varName"] = "bar" }; + var varBlock = new VarBlock("$varName"); + + // Act + var codeBlock = new CodeBlock(new List { varBlock }, ""); + var result = await codeBlock.RenderCodeAsync(this._kernel, arguments); + + // Assert + Assert.Equal("bar", result); + } + + [Fact] + public async Task ItRendersCodeBlockConsistingOfJustAValBlock1Async() + { + // Arrange + var codeBlock = new CodeBlock("'ciao'"); + + // Act + var result = await codeBlock.RenderCodeAsync(this._kernel); + + // Assert + Assert.Equal("ciao", result); + } + + [Fact] + public async Task ItRendersCodeBlockConsistingOfJustAValBlock2Async() + { + // Arrange + var valBlock = new ValBlock("'arrivederci'"); + + // Act + var codeBlock = new CodeBlock(new List { valBlock }, ""); + var result = await codeBlock.RenderCodeAsync(this._kernel); + + // Assert + Assert.Equal("arrivederci", result); + } + + [Fact] + public async Task ItInvokesFunctionWithCustomVariableAsync() + { + // Arrange + const string Var = "varName"; + const string VarValue = "varValue"; + + var arguments = new KernelArguments { [Var] = VarValue }; + var funcId = new FunctionIdBlock("plugin.function"); + var varBlock = new VarBlock($"${Var}"); + + var canary = string.Empty; + + var function = KernelFunctionFactory.CreateFromMethod((string input) => + { + canary = input; + }, + "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { function }); + + // Act + var codeBlock = new CodeBlock(new List { funcId, varBlock }, ""); + var result = await codeBlock.RenderCodeAsync(this._kernel, arguments); + + // Assert + Assert.Null(result); + Assert.Equal(VarValue, canary); + } + + [Fact] + public async Task ItInvokesFunctionWithCustomValueAsync() + { + // Arrange + const string Value = "value"; + + var funcBlock = new FunctionIdBlock("plugin.function"); + var valBlock = new ValBlock($"'{Value}'"); + + var canary = string.Empty; + + var function = KernelFunctionFactory.CreateFromMethod((string input) => + { + canary = input; + }, + "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { function }); + + // Act + var codeBlock = new CodeBlock(new List { funcBlock, valBlock }, ""); + var result = await codeBlock.RenderCodeAsync(this._kernel); + + // Assert + Assert.Null(result); + Assert.Equal(Value, canary); + } + + [Fact] + public async Task ItInvokesFunctionWithNamedArgsAsync() + { + // Arrange + const string Value = "value"; + const string FooValue = "bar"; + const string 
BobValue = "bob's value"; + + var arguments = new KernelArguments(); + arguments["bob"] = BobValue; + arguments["input"] = Value; + + var funcId = new FunctionIdBlock("plugin.function"); + var namedArgBlock1 = new NamedArgBlock($"foo='{FooValue}'"); + var namedArgBlock2 = new NamedArgBlock("baz=$bob"); + + var actualFoo = string.Empty; + var actualBaz = string.Empty; + + var function = KernelFunctionFactory.CreateFromMethod((string foo, string baz) => + { + actualFoo = foo; + actualBaz = baz; + }, + "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { function }); + + // Act + var codeBlock = new CodeBlock(new List<Block> { funcId, namedArgBlock1, namedArgBlock2 }, ""); + var result = await codeBlock.RenderCodeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(FooValue, actualFoo); + Assert.Equal(BobValue, actualBaz); + Assert.Null(result); + } + + [Fact] + public async Task ItReturnsArgumentValueAndTypeAsync() + { + // Arrange + object expectedValue = new(); + object? canary = null; + + var funcId = new FunctionIdBlock("p.f"); + var varBlock = new VarBlock("$var"); + var namedArgBlock = new NamedArgBlock("p1=$a1"); + + this._kernel.ImportPluginFromFunctions("p", new[] { KernelFunctionFactory.CreateFromMethod((object p1) => + { + canary = p1; + }, "f") }); + + // Act + var functionWithPositionedArgument = new CodeBlock(new List<Block> { funcId, varBlock }, ""); + var functionWithNamedArgument = new CodeBlock(new List<Block> { funcId, namedArgBlock }, ""); + var variable = new CodeBlock(new List<Block> { varBlock }, ""); + + // Assert function positional argument passed to the function with no changes + await functionWithPositionedArgument.RenderCodeAsync(this._kernel, new() { ["p1"] = expectedValue, ["var"] = expectedValue }); + Assert.Same(expectedValue, canary); // Ensuring that the two variables point to the same object, as there is no other way to verify that the argument has not been transformed from object -> string -> object during the process.
+ + // Assert function named argument passed to the the function with no changes + await functionWithNamedArgument.RenderCodeAsync(this._kernel, new() { ["p1"] = expectedValue, ["a1"] = expectedValue }); + Assert.Same(expectedValue, canary); + + // Assert argument assigned to a variable with no changes + await variable.RenderCodeAsync(this._kernel, new() { ["var"] = expectedValue }); + Assert.Same(expectedValue, canary); + } + + [Fact] + public async Task ItDoesNotMutateOriginalArgumentsAsync() + { + // Arrange + const string Value = "value"; + const string FooValue = "bar"; + const string BobValue = "bob's value"; + + var arguments = new KernelArguments(); + arguments["bob"] = BobValue; + arguments["input"] = Value; + + var funcId = new FunctionIdBlock("plugin.function"); + var namedArgBlock1 = new NamedArgBlock($"foo='{FooValue}'"); + var namedArgBlock2 = new NamedArgBlock("baz=$bob"); + + var function = KernelFunctionFactory.CreateFromMethod((string foo, string baz) => { }, "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { function }); + + // Act + var codeBlock = new CodeBlock(new List { funcId, namedArgBlock1, namedArgBlock2 }, ""); + await codeBlock.RenderCodeAsync(this._kernel, arguments); + + // Assert + Assert.Equal(2, arguments.Count); + } + + [Theory] + [InlineData(1)] + [InlineData(2)] + public async Task ItThrowsWhenArgumentsAreProvidedToAParameterlessFunctionAsync(int numberOfArguments) + { + // Arrange + const string Value = "value"; + const string FooValue = "foo's value"; + const string BobValue = "bob's value"; + + var arguments = new KernelArguments(); + arguments["bob"] = BobValue; + arguments["input"] = Value; + + var blockList = new List + { + new FunctionIdBlock("plugin.function"), + new ValBlock($"'{FooValue}'") + }; + + if (numberOfArguments == 2) + { + blockList.Add(new NamedArgBlock("foo=$foo")); + } + + var actualFoo = string.Empty; + var actualBaz = string.Empty; + + var function = KernelFunctionFactory.CreateFromMethod(() => { }, "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { function }); + + // Act + var codeBlock = new CodeBlock(blockList, ""); + var exception = await Assert.ThrowsAsync(async () => await codeBlock.RenderCodeAsync(this._kernel, arguments)); + Assert.Contains($"does not take any arguments but it is being called in the template with {numberOfArguments} arguments.", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Theory] + [InlineData("x11")] + [InlineData("firstParameter")] + [InlineData("anything")] + public async Task ItCallsPromptFunctionWithPositionalTargetFirstArgumentRegardlessOfNameAsync(string parameterName) + { + const string FooValue = "foo's value"; + var mockTextContent = new TextContent("Result"); + var mockTextCompletion = new Mock(); + mockTextCompletion.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent }); + + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(mockTextCompletion.Object); + var kernel = builder.Build(); + + var blockList = new List + { + new FunctionIdBlock("Plugin1.Function1"), + new ValBlock($"'{FooValue}'") + }; + + kernel.ImportPluginFromFunctions("Plugin1", functions: new[] + { + kernel.CreateFunctionFromPrompt( + promptTemplate: $"\"This {{{{${parameterName}}}}}", + functionName: "Function1") + } + ); + +#pragma warning disable CS0618 // Events are deprecated + kernel.PromptRendering += (object? 
sender, PromptRenderingEventArgs e) => + { + Assert.Equal(FooValue, e.Arguments[parameterName]); + }; + + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + { + Assert.Equal(FooValue, e.Arguments[parameterName]); + }; +#pragma warning restore CS0618 // Events are deprecated + + var codeBlock = new CodeBlock(blockList, ""); + await codeBlock.RenderCodeAsync(kernel); + } + + [Fact] + public async Task ItCallsPromptFunctionMatchArgumentWithNamedArgsAsync() + { + const string FooValue = "foo's value"; + var mockTextContent = new TextContent("Result"); + var mockTextCompletion = new Mock(); + mockTextCompletion.Setup(m => m.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync(new List { mockTextContent }); + + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(mockTextCompletion.Object); + var kernel = builder.Build(); + + var arguments = new KernelArguments(); + arguments["foo"] = FooValue; + + var blockList = new List + { + new FunctionIdBlock("Plugin1.Function1"), + new NamedArgBlock("x11=$foo"), + new NamedArgBlock("x12='new'") // Extra parameters are ignored + }; + + kernel.ImportPluginFromFunctions("Plugin1", functions: new[] + { + kernel.CreateFunctionFromPrompt( + promptTemplate: "\"This {{$x11}}", + functionName: "Function1") + } + ); + +#pragma warning disable CS0618 // Events are deprecated + kernel.PromptRendering += (object? sender, PromptRenderingEventArgs e) => + { + Assert.Equal(FooValue, e.Arguments["foo"]); + Assert.Equal(FooValue, e.Arguments["x11"]); + }; + + kernel.FunctionInvoking += (object? sender, FunctionInvokingEventArgs e) => + { + Assert.Equal(FooValue, e.Arguments["foo"]); + Assert.Equal(FooValue, e.Arguments["x11"]); + }; +#pragma warning restore CS0618 // Events are deprecated + + var codeBlock = new CodeBlock(blockList, ""); + await codeBlock.RenderCodeAsync(kernel, arguments); + } + + [Fact] + public async Task ItThrowsWhenArgumentsAreAmbiguousAsync() + { + // Arrange + const string Value = "value"; + const string FooValue = "foo's value"; + const string BobValue = "bob's value"; + + var arguments = new KernelArguments(); + arguments["bob"] = BobValue; + arguments["input"] = Value; + + var funcId = new FunctionIdBlock("plugin.function"); + var namedArgBlock1 = new ValBlock($"'{FooValue}'"); + var namedArgBlock2 = new NamedArgBlock("foo=$foo"); + + var actualFoo = string.Empty; + var actualBaz = string.Empty; + + var function = KernelFunctionFactory.CreateFromMethod((string foo, string baz) => + { + actualFoo = foo; + actualBaz = baz; + }, + "function"); + + this._kernel.ImportPluginFromFunctions("plugin", new[] { function }); + + // Act + var codeBlock = new CodeBlock(new List { funcId, namedArgBlock1, namedArgBlock2 }, ""); + var exception = await Assert.ThrowsAsync(async () => await codeBlock.RenderCodeAsync(this._kernel, arguments)); + Assert.Contains(FooValue, exception.Message, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/FunctionIdBlockTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/FunctionIdBlockTests.cs similarity index 82% rename from dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/FunctionIdBlockTests.cs rename to dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/FunctionIdBlockTests.cs index b5b7e884da88..aab336594a57 100644 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/FunctionIdBlockTests.cs +++ 
b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/FunctionIdBlockTests.cs @@ -1,11 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TemplateEngine; using Xunit; -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt.Blocks; +namespace SemanticKernel.UnitTests.TemplateEngine; public class FunctionIdBlockTests { @@ -13,7 +12,7 @@ public class FunctionIdBlockTests public void ItHasTheCorrectType() { // Act - var target = new FunctionIdBlock("", NullLoggerFactory.Instance); + var target = new FunctionIdBlock(""); // Assert Assert.Equal(BlockTypes.FunctionId, target.Type); @@ -23,7 +22,7 @@ public void ItHasTheCorrectType() public void ItTrimsSpaces() { // Act + Assert - Assert.Equal("aa", new FunctionIdBlock(" aa ", NullLoggerFactory.Instance).Content); + Assert.Equal("aa", new FunctionIdBlock(" aa ").Content); } [Theory] @@ -86,7 +85,7 @@ public void ItAllowsOnlyOneDot() // Arrange var target1 = new FunctionIdBlock("functionName"); var target2 = new FunctionIdBlock("pluginName.functionName"); - Assert.Throws(() => new FunctionIdBlock("foo.pluginName.functionName")); + Assert.Throws(() => new FunctionIdBlock("foo.pluginName.functionName")); // Act + Assert Assert.True(target1.IsValid(out _)); diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/NamedArgBlockTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/NamedArgBlockTests.cs similarity index 78% rename from dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/NamedArgBlockTests.cs rename to dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/NamedArgBlockTests.cs index 507f0af0c178..2e6fb7052ecf 100644 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/NamedArgBlockTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/NamedArgBlockTests.cs @@ -1,12 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TemplateEngine; using Xunit; -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt.Blocks; +namespace SemanticKernel.UnitTests.TemplateEngine; public class NamedArgBlockTests { @@ -14,7 +12,7 @@ public class NamedArgBlockTests public void ItHasTheCorrectType() { // Act - var target = new NamedArgBlock("a=$b", NullLoggerFactory.Instance); + var target = new NamedArgBlock("a=$b"); // Assert Assert.Equal(BlockTypes.NamedArg, target.Type); @@ -30,7 +28,7 @@ public void ItHasTheCorrectType() public void ItTrimsSpaces(string input, string expected) { // Act + Assert - Assert.Equal(expected, new NamedArgBlock(input, NullLoggerFactory.Instance).Content); + Assert.Equal(expected, new NamedArgBlock(input).Content); } [Theory] @@ -117,17 +115,17 @@ public void ArgValueNeedsQuoteOrDollarSignPrefix() public void ArgNameShouldBeNonEmpty() { // Arrange - var target = new NamedArgBlock("='b'"); + static NamedArgBlock funcToTest() => new("='b'"); // Act + Assert - Assert.False(target.IsValid(out var error)); - Assert.Equal("A named argument must have a name", error); + KernelException exception = Assert.Throws(funcToTest); + Assert.Equal("A function named argument must contain a name and value separated by a '=' character.", exception.Message); } [Fact] public void ArgValueShouldBeNonEmpty() { - Assert.Throws(() => new NamedArgBlock("a=")); + Assert.Throws(() => new NamedArgBlock("a=")); } [Theory] @@ -207,7 +205,6 @@ public void ArgValueAllowsVariablesWithUnderscoreLettersAndDigits(string name, b { // Arrange var target = new NamedArgBlock($"a=${name}"); - var variables = new ContextVariables { [name] = "value" }; // Act + Assert Assert.Equal(isValid, target.IsValid(out _)); @@ -220,12 +217,60 @@ public void ItRequiresOneEquals() var target1 = new NamedArgBlock("a='b'"); var target2 = new NamedArgBlock("a=$b"); var target3 = new NamedArgBlock("a=\"b\""); - Assert.Throws(() => new NamedArgBlock("foo")); - Assert.Throws(() => new NamedArgBlock("foo=$bar=$baz")); + Assert.Throws(() => new NamedArgBlock("foo")); + Assert.Throws(() => new NamedArgBlock("foo=$bar=$baz")); // Act + Assert Assert.True(target1.IsValid(out _)); Assert.True(target2.IsValid(out _)); Assert.True(target3.IsValid(out _)); } + + [Fact] + public void ItReturnsArgumentsValueAndType() + { + // Arrange + var target = new NamedArgBlock("a=$var"); + var arguments = new KernelArguments() + { + ["var"] = (double)28.2, + }; + + // Act + var result = target.GetValue(arguments); + + // Assert + Assert.IsType(result); + Assert.Equal(28.2, result); + } + + [Fact] + public void ItRendersToNullWithNoArgument() + { + // Arrange + var target = new NamedArgBlock("a=$var"); + + // Act + var result = target.GetValue(new KernelArguments()); + + // Assert + Assert.Null(result); + } + + [Fact] + public void ItRendersToNullIfArgumentIsNull() + { + // Arrange + var target = new NamedArgBlock("a=$var"); + var arguments = new KernelArguments() + { + ["var"] = null + }; + + // Act + var result = target.GetValue(arguments); + + // Assert + Assert.Null(result); + } } diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/TextBlockTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/TextBlockTests.cs similarity index 91% rename from 
dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/TextBlockTests.cs rename to dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/TextBlockTests.cs index 278efa5e108d..6cc8eb640524 100644 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/TextBlockTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/TextBlockTests.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +using Microsoft.SemanticKernel.TemplateEngine; using Xunit; -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt.Blocks; +namespace SemanticKernel.UnitTests.TemplateEngine; public class TextBlockTests { diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/ValBlockTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/ValBlockTests.cs similarity index 92% rename from dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/ValBlockTests.cs rename to dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/ValBlockTests.cs index 90e5c10d9dbb..e840d025f889 100644 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/Blocks/ValBlockTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/ValBlockTests.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +using Microsoft.SemanticKernel.TemplateEngine; using Xunit; -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt.Blocks; +namespace SemanticKernel.UnitTests.TemplateEngine; public class ValBlockTests { diff --git a/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/VarBlockTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/VarBlockTests.cs new file mode 100644 index 000000000000..6dba0af78c94 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/Blocks/VarBlockTests.cs @@ -0,0 +1,178 @@ +// Copyright (c) Microsoft. All rights reserved. 
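+// Unit tests for VarBlock: content trimming, rendering against KernelArguments (including null and
+// missing values), and validation of allowed variable-name characters.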
+ +using System; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TemplateEngine; +using Xunit; + +namespace SemanticKernel.UnitTests.TemplateEngine; + +public class VarBlockTests +{ + [Fact] + public void ItHasTheCorrectType() + { + // Act + var target = new VarBlock(""); + + // Assert + Assert.Equal(BlockTypes.Variable, target.Type); + } + + [Fact] + public void ItTrimsSpaces() + { + // Act + Assert + Assert.Equal("$", new VarBlock(" $ ").Content); + } + + [Fact] + public void ItIgnoresSpacesAround() + { + // Act + var target = new VarBlock(" $var \n "); + + // Assert + Assert.Equal("$var", target.Content); + } + + [Fact] + public void ItRendersToNullWithNoArgument() + { + // Arrange + var target = new VarBlock("$var"); + + // Act + var result = target.Render(new KernelArguments()); + + // Assert + Assert.Null(result); + } + + [Fact] + public void ItRendersToNullWithNullArgument() + { + // Arrange + var target = new VarBlock("$var"); + var arguments = new KernelArguments() + { + ["$var"] = null + }; + + // Act + var result = target.Render(arguments); + + // Assert + Assert.Null(result); + } + + [Fact] + public void ItRendersToArgumentValueWhenAvailable() + { + // Arrange + var target = new VarBlock(" $var \n "); + var arguments = new KernelArguments() + { + ["foo"] = "bar", + ["var"] = "able", + }; + + // Act + var result = target.Render(arguments); + + // Assert + Assert.Equal("able", result); + } + + [Fact] + public void ItRendersWithOriginalArgumentValueAndType() + { + // Arrange + var target = new VarBlock(" $var "); + var arguments = new KernelArguments() + { + ["var"] = DayOfWeek.Tuesday, + }; + + // Act + var result = target.Render(arguments); + + // Assert + Assert.IsType(result); + Assert.Equal(DayOfWeek.Tuesday, result); + } + + [Fact] + public void ItThrowsIfTheVarNameIsEmpty() + { + // Arrange + var arguments = new KernelArguments() + { + ["foo"] = "bar", + ["var"] = "able", + }; + var target = new VarBlock(" $ "); + + // Act + Assert + Assert.Throws(() => target.Render(arguments)); + } + + [Theory] + [InlineData("0", true)] + [InlineData("1", true)] + [InlineData("a", true)] + [InlineData("_", true)] + [InlineData("01", true)] + [InlineData("01a", true)] + [InlineData("a01", true)] + [InlineData("_0", true)] + [InlineData("a01_", true)] + [InlineData("_a01", true)] + [InlineData(".", false)] + [InlineData("-", false)] + [InlineData("a b", false)] + [InlineData("a\nb", false)] + [InlineData("a\tb", false)] + [InlineData("a\rb", false)] + [InlineData("a.b", false)] + [InlineData("a,b", false)] + [InlineData("a-b", false)] + [InlineData("a+b", false)] + [InlineData("a~b", false)] + [InlineData("a`b", false)] + [InlineData("a!b", false)] + [InlineData("a@b", false)] + [InlineData("a#b", false)] + [InlineData("a$b", false)] + [InlineData("a%b", false)] + [InlineData("a^b", false)] + [InlineData("a*b", false)] + [InlineData("a(b", false)] + [InlineData("a)b", false)] + [InlineData("a|b", false)] + [InlineData("a{b", false)] + [InlineData("a}b", false)] + [InlineData("a[b", false)] + [InlineData("a]b", false)] + [InlineData("a:b", false)] + [InlineData("a;b", false)] + [InlineData("a'b", false)] + [InlineData("a\"b", false)] + [InlineData("ab", false)] + [InlineData("a/b", false)] + [InlineData("a\\b", false)] + public void ItAllowsUnderscoreLettersAndDigits(string name, bool isValid) + { + // Arrange + var target = new VarBlock($" ${name} "); + var arguments = new KernelArguments { [name] = "value" }; + + // Act + var result = target.Render(arguments); + + // 
Assert + Assert.Equal(isValid, target.IsValid(out _)); + if (isValid) { Assert.Equal("value", result); } + } +} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/CodeTokenizerTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/CodeTokenizerTests.cs similarity index 84% rename from dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/CodeTokenizerTests.cs rename to dotnet/src/SemanticKernel.UnitTests/TemplateEngine/CodeTokenizerTests.cs index 34afa3d0e35e..d6c185386547 100644 --- a/dotnet/src/Extensions/Extensions.UnitTests/TemplateEngine/Prompt/CodeTokenizerTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/CodeTokenizerTests.cs @@ -1,12 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Orchestration; -using Microsoft.SemanticKernel.TemplateEngine.Basic; -using Microsoft.SemanticKernel.TemplateEngine.Basic.Blocks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TemplateEngine; using Xunit; -namespace SemanticKernel.Extensions.UnitTests.TemplateEngine.Prompt; +namespace SemanticKernel.UnitTests.TemplateEngine; public class CodeTokenizerTests { @@ -121,8 +119,8 @@ public void ItParsesMultiNamedArgFunctionCalls() { // Arrange var template1 = "x.y first=$foo second='bar'"; - var parameters = new ContextVariables(); - parameters.Set("foo", "fooValue"); + var arguments = new KernelArguments(); + arguments["foo"] = "fooValue"; // Act var blocks1 = this._target.Tokenize(template1); @@ -140,12 +138,12 @@ public void ItParsesMultiNamedArgFunctionCalls() Assert.Equal("first=$foo", secondBlock?.Content); Assert.Equal(BlockTypes.NamedArg, secondBlock?.Type); Assert.Equal("first", secondBlock?.Name); - Assert.Equal("fooValue", secondBlock?.GetValue(parameters)); + Assert.Equal("fooValue", secondBlock?.GetValue(arguments)); Assert.Equal("second='bar'", thirdBlock?.Content); Assert.Equal(BlockTypes.NamedArg, thirdBlock?.Type); Assert.Equal("second", thirdBlock?.Name); - Assert.Equal("bar", thirdBlock?.GetValue(parameters)); + Assert.Equal("bar", thirdBlock?.GetValue(arguments)); } [Fact] @@ -207,16 +205,16 @@ public void ItSupportsSpacesInNamedArguments() public void ItThrowsWhenSeparatorsAreMissing(string template) { // Act & Assert - Assert.Throws(() => this._target.Tokenize(template)); + Assert.Throws(() => this._target.Tokenize(template)); } [Theory] - [InlineData("f a =", "A function named argument must contain a quoted value or variable after the '=' character.")] - [InlineData("f a='b' arg2", "A function named argument must contain a name and value separated by a '=' character.")] - public void ItThrowsWhenArgValueIsMissing(string template, string expectedErrorMessage) + [InlineData("f a =")] + [InlineData("f a='b' arg2")] + public void ItThrowsWhenArgValueIsMissing(string template) { // Act & Assert - var exception = Assert.Throws(() => this._target.Tokenize(template)); - Assert.Equal(expectedErrorMessage, exception.Message); + var exception = Assert.Throws(() => this._target.Tokenize(template)); + Assert.Equal("A function named argument must contain a name and value separated by a '=' character.", exception.Message); } } diff --git a/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/PromptTemplateConfigTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/PromptTemplateConfigTests.cs deleted file mode 100644 index 30461bebf4df..000000000000 --- 
a/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/PromptTemplateConfigTests.cs +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json; -using Microsoft.SemanticKernel.Connectors.AI.OpenAI; -using Microsoft.SemanticKernel.TemplateEngine; -using Xunit; - -namespace SemanticKernel.UnitTests.TemplateEngine; - -public class PromptTemplateConfigTests -{ - [Fact] - public void DeserializingDoNotExpectChatSystemPromptToExist() - { - // Arrange - string configPayload = @"{ - ""max_tokens"": 60, - ""temperature"": 0.5, - ""top_p"": 0.0, - ""presence_penalty"": 0.0, - ""frequency_penalty"": 0.0 - }"; - - // Act - var requestSettings = JsonSerializer.Deserialize(configPayload); - - // Assert - Assert.NotNull(requestSettings); - Assert.NotNull(requestSettings.ChatSystemPrompt); - Assert.Equal("Assistant is a large language model.", requestSettings.ChatSystemPrompt); - } - - [Fact] - public void DeserializingExpectChatSystemPromptToExists() - { - // Arrange - string configPayload = @"{ - ""max_tokens"": 60, - ""temperature"": 0.5, - ""top_p"": 0.0, - ""presence_penalty"": 0.0, - ""frequency_penalty"": 0.0, - ""chat_system_prompt"": ""I am a prompt"" - }"; - - // Act - var requestSettings = JsonSerializer.Deserialize(configPayload); - - // Assert - Assert.NotNull(requestSettings); - Assert.NotNull(requestSettings.ChatSystemPrompt); - Assert.Equal("I am a prompt", requestSettings.ChatSystemPrompt); - } - - [Fact] - public void DeserializingExpectMultipleModels() - { - // Arrange - string configPayload = @" -{ - ""schema"": 1, - ""description"": """", - ""models"": - [ - { - ""model_id"": ""gpt-4"", - ""max_tokens"": 200, - ""temperature"": 0.2, - ""top_p"": 0.0, - ""presence_penalty"": 0.0, - ""frequency_penalty"": 0.0, - ""stop_sequences"": - [ - ""Human"", - ""AI"" - ] - }, - { - ""model_id"": ""gpt-3.5_turbo"", - ""max_tokens"": 256, - ""temperature"": 0.3, - ""top_p"": 0.0, - ""presence_penalty"": 0.0, - ""frequency_penalty"": 0.0, - ""stop_sequences"": - [ - ""Human"", - ""AI"" - ] - } - ] -} - "; - - // Act - var promptTemplateConfig = JsonSerializer.Deserialize(configPayload); - - // Assert - Assert.NotNull(promptTemplateConfig); - Assert.NotNull(promptTemplateConfig.ModelSettings); - Assert.Equal(2, promptTemplateConfig.ModelSettings.Count); - } - - [Fact] - public void DeserializingExpectCompletion() - { - // Arrange - string configPayload = @" -{ - ""schema"": 1, - ""description"": """", - ""models"": - [ - { - ""model_id"": ""gpt-4"", - ""max_tokens"": 200, - ""temperature"": 0.2, - ""top_p"": 0.0, - ""presence_penalty"": 0.0, - ""frequency_penalty"": 0.0, - ""stop_sequences"": - [ - ""Human"", - ""AI"" - ] - } - ] -} - "; - - // Act - var promptTemplateConfig = JsonSerializer.Deserialize(configPayload); - - // Assert - Assert.NotNull(promptTemplateConfig); -#pragma warning disable CS0618 // Ensure backward compatibility - Assert.NotNull(promptTemplateConfig.Completion); - Assert.Equal("gpt-4", promptTemplateConfig.Completion.ModelId); -#pragma warning restore CS0618 // Ensure backward compatibility - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/TemplateTokenizerTests.cs b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/TemplateTokenizerTests.cs new file mode 100644 index 000000000000..7ed28deccff9 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/TemplateEngine/TemplateTokenizerTests.cs @@ -0,0 +1,381 @@ +// Copyright (c) Microsoft. All rights reserved. 
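+// Unit tests for TemplateTokenizer: splitting templates into text, variable, value, and code blocks,
+// including edge cases and rendering of variable blocks.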
+ +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TemplateEngine; +using Xunit; + +namespace SemanticKernel.UnitTests.TemplateEngine; + +public class TemplateTokenizerTests +{ + private readonly TemplateTokenizer _target; + + public TemplateTokenizerTests() + { + this._target = new TemplateTokenizer(); + } + + [Theory] + [InlineData(null, BlockTypes.Text)] + [InlineData("", BlockTypes.Text)] + [InlineData(" ", BlockTypes.Text)] + [InlineData(" ", BlockTypes.Text)] + [InlineData(" {} ", BlockTypes.Text)] + [InlineData(" {{} ", BlockTypes.Text)] + [InlineData(" {{ } } } ", BlockTypes.Text)] + [InlineData(" { { }} }", BlockTypes.Text)] + [InlineData("{{}}", BlockTypes.Text)] + [InlineData("{{ }}", BlockTypes.Text)] + [InlineData("{{ }}", BlockTypes.Text)] + [InlineData("{{ '}}x", BlockTypes.Text)] + [InlineData("{{ \"}}x", BlockTypes.Text)] + internal void ItParsesTextWithoutCode(string? text, BlockTypes type) + { + // Act + var blocks = this._target.Tokenize(text); + + // Assert + Assert.Single(blocks); + Assert.Equal(type, blocks[0].Type); + } + + [Theory] + [InlineData("", BlockTypes.Text)] + [InlineData(" ", BlockTypes.Text)] + [InlineData(" ", BlockTypes.Text)] + [InlineData(" aaa ", BlockTypes.Text)] + [InlineData("{{$}}", BlockTypes.Variable)] + [InlineData("{{$a}}", BlockTypes.Variable)] + [InlineData("{{ $a}}", BlockTypes.Variable)] + [InlineData("{{ $a }}", BlockTypes.Variable)] + [InlineData("{{ $a }}", BlockTypes.Variable)] + [InlineData("{{code}}", BlockTypes.Code)] + [InlineData("{{code }}", BlockTypes.Code)] + [InlineData("{{ code }}", BlockTypes.Code)] + [InlineData("{{ code }}", BlockTypes.Code)] + [InlineData("{{ code }}", BlockTypes.Code)] + [InlineData("{{''}}", BlockTypes.Value)] + [InlineData("{{' '}}", BlockTypes.Value)] + [InlineData("{{ ' '}}", BlockTypes.Value)] + [InlineData("{{ ' ' }}", BlockTypes.Value)] + [InlineData("{{ ' ' }}", BlockTypes.Value)] + [InlineData("{{ ' ' }}", BlockTypes.Value)] + internal void ItParsesBasicBlocks(string? text, BlockTypes type) + { + // Act + var blocks = this._target.Tokenize(text); + + // Assert + Assert.Single(blocks); + Assert.Equal(type, blocks[0].Type); + } + + [Theory] + [InlineData(null, 1)] + [InlineData("", 1)] + [InlineData("}}{{a}} {{b}}x", 5)] + [InlineData("}}{{ -a}} {{b}}x", 5)] + [InlineData("}}{{ -a\n}} {{b}}x", 5)] + [InlineData("}}{{ -a\n} } {{b}}x", 3)] + public void ItTokenizesTheRightTokenCount(string? 
template, int blockCount) + { + // Act + var blocks = this._target.Tokenize(template); + + // Assert + Assert.Equal(blockCount, blocks.Count); + } + + [Fact] + public void ItTokenizesEdgeCasesCorrectly1() + { + // Act + var blocks1 = this._target.Tokenize("{{{{a}}"); + var blocks2 = this._target.Tokenize("{{'{{a}}"); + var blocks3 = this._target.Tokenize("{{'a}}"); + var blocks4 = this._target.Tokenize("{{a'}}"); + + // Assert - Count + Assert.Equal(2, blocks1.Count); + Assert.Single(blocks2); + Assert.Single(blocks3); + Assert.Single(blocks4); + + // Assert - Type + Assert.Equal(BlockTypes.Text, blocks1[0].Type); + Assert.Equal(BlockTypes.Code, blocks1[1].Type); + + // Assert - Content + Assert.Equal("{{", blocks1[0].Content); + Assert.Equal("a", blocks1[1].Content); + } + + [Fact] + public void ItTokenizesEdgeCasesCorrectly2() + { + // Arrange + var template = "}}{{{ {$a}}}} {{b}}x}}"; + + // Act + var blocks = this._target.Tokenize(template); + + // Assert + Assert.Equal(5, blocks.Count); + + Assert.Equal("}}{", blocks[0].Content); + Assert.Equal(BlockTypes.Text, blocks[0].Type); + + Assert.Equal("{$a", blocks[1].Content); + Assert.Equal(BlockTypes.Code, blocks[1].Type); + + Assert.Equal("}} ", blocks[2].Content); + Assert.Equal(BlockTypes.Text, blocks[2].Type); + + Assert.Equal("b", blocks[3].Content); + Assert.Equal(BlockTypes.Code, blocks[3].Type); + + Assert.Equal("x}}", blocks[4].Content); + Assert.Equal(BlockTypes.Text, blocks[4].Type); + } + + [Fact] + public void ItTokenizesEdgeCasesCorrectly3() + { + // Arrange + var template = "}}{{{{$a}}}} {{b}}$x}}"; + + // Act + var blocks = this._target.Tokenize(template); + + // Assert + Assert.Equal(5, blocks.Count); + + Assert.Equal("}}{{", blocks[0].Content); + Assert.Equal(BlockTypes.Text, blocks[0].Type); + + Assert.Equal("$a", blocks[1].Content); + Assert.Equal(BlockTypes.Variable, blocks[1].Type); + + Assert.Equal("}} ", blocks[2].Content); + Assert.Equal(BlockTypes.Text, blocks[2].Type); + + Assert.Equal("b", blocks[3].Content); + Assert.Equal(BlockTypes.Code, blocks[3].Type); + + Assert.Equal("$x}}", blocks[4].Content); + Assert.Equal(BlockTypes.Text, blocks[4].Type); + } + + [Theory] + [InlineData("{{a$}}")] + [InlineData("{{a$a}}")] + [InlineData("{{a''}}")] + [InlineData("{{a\"\"}}")] + [InlineData("{{a'b'}}")] + [InlineData("{{a\"b\"}}")] + [InlineData("{{a'b' }}")] + [InlineData("{{a\"b\" }}")] + [InlineData("{{ asis 'f\\'oo' }}")] + public void ItTokenizesEdgeCasesCorrectly4(string template) + { + // Act + var blocks = this._target.Tokenize(template); + + // Assert + Assert.Single(blocks); + Assert.Equal(BlockTypes.Code, blocks[0].Type); + Assert.Equal(template.Substring(2, template.Length - 4).Trim(), blocks[0].Content); + } + + [Fact] + public void ItTokenizesATypicalPrompt() + { + // Arrange + var template = "this is a {{ $prompt }} with {{$some}} variables " + + "and {{function $calls}} {{ and 'values' }}"; + + // Act + var blocks = this._target.Tokenize(template); + + // Assert + Assert.Equal(8, blocks.Count); + + Assert.Equal("this is a ", blocks[0].Content); + Assert.Equal(BlockTypes.Text, blocks[0].Type); + + Assert.Equal("$prompt", blocks[1].Content); + Assert.Equal(BlockTypes.Variable, blocks[1].Type); + + Assert.Equal(" with ", blocks[2].Content); + Assert.Equal(BlockTypes.Text, blocks[2].Type); + + Assert.Equal("$some", blocks[3].Content); + Assert.Equal(BlockTypes.Variable, blocks[3].Type); + + Assert.Equal(" variables and ", blocks[4].Content); + Assert.Equal(BlockTypes.Text, blocks[4].Type); + + 
Assert.Equal("function $calls", blocks[5].Content); + Assert.Equal(BlockTypes.Code, blocks[5].Type); + + Assert.Equal(" ", blocks[6].Content); + Assert.Equal(BlockTypes.Text, blocks[6].Type); + + Assert.Equal("and 'values'", blocks[7].Content); + Assert.Equal(BlockTypes.Code, blocks[7].Type); + } + + [Fact] + public void ItTokenizesAFunctionCallWithMultipleArguments() + { + // Arrange + var template = "this is a {{ function with='many' named=$arguments }}"; + + // Act + var blocks = this._target.Tokenize(template); + + // Assert + Assert.Equal(2, blocks.Count); + + Assert.Equal("this is a ", blocks[0].Content); + Assert.Equal(BlockTypes.Text, blocks[0].Type); + + Assert.Equal("function with='many' named=$arguments", blocks[1].Content); + Assert.Equal(BlockTypes.Code, blocks[1].Type); + } + + [Fact] + public void ItThrowsWhenCodeBlockStartsWithNamedArg() + { + // Arrange + var template = "{{ not='valid' }}"; + + // Assert + var ex = Assert.Throws(() => + { + // Act + this._target.Tokenize(template); + }); + Assert.Equal("Code tokenizer returned an incorrect first token type NamedArg", ex.Message); + } + + [Fact] + public void ItRendersVariables1() + { + // Arrange + var template = "{$x11} This {$a} is {$_a} a {{$x11}} test {{$x11}} " + + "template {{foo}}{{bar $a}}{{baz $_a}}{{yay $x11}}{{food a='b' c = $d}}{{positional 'abc' p1=$p1}}"; + + // Act + var blocks = this._target.Tokenize(template); + + var renderedBlocks = RenderBlocks(blocks); + + // Assert + Assert.Equal(11, blocks.Count); + Assert.Equal(11, renderedBlocks.Count); + + Assert.Equal("$x11", blocks[1].Content); + Assert.Equal("", renderedBlocks[1].Content); + Assert.Equal(BlockTypes.Variable, blocks[1].Type); + Assert.Equal(BlockTypes.Text, renderedBlocks[1].Type); + + Assert.Equal("$x11", blocks[3].Content); + Assert.Equal("", renderedBlocks[3].Content); + Assert.Equal(BlockTypes.Variable, blocks[3].Type); + Assert.Equal(BlockTypes.Text, renderedBlocks[3].Type); + + Assert.Equal("foo", blocks[5].Content); + Assert.Equal("foo", renderedBlocks[5].Content); + Assert.Equal(BlockTypes.Code, blocks[5].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[5].Type); + + Assert.Equal("bar $a", blocks[6].Content); + Assert.Equal("bar $a", renderedBlocks[6].Content); + Assert.Equal(BlockTypes.Code, blocks[6].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[6].Type); + + Assert.Equal("baz $_a", blocks[7].Content); + Assert.Equal("baz $_a", renderedBlocks[7].Content); + Assert.Equal(BlockTypes.Code, blocks[7].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[7].Type); + + Assert.Equal("yay $x11", blocks[8].Content); + Assert.Equal("yay $x11", renderedBlocks[8].Content); + Assert.Equal(BlockTypes.Code, blocks[8].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[8].Type); + + Assert.Equal("food a='b' c = $d", blocks[9].Content); + Assert.Equal("food a='b' c = $d", renderedBlocks[9].Content); + Assert.Equal(BlockTypes.Code, blocks[9].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[9].Type); + + // Arrange + var arguments = new KernelArguments + { + ["x11"] = "x11 value", + ["a"] = "a value", + ["_a"] = "_a value", + ["c"] = "c value", + ["d"] = "d value", + ["p1"] = "p1 value", + }; + + // Act + blocks = this._target.Tokenize(template); + renderedBlocks = RenderBlocks(blocks, arguments); + + // Assert + Assert.Equal(11, blocks.Count); + Assert.Equal(11, renderedBlocks.Count); + + Assert.Equal("$x11", blocks[1].Content); + Assert.Equal("x11 value", renderedBlocks[1].Content); + Assert.Equal(BlockTypes.Variable, 
blocks[1].Type); + Assert.Equal(BlockTypes.Text, renderedBlocks[1].Type); + + Assert.Equal("$x11", blocks[3].Content); + Assert.Equal("x11 value", renderedBlocks[3].Content); + Assert.Equal(BlockTypes.Variable, blocks[3].Type); + Assert.Equal(BlockTypes.Text, renderedBlocks[3].Type); + + Assert.Equal("foo", blocks[5].Content); + Assert.Equal("foo", renderedBlocks[5].Content); + Assert.Equal(BlockTypes.Code, blocks[5].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[5].Type); + + Assert.Equal("bar $a", blocks[6].Content); + Assert.Equal("bar $a", renderedBlocks[6].Content); + Assert.Equal(BlockTypes.Code, blocks[6].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[6].Type); + + Assert.Equal("baz $_a", blocks[7].Content); + Assert.Equal("baz $_a", renderedBlocks[7].Content); + Assert.Equal(BlockTypes.Code, blocks[7].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[7].Type); + + Assert.Equal("yay $x11", blocks[8].Content); + Assert.Equal("yay $x11", renderedBlocks[8].Content); + Assert.Equal(BlockTypes.Code, blocks[8].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[8].Type); + + Assert.Equal("food a='b' c = $d", blocks[9].Content); + Assert.Equal("food a='b' c = $d", renderedBlocks[9].Content); + Assert.Equal(BlockTypes.Code, blocks[9].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[9].Type); + + Assert.Equal("positional 'abc' p1=$p1", blocks[10].Content); + Assert.Equal("positional 'abc' p1=$p1", renderedBlocks[10].Content); + Assert.Equal(BlockTypes.Code, blocks[10].Type); + Assert.Equal(BlockTypes.Code, renderedBlocks[10].Type); + } + + private static List RenderBlocks(IList blocks, KernelArguments? arguments = null) + { + return blocks.Select(block => block.Type != BlockTypes.Variable + ? block + : new TextBlock((string?)((ITextRendering)block).Render(arguments))).ToList(); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Text/StringExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Text/StringExtensionsTests.cs deleted file mode 100644 index 3d2f57f8b505..000000000000 --- a/dotnet/src/SemanticKernel.UnitTests/Text/StringExtensionsTests.cs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel.Text; -using Xunit; - -namespace SemanticKernel.UnitTests.Text; - -/// -/// Unit tests for StringExtensions -/// -public class StringExtensionsTests -{ - [Theory] - [InlineData("\r\n", "\n")] - [InlineData("Test string\r\n", "Test string\n")] - [InlineData("\r\nTest string", "\nTest string")] - [InlineData("\r\nTest string\r\n", "\nTest string\n")] - public void ItNormalizesLineEndingsCorrectly(string input, string expectedString) - { - // Act - input = input.NormalizeLineEndings(); - - // Assert - Assert.Equal(expectedString, input); - } -} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/HttpClientExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/HttpClientExtensionsTests.cs index 250ef3e19c5c..2b5ed9ed526f 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Utilities/HttpClientExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/HttpClientExtensionsTests.cs @@ -3,11 +3,11 @@ using System; using System.Net; using System.Net.Http; -using System.Net.Mime; using System.Text; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Http; using Xunit; namespace SemanticKernel.UnitTests.Utilities; @@ -57,7 +57,7 @@ public async Task ShouldThrowHttpOperationExceptionForFailedRequestAsync() { //Arrange this._httpMessageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.InternalServerError); - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("{\"details\": \"fake-response-content\"}", Encoding.UTF8, MediaTypeNames.Application.Json); + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("{\"details\": \"fake-response-content\"}", Encoding.UTF8, "application/json"); using var requestMessage = new HttpRequestMessage(HttpMethod.Get, "https://fake-random-test-host"); diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/HttpContentExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/HttpContentExtensionsTests.cs index 433643a34c39..5b8ea7e0dec1 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Utilities/HttpContentExtensionsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/HttpContentExtensionsTests.cs @@ -3,10 +3,10 @@ using System; using System.IO; using System.Net.Http; -using System.Net.Mime; using System.Text; using System.Threading; using System.Threading.Tasks; +using Microsoft.SemanticKernel.Http; using Xunit; namespace SemanticKernel.UnitTests.Utilities; @@ -37,7 +37,7 @@ public HttpContentExtensionsTests() public async Task ShouldReturnHttpContentAsStringAsync() { //Arrange - this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("{\"details\": \"fake-response-content\"}", Encoding.UTF8, MediaTypeNames.Application.Json); + this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("{\"details\": \"fake-response-content\"}", Encoding.UTF8, "application/json"); using var requestMessage = new HttpRequestMessage(HttpMethod.Get, "https://fake-random-test-host"); diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/InternalTypeConverterTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/InternalTypeConverterTests.cs new file mode 100644 index 000000000000..91ca7ab24d8f --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/InternalTypeConverterTests.cs @@ -0,0 +1,144 @@ +// Copyright (c) Microsoft. All rights reserved. 
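+// Unit tests for InternalTypeConverter.ConvertToString covering culture-aware formatting, common CLR
+// types (numeric, date/time, Guid, Uri, enums, strings), and custom TypeConverter attributes.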
+ +using System; +using System.ComponentModel; +using System.Globalization; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities; + +public class InternalTypeConverterTests +{ + [Theory] + [InlineData(123.456, "123,456", "fr-FR")] + [InlineData(123.456, "123.456", "en-US")] + public void ItTakesCultureIntoAccount(double value, string expectedString, string culture) + { + // Act + var result = InternalTypeConverter.ConvertToString(value, new CultureInfo(culture)); + + // Assert + Assert.Equal(expectedString, result); + } + + [Fact] + public void ItCanConvertManyTypes() + { + // Arrange + var culture = CultureInfo.InvariantCulture; + + // Act & Assert + Assert.Equal("10", InternalTypeConverter.ConvertToString((byte)10, culture)); + Assert.Equal("10", InternalTypeConverter.ConvertToString((byte?)10, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((byte?)null, culture)); + + Assert.Equal("10", InternalTypeConverter.ConvertToString((sbyte)10, culture)); + Assert.Equal("10", InternalTypeConverter.ConvertToString((sbyte?)10, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((sbyte?)null, culture)); + + Assert.Equal("10", InternalTypeConverter.ConvertToString((short)10, culture)); + Assert.Equal("10", InternalTypeConverter.ConvertToString((short?)10, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((short?)null, culture)); + + Assert.Equal("10", InternalTypeConverter.ConvertToString((ushort)10, culture)); + Assert.Equal("10", InternalTypeConverter.ConvertToString((ushort?)10, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((ushort?)null, culture)); + + Assert.Equal("10", InternalTypeConverter.ConvertToString((int)10, culture)); + Assert.Equal("10", InternalTypeConverter.ConvertToString((int?)10, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((int?)null, culture)); + + Assert.Equal("10", InternalTypeConverter.ConvertToString((uint)10, culture)); + Assert.Equal("10", InternalTypeConverter.ConvertToString((uint?)10, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((uint?)null, culture)); + + Assert.Equal("10", InternalTypeConverter.ConvertToString((long)10, culture)); + Assert.Equal("10", InternalTypeConverter.ConvertToString((long?)10, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((long?)null, culture)); + + Assert.Equal("10", InternalTypeConverter.ConvertToString((ulong)10, culture)); + Assert.Equal("10", InternalTypeConverter.ConvertToString((ulong?)10, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((ulong?)null, culture)); + + Assert.Equal("10.5", InternalTypeConverter.ConvertToString((float)10.5, culture)); + Assert.Equal("10.5", InternalTypeConverter.ConvertToString((float?)10.5, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((float?)null, culture)); + + Assert.Equal("10.5", InternalTypeConverter.ConvertToString((double)10.5, culture)); + Assert.Equal("10.5", InternalTypeConverter.ConvertToString((double?)10.5, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((double?)null, culture)); + + Assert.Equal("10.5", InternalTypeConverter.ConvertToString((decimal)10.5, culture)); + Assert.Equal("10.5", InternalTypeConverter.ConvertToString((decimal?)10.5, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((decimal?)null, culture)); + + Assert.Equal("A", InternalTypeConverter.ConvertToString((char)'A', culture)); + Assert.Equal("A", InternalTypeConverter.ConvertToString((char?)'A', culture)); + 
Assert.Null(InternalTypeConverter.ConvertToString((char?)null, culture)); + + Assert.Equal("True", InternalTypeConverter.ConvertToString((bool)true, culture)); + Assert.Equal("True", InternalTypeConverter.ConvertToString((bool?)true, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((bool?)null, culture)); + + Assert.Equal("12/06/2023 11:53:36", InternalTypeConverter.ConvertToString((DateTime)DateTime.ParseExact("06.12.2023 11:53:36", "dd.MM.yyyy HH:mm:ss", culture), culture)); + Assert.Equal("12/06/2023 11:53:36", InternalTypeConverter.ConvertToString((DateTime?)DateTime.ParseExact("06.12.2023 11:53:36", "dd.MM.yyyy HH:mm:ss", culture), culture)); + Assert.Null(InternalTypeConverter.ConvertToString((DateTime?)null, culture)); + + Assert.Equal("12/06/2023 11:53:36 +02:00", InternalTypeConverter.ConvertToString((DateTimeOffset)DateTimeOffset.ParseExact("06.12.2023 11:53:36 +02:00", "dd.MM.yyyy HH:mm:ss zzz", culture), culture)); + Assert.Equal("12/06/2023 11:53:36 +02:00", InternalTypeConverter.ConvertToString((DateTimeOffset?)DateTimeOffset.ParseExact("06.12.2023 11:53:36 +02:00", "dd.MM.yyyy HH:mm:ss zzz", culture), culture)); + Assert.Null(InternalTypeConverter.ConvertToString((DateTimeOffset?)null, culture)); + + Assert.Equal("01:00:00", InternalTypeConverter.ConvertToString((TimeSpan)TimeSpan.FromHours(1), culture)); + Assert.Equal("01:00:00", InternalTypeConverter.ConvertToString((TimeSpan?)TimeSpan.FromHours(1), culture)); + Assert.Null(InternalTypeConverter.ConvertToString((TimeSpan?)null, culture)); + + Guid guid = Guid.NewGuid(); + Assert.Equal(guid.ToString(), InternalTypeConverter.ConvertToString((Guid)guid, culture)); + Assert.Equal(guid.ToString(), InternalTypeConverter.ConvertToString((Guid?)guid, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((Guid?)null, culture)); + + Assert.Equal("Monday", InternalTypeConverter.ConvertToString((DayOfWeek)DayOfWeek.Monday, culture)); + Assert.Equal("Monday", InternalTypeConverter.ConvertToString((DayOfWeek?)DayOfWeek.Monday, culture)); + Assert.Null(InternalTypeConverter.ConvertToString((DayOfWeek?)null, culture)); + + Assert.Equal("https://example.com", InternalTypeConverter.ConvertToString((Uri)new("https://example.com"), culture)); + Assert.Equal("https://example.com", InternalTypeConverter.ConvertToString((Uri?)new("https://example.com"), culture)); + Assert.Null(InternalTypeConverter.ConvertToString((Uri?)null, culture)); + + Assert.Equal("Hello, World!", InternalTypeConverter.ConvertToString((string)"Hello, World!", culture)); + Assert.Equal("Hello, World!", InternalTypeConverter.ConvertToString((string?)"Hello, World!", culture)); + Assert.Null(InternalTypeConverter.ConvertToString((string?)null, culture)); + } + + [Fact] + public void ItCallsCustomConverterSpecifiedByTypeConverterAttribute() + { + // Arrange + var customType = new MyCustomType(); + customType.Value = 4; + + // Act + var result = InternalTypeConverter.ConvertToString(customType, CultureInfo.InvariantCulture); + + // Assert + Assert.Equal("4", result); + } + +#pragma warning disable CA1812 // Instantiated by reflection + private sealed class MyCustomTypeConverter : TypeConverter + { + public override bool CanConvertTo(ITypeDescriptorContext? context, Type? destinationType) + => destinationType == typeof(string); + + public override object? ConvertTo(ITypeDescriptorContext? context, CultureInfo? culture, object? 
value, Type destinationType) + => ((MyCustomType)value!).Value.ToString(culture); + } + + [TypeConverter(typeof(MyCustomTypeConverter))] + private sealed class MyCustomType + { + public int Value { get; set; } + } +#pragma warning restore CA1812 +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/TypeExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/TypeExtensionsTests.cs new file mode 100644 index 000000000000..7533af205d2b --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/TypeExtensionsTests.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Xunit; +using TypeExtensions = System.TypeExtensions; + +namespace SemanticKernel.UnitTests.Utilities; + +/// +/// Unit tests for class. +/// +public sealed class TypeExtensionsTests +{ + [Theory] + [InlineData(null, typeof(object), false)] + [InlineData(typeof(TestType), typeof(object), false)] + [InlineData(typeof(Task), typeof(TestType), true)] + [InlineData(typeof(TestType?), typeof(TestType), true)] + [InlineData(typeof(ValueTask), typeof(TestType), true)] + [InlineData(typeof(IEnumerable), typeof(List), true)] + [InlineData(typeof(IList), typeof(List), true)] + [InlineData(typeof(ICollection), typeof(List), true)] + [InlineData(typeof(IDictionary), typeof(Dictionary), true)] + public void TryGetGenericResultTypeWorksCorrectly(Type? type, Type expectedType, bool expectedResult) + { + // Arrange & Act + var result = type.TryGetGenericResultType(out var resultType); + + // Assert + Assert.Equal(expectedResult, result); + Assert.Equal(expectedType, resultType); + } + + private struct TestType { } +} diff --git a/python/.conf/.pre-commit-config.yaml b/python/.conf/.pre-commit-config.yaml index fd93114f6f28..1b566b76c0ee 100644 --- a/python/.conf/.pre-commit-config.yaml +++ b/python/.conf/.pre-commit-config.yaml @@ -11,12 +11,12 @@ repos: - id: mixed-line-ending files: \.py$ - repo: https://github.com/psf/black - rev: 23.3.0 + rev: 23.12.0 hooks: - id: black files: \.py$ - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.270 + rev: v0.1.8 hooks: - id: ruff args: [ --fix, --exit-non-zero-on-fix ] \ No newline at end of file diff --git a/python/.coveragerc b/python/.coveragerc new file mode 100644 index 000000000000..e54128603528 --- /dev/null +++ b/python/.coveragerc @@ -0,0 +1,27 @@ +[run] +source = semantic_kernel +omit = + semantic_kernel/connectors/memory/* + semantic_kernel/connectors/openapi/* + semantic_kernel/connectors/search_engine/* + semantic_kernel/connectors/ai/google_palm/* + semantic_kernel/connectors/ai/hugging_face/* + + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # TYPE_CHECKING and @overload blocks are never executed during pytest run + if TYPE_CHECKING: + @overload + @abstractmethod \ No newline at end of file diff --git a/python/.cspell.json b/python/.cspell.json new file mode 100644 index 000000000000..84f8f4629e52 --- /dev/null +++ b/python/.cspell.json @@ -0,0 +1,46 @@ +{ + "version": "0.2", + "languageSettings": [ + { + "languageId": "py", + "allowCompoundWords": true, + "locale": "en-US" + } + ], + "language": "en-US", + "patterns": [ + { + "name": "import", + "pattern": "import 
[a-zA-Z0-9_]+" + }, + { + "name": "from import", + "pattern": "from [a-zA-Z0-9_]+ import [a-zA-Z0-9_]+" + } + ], + "ignorePaths": [ + "samples/**", + "notebooks/**" + ], + "words": [ + "aeiou", + "azuredocindex", + "azuredocs", + "contentvector", + "dotenv", + "logit", + "logprobs", + "mongocluster", + "ndarray", + "nopep", + "ollama", + "onyourdatatest", + "OPENAI", + "pydantic", + "retrywrites", + "kernelfunction", + "skprompt", + "templating", + "vectordb" + ] +} \ No newline at end of file diff --git a/python/.env.example b/python/.env.example index 2e7a633e31ba..853022f34fc3 100644 --- a/python/.env.example +++ b/python/.env.example @@ -14,3 +14,11 @@ WEAVIATE_API_KEY="" GOOGLE_PALM_API_KEY="" GOOGLE_SEARCH_ENGINE_ID="" REDIS_CONNECTION_STRING="" +AZCOSMOS_API = "" // should be mongo-vcore for now, as CosmosDB only supports vector search in mongo-vcore for now. +AZCOSMOS_CONNSTR = "" +AZCOSMOS_DATABASE_NAME = "" +AZCOSMOS_CONTAINER_NAME = "" +ASTRADB_APP_TOKEN="" // Starts with AstraCS: +ASTRADB_ID="" +ASTRADB_REGION="" +ASTRADB_KEYSPACE="" \ No newline at end of file diff --git a/python/.vscode/extensions.json b/python/.vscode/extensions.json new file mode 100644 index 000000000000..66114688a305 --- /dev/null +++ b/python/.vscode/extensions.json @@ -0,0 +1,9 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=827846 + // for the documentation about the extensions.json format + "recommendations": [ + "littlefoxteam.vscode-python-test-adapter", + "streetsidesoftware.code-spell-checker", + "ms-python.python", + ] +} \ No newline at end of file diff --git a/python/.vscode/launch.json b/python/.vscode/launch.json new file mode 100644 index 000000000000..306f58eb37e8 --- /dev/null +++ b/python/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Current File", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "justMyCode": true + } + ] +} \ No newline at end of file diff --git a/python/.vscode/settings.json b/python/.vscode/settings.json index 4e6c0ad8e15e..80ce88cd0989 100644 --- a/python/.vscode/settings.json +++ b/python/.vscode/settings.json @@ -1,26 +1,40 @@ { - "python.analysis.extraPaths": [ - "./src" - ], "explorer.compactFolders": false, "prettier.enable": true, "editor.formatOnType": true, "editor.formatOnSave": true, "editor.formatOnPaste": true, - "python.formatting.provider": "black", - "python.formatting.autopep8Args": [ - "--max-line-length=160" - ], + "editor.defaultFormatter": "charliermarsh.ruff", "notebook.output.textLineLimit": 500, - "cSpell.words": [ - "aeiou", - "nopep", - "OPENAI", - "skfunction" + "cSpell.languageSettings": [ + { + "languageId": "py", + "allowCompoundWords": true, + "locale": "en-US" + } ], "python.testing.pytestArgs": [ "tests" ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, + "pythonTestExplorer.testFramework": "pytest", + "pythonTestExplorer.pytestPath": "poetry", + "pythonTestExplorer.pytestArguments": [ + "run", + "pytest" + ], + "[python]": { + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit", + "source.fixAll": "explicit" + }, + "editor.formatOnSave": true, + "editor.defaultFormatter": "charliermarsh.ruff", + }, + "notebook.formatOnSave.enabled": true, + "notebook.codeActionsOnSave": { + "source.fixAll": true, + "source.organizeImports": true + } } \ No newline at end of file diff --git a/python/.vscode/tasks.json b/python/.vscode/tasks.json new file mode 100644 index 000000000000..e565a621753e --- /dev/null +++ b/python/.vscode/tasks.json @@ -0,0 +1,94 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=733558 + // for the documentation about the tasks.json format + "version": "2.0.0", + "tasks": [ + { + "label": "Python: Run Checks", + "type": "shell", + "command": "poetry", + "args": [ + "run", + "pre-commit", + "run", + "-c", + ".conf/.pre-commit-config.yaml", + "-a" + ], + "problemMatcher": { + "owner": "python", + "fileLocation": [ + "relative", + "${workspaceFolder}" + ], + "pattern": { + "regexp": "^(.*):(\\d+):(\\d+):\\s+(.*)$", + "file": 1, + "line": 2, + "column": 3, + "message": 4 + } + }, + "presentation": { + "reveal": "silent", + "panel": "shared" + } + }, + { + "label": "Python: Install", + "type": "shell", + "command": "poetry", + "args": [ + "install" + ], + "presentation": { + "reveal": "silent", + "panel": "shared" + }, + "problemMatcher": [] + }, + { + "label": "Python: Tests - Unit", + "type": "shell", + "command": "poetry", + "args": [ + "run", + "pytest", + "tests/unit/" + ], + "group": "test", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + }, + { + "label": "Python: Tests - Code Coverage", + "type": "shell", + "command": "poetry run pytest --cov=semantic_kernel --cov-report term-missing tests/unit/", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + }, + { + "label": "Python: Tests - All", + "type": "shell", + "command": "poetry", + "args": [ + "run", + "pytest", + "tests/" + ], + "group": "test", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + } + ] +} \ 
No newline at end of file diff --git a/python/DEV_SETUP.md b/python/DEV_SETUP.md index b851b3b2b8dc..0e06de9aba21 100644 --- a/python/DEV_SETUP.md +++ b/python/DEV_SETUP.md @@ -121,6 +121,14 @@ You can also run all the tests together under the [tests](tests/) folder. ## Tools and scripts +## Implementation Decisions + +### Asynchronous programming + +It's important to note that most of this library is written with asynchronous in mind. The +developer should always assume everything is asynchronous. One can use the function signature +with either `async def` or `def` to understand if something is asynchronous or not. + ## Pydantic and Serialization [Pydantic Documentation](https://docs.pydantic.dev/1.10/) @@ -170,7 +178,7 @@ from this field is sufficient to have these types of classes as valid Pydantic f any class using them as attributes to be serialized. ```python -from semantic_kernel.sk_pydantic import PydanticField +from semantic_kernel.kernel_pydantic import PydanticField class B(PydanticField): ... # correct, B is still an ABC because PydanticField subclasses ABC class B(PydanticField, ABC): ... # Also correct @@ -223,13 +231,13 @@ class A: self.d = d ``` -You would convert this to a Pydantic class by subclassing from the `SKBaseModel` class. +You would convert this to a Pydantic class by subclassing from the `KernelBaseModel` class. ```python from pydantic import Field -from semantic_kernel.sk_pydantic import SKBaseModel +from semantic_kernel.kernel_pydantic import KernelBaseModel -class A(SKBaseModel): +class A(KernelBaseModel): # The notation for the fields is similar to dataclasses. a: int b: float @@ -255,14 +263,14 @@ class A: self.c = c ``` -You can uses the `SKGenericModel` to convert these to pydantic serializable classes. +You can use the `KernelBaseModel` to convert these to pydantic serializable classes. ```python from typing import Generic -from semantic_kernel.sk_pydantic import SKGenericModel +from semantic_kernel.kernel_pydantic import KernelBaseModel -class A(SKGenericModel, Generic[T1, T2]): +class A(KernelBaseModel, Generic[T1, T2]): # T1 and T2 must be specified in the Generic argument otherwise, pydantic will # NOT be able to serialize this class a: int diff --git a/python/README.md b/python/README.md index 381a63047592..f2f5f11ad357 100644 --- a/python/README.md +++ b/python/README.md @@ -25,6 +25,7 @@ AZURE_OPENAI_API_KEY="" # Running a prompt ```python +import asyncio import semantic_kernel as sk from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, AzureChatCompletion @@ -52,7 +53,12 @@ does not conflict with the First or Second Law. Give me the TLDR in exactly 5 words.""") # Run your prompt -print(prompt()) # => Robots must not harm humans. +# Note: functions are run asynchronously +async def main(): + print(await prompt()) # => Robots must not harm humans. + +if __name__ == "__main__": + asyncio.run(main()) ``` # **Semantic functions** are Prompts with input parameters @@ -62,19 +68,19 @@ print(prompt()) # => Robots must not harm humans. summarize = kernel.create_semantic_function("{{$input}}\n\nOne line TLDR with the fewest words.") # Summarize the laws of thermodynamics -print(summarize(""" +print(await summarize(""" 1st Law of Thermodynamics - Energy cannot be created or destroyed. 2nd Law of Thermodynamics - For a spontaneous process, the entropy of the universe increases. 3rd Law of Thermodynamics - A perfect crystal at zero Kelvin has zero entropy.""")) # Summarize the laws of motion -print(summarize(""" +print(await summarize(""" 1. 
An object at rest remains at rest, and an object in motion remains in motion at constant speed and in a straight line unless acted on by an unbalanced force. 2. The acceleration of an object depends on the mass of the object and the amount of force applied. 3. Whenever one object exerts a force on another object, the second object exerts an equal and opposite on the first.""")) # Summarize the law of universal gravitation -print(summarize(""" +print(await summarize(""" Every point mass attracts every single other point mass by a force acting along the line intersecting both points. The force is proportional to the product of the two masses and inversely proportional to the square of the distance between them.""")) @@ -98,7 +104,7 @@ Python notebooks: - [Using Context Variables to Build a Chat Experience](./notebooks/04-context-variables-chat.ipynb) - [Introduction to planners](./notebooks/05-using-the-planner.ipynb) - [Building Memory with Embeddings](./notebooks/06-memory-and-embeddings.ipynb) -- [Using Hugging Face for Skills](./notebooks/07-hugging-face-for-skills.ipynb) +- [Using Hugging Face for Plugins](./notebooks/07-hugging-face-for-plugins.ipynb) - [Combining native functions and semantic functions](./notebooks/08-native-function-inline.ipynb) - [Groundedness Checking with Semantic Kernel](./notebooks/09-groundedness-checking.ipynb) - [Returning multiple results per prompt](./notebooks/10-multiple-results-per-prompt.ipynb) diff --git a/python/log.txt b/python/log.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/python/notebooks/.env.example b/python/notebooks/.env.example index 5be9659fd888..c727b53f5235 100644 --- a/python/notebooks/.env.example +++ b/python/notebooks/.env.example @@ -3,3 +3,5 @@ OPENAI_ORG_ID="" AZURE_OPENAI_DEPLOYMENT_NAME="" AZURE_OPENAI_ENDPOINT="" AZURE_OPENAI_API_KEY="" +AZURE_AISEARCH_API_KEY="" +AZURE_AISEARCH_URL="" diff --git a/python/notebooks/00-getting-started.ipynb b/python/notebooks/00-getting-started.ipynb index 480f511b25e4..fae229aff099 100644 --- a/python/notebooks/00-getting-started.ipynb +++ b/python/notebooks/00-getting-started.ipynb @@ -16,7 +16,7 @@ "metadata": {}, "outputs": [], "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" + "!python -m pip install semantic-kernel==0.5.1.dev0" ] }, { @@ -44,7 +44,7 @@ "OPENAI_ORG_ID=\"\"\n", "```\n", "\n", - "and add OpenAI Chat Completion to the kernel:" + "Use \"keyword arguments\" to instantiate an OpenAI Chat Completion service and add it to the kernel:" ] }, { @@ -57,7 +57,10 @@ "\n", "api_key, org_id = sk.openai_settings_from_dot_env()\n", "\n", - "kernel.add_chat_service(\"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))" + "kernel.add_chat_service(\n", + " \"chat-gpt\",\n", + " OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo-1106\", api_key=api_key, org_id=org_id),\n", + ")" ] }, { @@ -75,7 +78,7 @@ "AZURE_OPENAI_DEPLOYMENT_NAME=\"...\"\n", "```\n", "\n", - "and add Azure OpenAI Chat Completion to the kernel:" + "Use \"keyword arguments\" to instantiate an Azure OpenAI Chat Completion service and add it to the kernel:" ] }, { @@ -88,7 +91,10 @@ "\n", "deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", "\n", - "kernel.add_chat_service(\"chat_completion\", AzureChatCompletion(deployment, endpoint, api_key))\n" + "kernel.add_chat_service(\n", + " \"chat_completion\",\n", + " AzureChatCompletion(deployment_name=deployment, endpoint=endpoint, api_key=api_key),\n", + ")" ] }, { @@ -98,7 +104,7 @@ "source": [ "# 
Run a Semantic Function\n", "\n", - "**Step 3**: Load a Skill and run a semantic function:" + "**Step 3**: Load a Plugin and run a semantic function:" ] }, { @@ -107,10 +113,10 @@ "metadata": {}, "outputs": [], "source": [ - "skill = kernel.import_semantic_skill_from_directory(\"../../samples/skills\", \"FunSkill\")\n", - "joke_function = skill[\"Joke\"]\n", + "plugin = kernel.import_semantic_plugin_from_directory(\"../../samples/plugins\", \"FunPlugin\")\n", + "joke_function = plugin[\"Joke\"]\n", "\n", - "print(joke_function(\"time travel to dinosaur age\"))" + "print(await joke_function(\"time travel to dinosaur age\"))" ] } ], @@ -130,7 +136,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/python/notebooks/01-basic-loading-the-kernel.ipynb b/python/notebooks/01-basic-loading-the-kernel.ipynb index 8de0a12ca9fd..01ed9cfff3f6 100644 --- a/python/notebooks/01-basic-loading-the-kernel.ipynb +++ b/python/notebooks/01-basic-loading-the-kernel.ipynb @@ -25,17 +25,20 @@ "metadata": {}, "outputs": [], "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" + "!python -m pip install semantic-kernel==0.5.1.dev0" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion" + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureChatCompletion,\n", + " OpenAIChatCompletion,\n", + ")" ] }, { @@ -48,7 +51,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -56,17 +59,6 @@ "kernel_1 = sk.Kernel()" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Instance with a custom logger\n", - "my_logger = sk.NullLogger()\n", - "kernel_2 = sk.Kernel(log=my_logger)" - ] - }, { "attachments": {}, "cell_type": "markdown", @@ -87,22 +79,22 @@ "source": [ "kernel = sk.Kernel()\n", "\n", - "kernel.add_chat_service( # We are adding a text service\n", - " \"Azure_curie\", # The alias we can use in prompt templates' config.json\n", + "kernel.add_chat_service( # We are adding a text service\n", + " \"Azure_curie\", # The alias we can use in prompt templates' config.json\n", " AzureChatCompletion(\n", - " \"my-finetuned-Curie\", # Azure OpenAI *Deployment name*\n", - " \"https://contoso.openai.azure.com/\", # Azure OpenAI *Endpoint*\n", - " \"...your Azure OpenAI Key...\" # Azure OpenAI *Key*\n", - " )\n", + " deployment_name=\"my-finetuned-Curie\", # Azure OpenAI *Deployment name*\n", + " endpoint=\"https://contoso.openai.azure.com/\", # Azure OpenAI *Endpoint*\n", + " api_key=\"...your Azure OpenAI Key...\", # Azure OpenAI *Key*\n", + " ),\n", ")\n", "\n", - "kernel.add_chat_service( # We are adding a text service\n", - " \"OpenAI_chat_gpt\", # The alias we can use in prompt templates' config.json\n", + "kernel.add_chat_service( # We are adding a text service\n", + " \"OpenAI_chat_gpt\", # The alias we can use in prompt templates' config.json\n", " OpenAIChatCompletion(\n", - " \"gpt-3.5-turbo\", # OpenAI Model Name\n", - " \"...your OpenAI API Key...\", # OpenAI API key\n", - " \"...your OpenAI Org ID...\" # *optional* OpenAI Organization ID\n", - " )\n", + " ai_model_id=\"gpt-3.5-turbo\", # OpenAI Model Name\n", + " api_key=\"...your OpenAI API Key...\", # 
OpenAI API key\n", + " org_id=\"...your OpenAI Org ID...\", # *optional* OpenAI Organization ID\n", + " ),\n", ")" ] }, @@ -154,7 +146,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.10.12" }, "polyglot_notebook": { "kernelInfo": { diff --git a/python/notebooks/02-running-prompts-from-file.ipynb b/python/notebooks/02-running-prompts-from-file.ipynb index c41dc0294caf..24ca926125c4 100644 --- a/python/notebooks/02-running-prompts-from-file.ipynb +++ b/python/notebooks/02-running-prompts-from-file.ipynb @@ -6,16 +6,16 @@ "id": "692e361b", "metadata": {}, "source": [ - "# How to run a semantic skills from file\n", - "Now that you're familiar with Kernel basics, let's see how the kernel allows you to run Semantic Skills and Semantic Functions stored on disk. \n", + "# How to run a semantic plugins from file\n", + "Now that you're familiar with Kernel basics, let's see how the kernel allows you to run Semantic Plugins and Semantic Functions stored on disk. \n", "\n", - "A Semantic Skill is a collection of Semantic Functions, where each function is defined with natural language that can be provided with a text file. \n", + "A Semantic Plugin is a collection of Semantic Functions, where each function is defined with natural language that can be provided with a text file. \n", "\n", "Refer to our [glossary](https://github.com/microsoft/semantic-kernel/blob/main/docs/GLOSSARY.md) for an in-depth guide to the terms.\n", "\n", "The repository includes some examples under the [samples](https://github.com/microsoft/semantic-kernel/tree/main/samples) folder.\n", "\n", - "For instance, [this](../../skills/FunSkill/Joke/skprompt.txt) is the **Joke function** part of the **FunSkill skill**:" + "For instance, [this](../../plugins/FunPlugin/Joke/skprompt.txt) is the **Joke function** part of the **FunPlugin plugin**:" ] }, { @@ -55,7 +55,7 @@ "metadata": {}, "source": [ "\n", - "In the same folder you'll notice a second [config.json](../../skills/FunSkill/Joke/config.json) file. The file is optional, and is used to set some parameters for large language models like Temperature, TopP, Stop Sequences, etc.\n", + "In the same folder you'll notice a second [config.json](../../plugins/FunPlugin/Joke/config.json) file. 
The file is optional, and is used to set some parameters for large language models like Temperature, TopP, Stop Sequences, etc.\n", "\n", "```\n", "{\n", @@ -89,7 +89,7 @@ "metadata": {}, "outputs": [], "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" + "!python -m pip install semantic-kernel==0.5.1.dev0" ] }, { @@ -100,7 +100,10 @@ "outputs": [], "source": [ "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureChatCompletion,\n", + " OpenAIChatCompletion,\n", + ")\n", "\n", "kernel = sk.Kernel()\n", "\n", @@ -109,10 +112,14 @@ "# Configure AI service used by the kernel\n", "if useAzureOpenAI:\n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat_completion\", AzureChatCompletion(deployment, endpoint, api_key))\n", + " azure_chat_service = AzureChatCompletion(\n", + " deployment_name=\"turbo\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your chat model\n", + " kernel.add_chat_service(\"chat_completion\", azure_chat_service)\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))" + " oai_chat_service = OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id)\n", + " kernel.add_chat_service(\"chat-gpt\", oai_chat_service)" ] }, { @@ -121,7 +128,7 @@ "id": "fd5ff1f4", "metadata": {}, "source": [ - "Import the skill and all its functions:" + "Import the plugin and all its functions:" ] }, { @@ -131,10 +138,10 @@ "metadata": {}, "outputs": [], "source": [ - "# note: using skills from the samples folder\n", - "skills_directory = \"../../samples/skills\"\n", + "# note: using plugins from the samples folder\n", + "plugins_directory = \"../../samples/plugins\"\n", "\n", - "funFunctions = kernel.import_semantic_skill_from_directory(skills_directory, \"FunSkill\")\n", + "funFunctions = kernel.import_semantic_plugin_from_directory(plugins_directory, \"FunPlugin\")\n", "\n", "jokeFunction = funFunctions[\"Joke\"]" ] @@ -145,7 +152,7 @@ "id": "edd99fa0", "metadata": {}, "source": [ - "How to use the skill functions, e.g. generate a joke about \"*time travel to dinosaur age*\":" + "How to use the plugin functions, e.g. 
generate a joke about \"*time travel to dinosaur age*\":" ] }, { @@ -155,13 +162,8 @@ "metadata": {}, "outputs": [], "source": [ - "result = jokeFunction(\"time travel to dinosaur age\")\n", - "\n", - "print(result)\n", - "\n", - "# You can also invoke functions asynchronously\n", - "# result = await jokeFunction.invoke_async(\"time travel to dinosaur age\")\n", - "# print(result)" + "result = await jokeFunction.invoke(\"travel to dinosaur age\")\n", + "print(result)" ] }, { @@ -170,7 +172,7 @@ "id": "2281a1fc", "metadata": {}, "source": [ - "Great, now that you know how to load a skill from disk, let's show how you can [create and run a semantic function inline.](./03-semantic-function-inline.ipynb)" + "Great, now that you know how to load a plugin from disk, let's show how you can [create and run a semantic function inline.](./03-semantic-function-inline.ipynb)" ] } ], @@ -190,7 +192,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/python/notebooks/03-semantic-function-inline.ipynb b/python/notebooks/03-semantic-function-inline.ipynb index 860de47a5aaa..dbaf6c0ebac8 100644 --- a/python/notebooks/03-semantic-function-inline.ipynb +++ b/python/notebooks/03-semantic-function-inline.ipynb @@ -55,7 +55,7 @@ "metadata": {}, "outputs": [], "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" + "!python -m pip install semantic-kernel==0.5.1.dev0" ] }, { @@ -66,7 +66,10 @@ "outputs": [], "source": [ "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import AzureTextCompletion, OpenAITextCompletion\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureTextCompletion,\n", + " OpenAITextCompletion,\n", + ")\n", "\n", "kernel = sk.Kernel()\n", "\n", @@ -75,10 +78,14 @@ "# Configure AI service used by the kernel\n", "if useAzureOpenAI:\n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_text_completion_service(\"dv\", AzureTextCompletion(deployment, endpoint, api_key))\n", + " azure_text_service = AzureTextCompletion(\n", + " deployment_name=\"text\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your text model\n", + " kernel.add_text_completion_service(\"dv\", azure_text_service)\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_text_completion_service(\"dv\", OpenAITextCompletion(\"text-davinci-003\", api_key, org_id))" + " oai_text_service = OpenAITextCompletion(ai_model_id=\"gpt-3.5-turbo-instruct\", api_key=api_key, org_id=org_id)\n", + " kernel.add_text_completion_service(\"dv\", oai_text_service)" ] }, { @@ -103,7 +110,7 @@ "Summarize the content above.\n", "\"\"\"\n", "\n", - "summarize = kernel.create_semantic_function(prompt, max_tokens=2000, temperature=0.2, top_p=0.5)" + "summarize = kernel.create_semantic_function(prompt_template=prompt, max_tokens=2000, temperature=0.2, top_p=0.5)" ] }, { @@ -112,7 +119,7 @@ "id": "f26b90c4", "metadata": {}, "source": [ - "Set up some content to summarize, here's an extract about Demo, an ancient Greek poet, taken from Wikipedia (https://en.wikipedia.org/wiki/Demo_(ancient_Greek_poet)." + "Set up some content to summarize, here's an extract about Demo, an ancient Greek poet, taken from Wikipedia (https://en.wikipedia.org/wiki/Demo_(ancient_Greek_poet))." 
] }, { @@ -151,8 +158,7 @@ "metadata": {}, "outputs": [], "source": [ - "# If needed, async is available too: summary = await summarize.invoke_async(input_text)\n", - "summary = summarize(input_text)\n", + "summary = await summarize(input_text)\n", "\n", "print(summary)" ] @@ -163,7 +169,7 @@ "id": "1c2c1262", "metadata": {}, "source": [ - "# Using ChatCompletion for Semantic Skills" + "# Using ChatCompletion for Semantic Plugins" ] }, { @@ -172,7 +178,7 @@ "id": "29b59b28", "metadata": {}, "source": [ - "You can also use chat completion models (like `gpt-35-turbo` and `gpt4`) for creating skills. Normally you would have to tweak the API to accommodate for a system and user role, but SK abstracts that away for you by using `kernel.add_chat_service` and `AzureChatCompletion` or `OpenAIChatCompletion`" + "You can also use chat completion models (like `gpt-35-turbo` and `gpt4`) for creating plugins. Normally you would have to tweak the API to accommodate for a system and user role, but SK abstracts that away for you by using `kernel.add_chat_service` and `AzureChatCompletion` or `OpenAIChatCompletion`" ] }, { @@ -194,7 +200,10 @@ "outputs": [], "source": [ "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureChatCompletion,\n", + " OpenAIChatCompletion,\n", + ")\n", "\n", "kernel = sk.Kernel()\n", "\n", @@ -203,15 +212,14 @@ "# Configure AI service used by the kernel\n", "if useAzureOpenAI:\n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\n", - " \"chat_completion\",\n", - " AzureChatCompletion(deployment, endpoint, api_key),\n", - " )\n", + " azure_chat_service = AzureChatCompletion(\n", + " deployment_name=\"turbo\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your chat model\n", + " kernel.add_chat_service(\"chat_completion\", azure_chat_service)\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\n", - " \"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id)\n", - " )" + " oai_chat_service = OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id)\n", + " kernel.add_chat_service(\"chat-gpt\", oai_chat_service)" ] }, { @@ -238,11 +246,11 @@ " does not conflict with the First or Second Law.\n", "\"\"\"\n", "\n", - "tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5)\n", + "tldr_function = kernel.create_semantic_function(prompt_template=sk_prompt, max_tokens=200, temperature=0, top_p=0.5)\n", "\n", - "summary = tldr_function(text)\n", + "summary = await tldr_function(text)\n", "\n", - "print(f\"Output: {summary}\") # Output: Robots must not harm humans." + "print(f\"Output: {summary}\") # Output: Robots must not harm humans." 
] } ], @@ -262,7 +270,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/python/notebooks/04-context-variables-chat.ipynb b/python/notebooks/04-context-variables-chat.ipynb index e8a5303a8506..1bd69448b77a 100644 --- a/python/notebooks/04-context-variables-chat.ipynb +++ b/python/notebooks/04-context-variables-chat.ipynb @@ -26,7 +26,7 @@ "metadata": {}, "outputs": [], "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" + "!python -m pip install semantic-kernel==0.5.1.dev0" ] }, { @@ -37,7 +37,10 @@ "outputs": [], "source": [ "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureChatCompletion,\n", + " OpenAIChatCompletion,\n", + ")\n", "\n", "kernel = sk.Kernel()\n", "\n", @@ -46,10 +49,16 @@ "# Configure AI service used by the kernel\n", "if useAzureOpenAI:\n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat_completion\", AzureChatCompletion(deployment, endpoint, api_key))\n", + " kernel.add_chat_service(\n", + " \"chat_completion\",\n", + " AzureChatCompletion(deployment_name=deployment, endpoint=endpoint, api_key=api_key),\n", + " )\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))\n" + " kernel.add_chat_service(\n", + " \"gpt-3.5\",\n", + " OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id),\n", + " )" ] }, { @@ -93,7 +102,13 @@ "metadata": {}, "outputs": [], "source": [ - "chat_function = kernel.create_semantic_function(sk_prompt, \"ChatBot\", max_tokens=2000, temperature=0.7, top_p=0.5)" + "chat_function = kernel.create_semantic_function(\n", + " prompt_template=sk_prompt,\n", + " function_name=\"ChatBot\",\n", + " max_tokens=2000,\n", + " temperature=0.7,\n", + " top_p=0.5,\n", + ")" ] }, { @@ -133,7 +148,7 @@ "outputs": [], "source": [ "context[\"user_input\"] = \"Hi, I'm looking for book suggestions\"\n", - "bot_answer = await chat_function.invoke_async(context=context)\n", + "bot_answer = await chat_function.invoke(context=context)\n", "print(bot_answer)" ] }, @@ -179,7 +194,7 @@ " context[\"user_input\"] = input_text\n", "\n", " # Process the user message and get an answer\n", - " answer = await chat_function.invoke_async(context=context)\n", + " answer = await chat_function.invoke(context=context)\n", "\n", " # Show the response\n", " print(f\"ChatBot: {answer}\")\n", @@ -264,7 +279,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/python/notebooks/05-using-the-planner.ipynb b/python/notebooks/05-using-the-planner.ipynb index 44f6e44ec70c..41700ccc7ef1 100644 --- a/python/notebooks/05-using-the-planner.ipynb +++ b/python/notebooks/05-using-the-planner.ipynb @@ -11,9 +11,9 @@ "\n", "It makes use of the collection of native and semantic functions that have been registered to the kernel and using AI, will formulate a plan to execute the given ask.\n", "\n", - "From our own testing, planner works best with more powerful models like `gpt4` but sometimes you might get working plans with cheaper models like `gpt-35-turbo`. 
We encourage you to implement your own versions of the planner and use different models that fit your user needs. \n", + "From our own testing, planner works best with more powerful models like `gpt4` but sometimes you might get working plans with cheaper models like `gpt-35-turbo`. We encourage you to implement your own versions of the planner and use different models that fit your user needs.\n", "\n", - "Read more about planner [here](https://aka.ms/sk/concepts/planner)" + "Read more about planner [here](https://aka.ms/sk/concepts/planner)\n" ] }, { @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "!python -m pip install semantic-kernel==0.3.12.dev0" + "!python -m pip install semantic-kernel==0.5.1.dev0" ] }, { @@ -34,7 +34,10 @@ "outputs": [], "source": [ "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, AzureChatCompletion\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " OpenAIChatCompletion,\n", + " AzureChatCompletion,\n", + ")\n", "\n", "kernel = sk.Kernel()\n", "\n", @@ -42,12 +45,17 @@ "\n", "# Configure AI backend used by the kernel\n", "if useAzureOpenAI:\n", - " \n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"gpt-3.5\", AzureChatCompletion(deployment, endpoint, api_key))\n", + " kernel.add_chat_service(\n", + " \"chat_completion\",\n", + " AzureChatCompletion(deployment_name=deployment, endpoint=endpoint, api_key=api_key),\n", + " )\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"gpt-3.5\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))" + " kernel.add_chat_service(\n", + " \"gpt-4\",\n", + " OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo-1106\", api_key=api_key, org_id=org_id),\n", + " )" ] }, { @@ -55,7 +63,7 @@ "id": "4ff28070", "metadata": {}, "source": [ - "## It all begins with an ask" + "## It all begins with an ask\n" ] }, { @@ -75,10 +83,11 @@ "id": "a5d86739", "metadata": {}, "source": [ - "### Providing skills to the planner\n", - "The planner needs to know what skills are available to it. Here we'll give it access to the `SummarizeSkill` and `WriterSkill` we have defined on disk. This will include many semantic functions, of which the planner will intelligently choose a subset. \n", + "### Providing plugins to the planner\n", + "\n", + "The planner needs to know what plugins are available to it. Here we'll give it access to the `SummarizePlugin` and `WriterPlugin` we have defined on disk. This will include many semantic functions, of which the planner will intelligently choose a subset.\n", "\n", - "You can also include native functions as well. Here we'll add the TextSkill." + "You can also include native functions as well. 
Here we'll add the TextPlugin.\n" ] }, { @@ -88,12 +97,12 @@ "metadata": {}, "outputs": [], "source": [ - "from semantic_kernel.core_skills.text_skill import TextSkill\n", + "from semantic_kernel.core_plugins.text_plugin import TextPlugin\n", "\n", - "skills_directory = \"../../samples/skills/\"\n", - "summarize_skill = kernel.import_semantic_skill_from_directory(skills_directory, \"SummarizeSkill\")\n", - "writer_skill = kernel.import_semantic_skill_from_directory(skills_directory, \"WriterSkill\")\n", - "text_skill = kernel.import_skill(TextSkill(), \"TextSkill\")" + "plugins_directory = \"../../samples/plugins/\"\n", + "summarize_plugin = kernel.import_semantic_plugin_from_directory(plugins_directory, \"SummarizePlugin\")\n", + "writer_plugin = kernel.import_semantic_plugin_from_directory(plugins_directory, \"WriterPlugin\")\n", + "text_plugin = kernel.import_plugin(TextPlugin(), \"TextPlugin\")" ] }, { @@ -101,7 +110,7 @@ "id": "deff5675", "metadata": {}, "source": [ - "Define your ASK. What do you want the Kernel to do?" + "Define your ASK. What do you want the Kernel to do?\n" ] }, { @@ -109,7 +118,7 @@ "id": "eee6fe7b", "metadata": {}, "source": [ - "# Basic Planner" + "# Basic Planner\n" ] }, { @@ -117,7 +126,7 @@ "id": "590a22f2", "metadata": {}, "source": [ - " Let's start by taking a look at a basic planner. The `BasicPlanner` produces a JSON-based plan that aims to solve the provided ask sequentially and evaluated in order." + "Let's start by taking a look at a basic planner. The `BasicPlanner` produces a JSON-based plan that aims to solve the provided ask sequentially and is evaluated in order.\n" ] }, { @@ -128,6 +137,7 @@ "outputs": [], "source": [ "from semantic_kernel.planning.basic_planner import BasicPlanner\n", + "\n", "planner = BasicPlanner()" ] }, @@ -138,7 +148,7 @@ "metadata": {}, "outputs": [], "source": [ - "basic_plan = await planner.create_plan_async(ask, kernel)" + "basic_plan = await planner.create_plan(ask, kernel)" ] }, { @@ -156,9 +166,9 @@ "id": "0f3a48f8", "metadata": {}, "source": [ - "You can see that the Planner took my ask and converted it into an JSON-based plan detailing how the AI would go about solving this task, making use of the skills that the Kernel has available to it.\n", + "You can see that the Planner took my ask and converted it into a JSON-based plan detailing how the AI would go about solving this task, making use of the plugins that the Kernel has available to it.\n", "\n", - "As you can see in the above plan, the AI has determined which functions to call in order to fulfill the user ask. The output of each step of the plan becomes the input to the next function." + "As you can see in the above plan, the AI has determined which functions to call in order to fulfill the user ask. The output of each step of the plan becomes the input to the next function.\n" ] }, { @@ -166,7 +176,7 @@ "id": "cd4df0c2", "metadata": {}, "source": [ - "Let's also define an inline plugin and have it be available to the Planner. 
Be sure to give it a function name and plugin name.\n" ] }, { @@ -181,8 +191,13 @@ "\n", "Rewrite the above in the style of Shakespeare.\n", "\"\"\"\n", - "shakespeareFunction = kernel.create_semantic_function(sk_prompt, \"shakespeare\", \"ShakespeareSkill\",\n", - " max_tokens=2000, temperature=0.8)" + "shakespeareFunction = kernel.create_semantic_function(\n", + " prompt_template=sk_prompt,\n", + " function_name=\"shakespeare\",\n", + " plugin_name=\"ShakespearePlugin\",\n", + " max_tokens=2000,\n", + " temperature=0.8,\n", + ")" ] }, { @@ -190,7 +205,7 @@ "id": "5057cf9b", "metadata": {}, "source": [ - "Let's update our ask using this new skill" + "Let's update our ask using this new plugin\n" ] }, { @@ -205,7 +220,7 @@ "She likes Shakespeare so write using his style. She speaks French so write it in French.\n", "Convert the text to uppercase.\"\"\"\n", "\n", - "new_plan = await planner.create_plan_async(ask, kernel)" + "new_plan = await planner.create_plan(ask, kernel)" ] }, { @@ -223,7 +238,7 @@ "id": "b67a052e", "metadata": {}, "source": [ - "### Executing the plan" + "### Executing the plan\n" ] }, { @@ -231,7 +246,7 @@ "id": "3b839c90", "metadata": {}, "source": [ - "Now that we have a plan, let's try to execute it! The Planner has a function called `execute_plan`." + "Now that we have a plan, let's try to execute it! The Planner has a function called `execute_plan`.\n" ] }, { @@ -241,7 +256,7 @@ "metadata": {}, "outputs": [], "source": [ - "results = await planner.execute_plan_async(new_plan, kernel)" + "results = await planner.execute_plan(new_plan, kernel)" ] }, { @@ -259,7 +274,7 @@ "id": "e8a9b6b7", "metadata": {}, "source": [ - "# The Plan Object Model" + "# The Plan Object Model\n" ] }, { @@ -269,7 +284,7 @@ "source": [ "To build more advanced planners, we need to introduce a proper Plan object that can contain all the necessary state and information needed for high quality plans.\n", "\n", - "To see what that object model is, look at (https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/planning/plan.py)" + "To see what that object model is, look at (https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/planning/plan.py)\n" ] }, { @@ -277,7 +292,7 @@ "id": "0a0cb2a2", "metadata": {}, "source": [ - "# Sequential Planner" + "# Sequential Planner\n" ] }, { @@ -285,7 +300,7 @@ "id": "a1c66d83", "metadata": {}, "source": [ - "The sequential planner is an XML-based step-by-step planner. You can see the prompt used for it here (https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/planning/sequential_planner/Skills/SequentialPlanning/skprompt.txt)" + "The sequential planner is an XML-based step-by-step planner. 
You can see the prompt used for it here (https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/planning/sequential_planner/Plugins/SequentialPlanning/skprompt.txt)\n" ] }, { @@ -296,6 +311,7 @@ "outputs": [], "source": [ "from semantic_kernel.planning import SequentialPlanner\n", + "\n", "planner = SequentialPlanner(kernel)" ] }, @@ -306,7 +322,7 @@ "metadata": {}, "outputs": [], "source": [ - "sequential_plan = await planner.create_plan_async(goal=ask)" + "sequential_plan = await planner.create_plan(goal=ask)" ] }, { @@ -314,7 +330,7 @@ "id": "ee2f462b", "metadata": {}, "source": [ - "To see the steps that the Sequential Planner will take, we can iterate over them and print their descriptions" + "To see the steps that the Sequential Planner will take, we can iterate over them and print their descriptions\n" ] }, { @@ -333,7 +349,7 @@ "id": "4db5f844", "metadata": {}, "source": [ - "Let's ask the sequential planner to execute the plan." + "Let's ask the sequential planner to execute the plan.\n" ] }, { @@ -343,7 +359,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = await sequential_plan.invoke_async()" + "result = await sequential_plan.invoke()" ] }, { @@ -361,7 +377,7 @@ "id": "d6487c75", "metadata": {}, "source": [ - "# Action Planner" + "# Action Planner\n" ] }, { @@ -369,7 +385,7 @@ "id": "b045e26b", "metadata": {}, "source": [ - "The action planner takes in a list of functions and the goal, and outputs a **single** function to use that is appropriate to meet that goal." + "The action planner takes in a list of functions and the goal, and outputs a **single** function to use that is appropriate to meet that goal.\n" ] }, { @@ -380,6 +396,7 @@ "outputs": [], "source": [ "from semantic_kernel.planning import ActionPlanner\n", + "\n", "planner = ActionPlanner(kernel)" ] }, @@ -388,7 +405,7 @@ "id": "53b1f296", "metadata": {}, "source": [ - "Let's add more skills to the kernel" + "Let's add more plugins to the kernel\n" ] }, { @@ -398,11 +415,17 @@ "metadata": {}, "outputs": [], "source": [ - "from semantic_kernel.core_skills import FileIOSkill, MathSkill, TextSkill, TimeSkill\n", - "kernel.import_skill(MathSkill(), \"math\")\n", - "kernel.import_skill(FileIOSkill(), \"fileIO\")\n", - "kernel.import_skill(TimeSkill(), \"time\")\n", - "kernel.import_skill(TextSkill(), \"text\")" + "from semantic_kernel.core_plugins import (\n", + " FileIOPlugin,\n", + " MathPlugin,\n", + " TextPlugin,\n", + " TimePlugin,\n", + ")\n", + "\n", + "kernel.import_plugin(MathPlugin(), \"math\")\n", + "kernel.import_plugin(FileIOPlugin(), \"fileIO\")\n", + "kernel.import_plugin(TimePlugin(), \"time\")\n", + "kernel.import_plugin(TextPlugin(), \"text\")" ] }, { @@ -422,7 +445,7 @@ "metadata": {}, "outputs": [], "source": [ - "plan = await planner.create_plan_async(goal=ask)" + "plan = await planner.create_plan(goal=ask)" ] }, { @@ -432,7 +455,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = await plan.invoke_async()" + "result = await plan.invoke()" ] }, { @@ -450,7 +473,7 @@ "id": "789b651a", "metadata": {}, "source": [ - "# Stepwise Planner" + "# Stepwise Planner\n" ] }, { @@ -460,7 +483,7 @@ "source": [ "Stepwise Planner is based off the paper from MRKL (Modular Reasoning, Knowledge and Language) and is similar to other papers like ReACT (Reasoning and Acting in Language Models). At the core, the stepwise planner allows for the AI to form \"thoughts\" and \"observations\" and execute actions based off those to achieve a user's goal. 
This continues until all required functions are complete and a final output is generated.\n", "\n", - "See a video walkthrough of Stepwise Planner [here.](https://youtu.be/DG_Ge1v0c4Q?si=T1CHaAm1vV0mWRHu)" + "See a video walkthrough of Stepwise Planner [here.](https://youtu.be/DG_Ge1v0c4Q?si=T1CHaAm1vV0mWRHu)\n" ] }, { @@ -481,11 +504,11 @@ "id": "e0a00bde", "metadata": {}, "source": [ - "Let's create a Bing Search native skill that we can pass in to the Kernel.\n", + "Let's create a Bing Search native plugin that we can pass in to the Kernel.\n", "\n", "Make sure you have a Bing Search API key in your `.env` file\n", "\n", - "(https://www.microsoft.com/en-us/bing/apis/bing-web-search-api)" + "(https://www.microsoft.com/en-us/bing/apis/bing-web-search-api)\n" ] }, { @@ -495,26 +518,28 @@ "metadata": {}, "outputs": [], "source": [ - "class WebSearchEngineSkill:\n", + "class WebSearchEnginePlugin:\n", " \"\"\"\n", - " A search engine skill.\n", + " A search engine plugin.\n", " \"\"\"\n", - " from semantic_kernel.orchestration.sk_context import SKContext\n", - " from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter\n", + "\n", + " from semantic_kernel.orchestration.kernel_context import KernelContext\n", + " from semantic_kernel.plugin_definition import (\n", + " kernel_function,\n", + " kernel_function_context_parameter,\n", + " )\n", "\n", " def __init__(self, connector) -> None:\n", " self._connector = connector\n", "\n", - " @sk_function(\n", - " description=\"Performs a web search for a given query\", name=\"searchAsync\"\n", - " )\n", - " @sk_function_context_parameter(\n", + " @kernel_function(description=\"Performs a web search for a given query\", name=\"searchAsync\")\n", + " @kernel_function_context_parameter(\n", " name=\"query\",\n", " description=\"The search query\",\n", " )\n", - " async def search_async(self, query: str, context: SKContext) -> str:\n", + " async def search(self, query: str, context: KernelContext) -> str:\n", " query = query or context.variables.get(\"query\")[1]\n", - " result = await self._connector.search_async(query, num_results=5, offset=0)\n", + " result = await self._connector.search(query, num_results=5, offset=0)\n", " return str(result)" ] }, @@ -529,7 +554,7 @@ "\n", "BING_API_KEY = sk.bing_search_settings_from_dot_env()\n", "connector = BingConnector(BING_API_KEY)\n", - "kernel.import_skill(WebSearchEngineSkill(connector), skill_name=\"WebSearch\")" + "kernel.import_plugin(WebSearchEnginePlugin(connector), plugin_name=\"WebSearch\")" ] }, { @@ -537,7 +562,7 @@ "id": "effdf3ab", "metadata": {}, "source": [ - "Let's also add a couple more skills" + "Let's also add a couple more plugins\n" ] }, { @@ -547,11 +572,11 @@ "metadata": {}, "outputs": [], "source": [ - "from semantic_kernel.core_skills.math_skill import MathSkill\n", - "from semantic_kernel.core_skills.time_skill import TimeSkill\n", + "from semantic_kernel.core_plugins.math_plugin import MathPlugin\n", + "from semantic_kernel.core_plugins.time_plugin import TimePlugin\n", "\n", - "kernel.import_skill(TimeSkill(), \"time\")\n", - "kernel.import_skill(MathSkill(), \"math\")" + "kernel.import_plugin(TimePlugin(), \"time\")\n", + "kernel.import_plugin(MathPlugin(), \"math\")" ] }, { @@ -561,9 +586,7 @@ "metadata": {}, "outputs": [], "source": [ - "planner = StepwisePlanner(\n", - " kernel, StepwisePlannerConfig(max_iterations=10, min_iteration_time_ms=1000)\n", - ")" + "planner = StepwisePlanner(kernel, StepwisePlannerConfig(max_iterations=10, 
min_iteration_time_ms=1000))" ] }, { @@ -571,7 +594,7 @@ "id": "50699ec3", "metadata": {}, "source": [ - "Now let's do a more complicated ask that will require planner to make a call to Bing to get the latest information." + "Now let's do a more complicated ask that will require planner to make a call to Bing to get the latest information.\n" ] }, { @@ -593,7 +616,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = await plan.invoke_async()" + "result = await plan.invoke()" ] }, { @@ -611,7 +634,7 @@ "id": "cb40370d", "metadata": {}, "source": [ - "Let's see the steps that the AI took to get to the answer." + "Let's see the steps that the AI took to get to the answer.\n" ] }, { @@ -623,19 +646,11 @@ "source": [ "for index, step in enumerate(plan._steps):\n", " print(\"Step:\", index)\n", - " print(\"Description:\",step.description)\n", - " print(\"Function:\", step.skill_name + \".\" + step._function.name)\n", + " print(\"Description:\", step.description)\n", + " print(\"Function:\", step.plugin_name + \".\" + step._function.name)\n", " if len(step._outputs) > 0:\n", - " print( \" Output:\\n\", str.replace(result[step._outputs[0]],\"\\n\", \"\\n \"))" + " print(\" Output:\\n\", str.replace(result[step._outputs[0]], \"\\n\", \"\\n \"))" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4652ac81", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -654,7 +669,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.11" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/python/notebooks/06-memory-and-embeddings.ipynb b/python/notebooks/06-memory-and-embeddings.ipynb index 4e43e925be17..81e3cc0124e0 100644 --- a/python/notebooks/06-memory-and-embeddings.ipynb +++ b/python/notebooks/06-memory-and-embeddings.ipynb @@ -9,16 +9,16 @@ "# Building Semantic Memory with Embeddings\n", "\n", "So far, we've mostly been treating the kernel as a stateless orchestration engine.\n", - "We send text into a model API and receive text out. \n", + "We send text into a model API and receive text out.\n", "\n", "In a [previous notebook](04-context-variables-chat.ipynb), we used `context variables` to pass in additional\n", - "text into prompts to enrich them with more context. This allowed us to create a basic chat experience. \n", + "text into prompts to enrich them with more context. This allowed us to create a basic chat experience.\n", "\n", "However, if you solely relied on context variables, you would quickly realize that eventually your prompt\n", - "would grow so large that you would run into a the model's token limit. What we need is a way to persist state\n", - "and build both short-term and long-term memory to empower even more intelligent applications. \n", + "would grow so large that you would run into the model's token limit. What we need is a way to persist state\n", + "and build both short-term and long-term memory to empower even more intelligent applications.\n", "\n", - "To do this, we dive into the key concept of `Semantic Memory` in the Semantic Kernel. 
" + "To do this, we dive into the key concept of `Semantic Memory` in the Semantic Kernel.\n" ] }, { @@ -28,7 +28,7 @@ "metadata": {}, "outputs": [], "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" + "!python -m pip install semantic-kernel==0.5.1.dev0" ] }, { @@ -41,7 +41,12 @@ "from typing import Tuple\n", "\n", "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAITextEmbedding, AzureChatCompletion, AzureTextEmbedding" + "from semantic_kernel.connectors.ai.open_ai import (\n", + " OpenAIChatCompletion,\n", + " OpenAITextEmbedding,\n", + " AzureChatCompletion,\n", + " AzureTextEmbedding,\n", + ")" ] }, { @@ -51,9 +56,9 @@ "metadata": {}, "source": [ "In order to use memory, we need to instantiate the Kernel with a Memory Storage\n", - "and an Embedding service. In this example, we make use of the `VolatileMemoryStore` \"which can be thought of as a temporary in-memory storage (not to be confused with Semantic Memory). This memory is not written to disk and is only available during the app session.\n", + "and an Embedding service. In this example, we make use of the `VolatileMemoryStore` which can be thought of as a temporary in-memory storage. This memory is not written to disk and is only available during the app session.\n", "\n", - "When developing your app you will have the option to plug in persistent storage like Azure Cosmos Db, PostgreSQL, SQLite, etc. Semantic Memory allows also to index external data sources, without duplicating all the information, more on that later." + "When developing your app you will have the option to plug in persistent storage like Azure AI Search, Azure Cosmos Db, PostgreSQL, SQLite, etc. Semantic Memory allows also to index external data sources, without duplicating all the information as you will see further down in this notebook.\n" ] }, { @@ -70,16 +75,21 @@ "# Configure AI service used by the kernel\n", "if useAzureOpenAI:\n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat_completion\", AzureChatCompletion(deployment, endpoint, api_key))\n", - " # next line assumes embeddings deployment name is \"text-embedding-ada-002\", adjust this if appropriate \n", - " kernel.add_text_embedding_generation_service(\"ada\", AzureTextEmbedding(\"text-embedding-ada-002\", endpoint, api_key))\n", + " # next line assumes chat deployment name is \"turbo\", adjust the deployment name to the value of your chat model if needed\n", + " azure_chat_service = AzureChatCompletion(deployment_name=\"turbo\", endpoint=endpoint, api_key=api_key)\n", + " # next line assumes embeddings deployment name is \"text-embedding\", adjust the deployment name to the value of your chat model if needed\n", + " azure_text_embedding = AzureTextEmbedding(deployment_name=\"text-embedding\", endpoint=endpoint, api_key=api_key)\n", + " kernel.add_chat_service(\"chat_completion\", azure_chat_service)\n", + " kernel.add_text_embedding_generation_service(\"ada\", azure_text_embedding)\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))\n", - " kernel.add_text_embedding_generation_service(\"ada\", OpenAITextEmbedding(\"text-embedding-ada-002\", api_key, org_id))\n", + " oai_chat_service = OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id)\n", + " oai_text_embedding = 
OpenAITextEmbedding(ai_model_id=\"text-embedding-ada-002\", api_key=api_key, org_id=org_id)\n", + " kernel.add_chat_service(\"chat-gpt\", oai_chat_service)\n", + " kernel.add_text_embedding_generation_service(\"ada\", oai_text_embedding)\n", "\n", "kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore())\n", - "kernel.import_skill(sk.core_skills.TextMemorySkill())" + "kernel.import_plugin(sk.core_plugins.TextMemoryPlugin(), \"text_memory\")" ] }, { @@ -90,7 +100,7 @@ "source": [ "At its core, Semantic Memory is a set of data structures that allow you to store the meaning of text that come from different data sources, and optionally to store the source text too. These texts can be from the web, e-mail providers, chats, a database, or from your local directory, and are hooked up to the Semantic Kernel through data source connectors.\n", "\n", - "The texts are embedded or compressed into a vector of floats representing mathematically the texts' contents and meaning. You can read more about embeddings [here](https://aka.ms/sk/embeddings)." + "The texts are embedded or compressed into a vector of floats representing mathematically the texts' contents and meaning. You can read more about embeddings [here](https://aka.ms/sk/embeddings).\n" ] }, { @@ -100,7 +110,8 @@ "metadata": {}, "source": [ "### Manually adding memories\n", - "Let's create some initial memories \"About Me\". We can add memories to our `VolatileMemoryStore` by using `SaveInformationAsync`" + "\n", + "Let's create some initial memories \"About Me\". We can add memories to our `VolatileMemoryStore` by using `SaveInformationAsync`\n" ] }, { @@ -112,21 +123,26 @@ "source": [ "async def populate_memory(kernel: sk.Kernel) -> None:\n", " # Add some documents to the semantic memory\n", - " await kernel.memory.save_information_async(\n", - " \"aboutMe\", id=\"info1\", text=\"My name is Andrea\"\n", - " )\n", - " await kernel.memory.save_information_async(\n", - " \"aboutMe\", id=\"info2\", text=\"I currently work as a tour guide\"\n", - " )\n", - " await kernel.memory.save_information_async(\n", - " \"aboutMe\", id=\"info3\", text=\"I've been living in Seattle since 2005\"\n", + " await kernel.memory.save_information(collection=\"aboutMe\", id=\"info1\", text=\"My name is Andrea\")\n", + " await kernel.memory.save_information(collection=\"aboutMe\", id=\"info2\", text=\"I currently work as a tour guide\")\n", + " await kernel.memory.save_information(\n", + " collection=\"aboutMe\", id=\"info3\", text=\"I've been living in Seattle since 2005\"\n", " )\n", - " await kernel.memory.save_information_async(\n", - " \"aboutMe\", id=\"info4\", text=\"I visited France and Italy five times since 2015\"\n", + " await kernel.memory.save_information(\n", + " collection=\"aboutMe\",\n", + " id=\"info4\",\n", + " text=\"I visited France and Italy five times since 2015\",\n", " )\n", - " await kernel.memory.save_information_async(\n", - " \"aboutMe\", id=\"info5\", text=\"My family is from New York\"\n", - " )" + " await kernel.memory.save_information(collection=\"aboutMe\", id=\"info5\", text=\"My family is from New York\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "await populate_memory(kernel)" ] }, { @@ -135,7 +151,7 @@ "id": "2calf857", "metadata": {}, "source": [ - "Let's try searching the memory:" + "Let's try searching the memory:\n" ] }, { @@ -156,10 +172,19 @@ "\n", " for question in questions:\n", " print(f\"Question: {question}\")\n", - " result = 
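# An illustrative, self-contained sketch of why "a vector of floats" is useful:
# semantically similar texts map to nearby vectors, and nearness is usually scored
# with cosine similarity. The three-dimensional vectors below are made up purely for
# illustration; real embeddings (e.g. text-embedding-ada-002) have 1536 dimensions.
import numpy as np


def cosine_similarity(a: np.ndarray, b: np.ndarray) -> float:
    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))


v_fact = np.array([0.9, 0.1, 0.3])      # pretend embedding of "I work as a tour guide"
v_question = np.array([0.8, 0.2, 0.4])  # pretend embedding of "what do I do for work?"
v_unrelated = np.array([-0.2, 0.9, 0.1])

print(cosine_similarity(v_fact, v_question))   # high score -> likely recalled
print(cosine_similarity(v_fact, v_unrelated))  # low score  -> likely ignored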
await kernel.memory.search_async(\"aboutMe\", question)\n", + " result = await kernel.memory.search(\"aboutMe\", question)\n", " print(f\"Answer: {result[0].text}\\n\")" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "await search_memory_examples(kernel)" + ] + }, { "attachments": {}, "cell_type": "markdown", @@ -167,7 +192,7 @@ "metadata": {}, "source": [ "Let's now revisit the our chat sample from the [previous notebook](04-context-variables-chat.ipynb).\n", - "If you remember, we used context variables to fill the prompt with a `history` that continuously got populated as we chatted with the bot. Let's add also memory to it!" + "If you remember, we used context variables to fill the prompt with a `history` that continuously got populated as we chatted with the bot. Let's add also memory to it!\n" ] }, { @@ -176,10 +201,10 @@ "id": "1ed54a32", "metadata": {}, "source": [ - "This is done by using the `TextMemorySkill` which exposes the `recall` native function.\n", + "This is done by using the `TextMemoryPlugin` which exposes the `recall` native function.\n", "\n", "`recall` takes an input ask and performs a similarity search on the contents that have\n", - "been embedded in the Memory Store and returns the most relevant memory. " + "been embedded in the Memory Store and returns the most relevant memory.\n" ] }, { @@ -191,7 +216,7 @@ "source": [ "async def setup_chat_with_memory(\n", " kernel: sk.Kernel,\n", - ") -> Tuple[sk.SKFunctionBase, sk.SKContext]:\n", + ") -> Tuple[sk.KernelFunction, sk.KernelContext]:\n", " sk_prompt = \"\"\"\n", " ChatBot can have a conversation with you about any topic.\n", " It can give explicit instructions or say 'I don't know' if\n", @@ -218,8 +243,8 @@ " context[\"fact4\"] = \"where have I traveled?\"\n", " context[\"fact5\"] = \"what do I do for work?\"\n", "\n", - " context[sk.core_skills.TextMemorySkill.COLLECTION_PARAM] = \"aboutMe\"\n", - " context[sk.core_skills.TextMemorySkill.RELEVANCE_PARAM] = 0.8\n", + " context[sk.core_plugins.TextMemoryPlugin.COLLECTION_PARAM] = \"aboutMe\"\n", + " context[sk.core_plugins.TextMemoryPlugin.RELEVANCE_PARAM] = \"0.8\"\n", "\n", " context[\"chat_history\"] = \"\"\n", "\n", @@ -232,7 +257,7 @@ "id": "1ac62457", "metadata": {}, "source": [ - "The `RelevanceParam` is used in memory search and is a measure of the relevance score from 0.0 to 1.0, where 1.0 means a perfect match. We encourage users to experiment with different values." + "The `RelevanceParam` is used in memory search and is a measure of the relevance score from 0.0 to 1.0, where 1.0 means a perfect match. We encourage users to experiment with different values.\n" ] }, { @@ -241,7 +266,7 @@ "id": "645b55a1", "metadata": {}, "source": [ - "Now that we've included our memories, let's chat!" 
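# An illustrative sketch of how the `recall` function exposed by TextMemoryPlugin is
# typically referenced from a prompt template; the setup_chat_with_memory hunk above
# elides the full prompt, so the wording below is an assumption, not the notebook's
# actual sk_prompt. create_semantic_function is assumed to be the same helper these
# notebooks use elsewhere to register semantic functions.
sk_prompt = """
ChatBot can have a conversation with you about any topic.

Information about me, from previous conversations:
- {{$fact1}} {{recall $fact1}}
- {{$fact2}} {{recall $fact2}}

Chat:
{{$chat_history}}
User: {{$user_input}}
ChatBot: """

chat_func = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0.8)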
+ "Now that we've included our memories, let's chat!\n" ] }, { @@ -251,9 +276,7 @@ "metadata": {}, "outputs": [], "source": [ - "async def chat(\n", - " kernel: sk.Kernel, chat_func: sk.SKFunctionBase, context: sk.SKContext\n", - ") -> bool:\n", + "async def chat(kernel: sk.Kernel, chat_func: sk.KernelFunction, context: sk.KernelContext) -> bool:\n", " try:\n", " user_input = input(\"User:> \")\n", " context[\"user_input\"] = user_input\n", @@ -269,7 +292,7 @@ " print(\"\\n\\nExiting chat...\")\n", " return False\n", "\n", - " answer = await kernel.run_async(chat_func, input_vars=context.variables)\n", + " answer = await kernel.run(chat_func, input_vars=context.variables)\n", " context[\"chat_history\"] += f\"\\nUser:> {user_input}\\nChatBot:> {answer}\\n\"\n", "\n", " print(f\"ChatBot:> {answer}\")\n", @@ -308,7 +331,7 @@ "\n", "Many times in your applications you'll want to bring in external documents into your memory. Let's see how we can do this using our VolatileMemoryStore.\n", "\n", - "Let's first get some data using some of the links in the Semantic Kernel repo." + "Let's first get some data using some of the links in the Semantic Kernel repo.\n" ] }, { @@ -318,21 +341,22 @@ "metadata": {}, "outputs": [], "source": [ - "github_files ={}\n", - "github_files[\"https://github.com/microsoft/semantic-kernel/blob/main/README.md\"] = \\\n", - " \"README: Installation, getting started, and how to contribute\"\n", - "github_files[\"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/02-running-prompts-from-file.ipynb\"] = \\\n", - " \"Jupyter notebook describing how to pass prompts from a file to a semantic skill or function\"\n", - "github_files[\"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/00-getting-started.ipynb\"] = \\\n", - " \"Jupyter notebook describing how to get started with the Semantic Kernel\"\n", - "github_files[\"https://github.com/microsoft/semantic-kernel/tree/main/samples/skills/ChatSkill/ChatGPT\"] = \\\n", - " \"Sample demonstrating how to create a chat skill interfacing with ChatGPT\"\n", - "github_files[\"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel/Memory/Volatile/VolatileMemoryStore.cs\"] = \\\n", - " \"C# class that defines a volatile embedding store\"\n", - "github_files[\"https://github.com/microsoft/semantic-kernel/tree/main/samples/dotnet/KernelHttpServer/README.md\"] = \\\n", - " \"README: How to set up a Semantic Kernel Service API using Azure Function Runtime v4\"\n", - "github_files[\"https://github.com/microsoft/semantic-kernel/tree/main/samples/apps/chat-summary-webapp-react/README.md\"] = \\\n", - " \"README: README associated with a sample starter react-based chat summary webapp\"" + "github_files = {}\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/blob/main/README.md\"\n", + "] = \"README: Installation, getting started, and how to contribute\"\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/02-running-prompts-from-file.ipynb\"\n", + "] = \"Jupyter notebook describing how to pass prompts from a file to a semantic plugin or function\"\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/00-getting-started.ipynb\"\n", + "] = \"Jupyter notebook describing how to get started with the Semantic Kernel\"\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/tree/main/samples/plugins/ChatPlugin/ChatGPT\"\n", + "] = \"Sample 
demonstrating how to create a chat plugin interfacing with ChatGPT\"\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel/Memory/Volatile/VolatileMemoryStore.cs\"\n", + "] = \"C# class that defines a volatile embedding store\"" ] }, { @@ -341,7 +365,7 @@ "id": "75f3ea5e", "metadata": {}, "source": [ - "Now let's add these files to our VolatileMemoryStore using `SaveReferenceAsync`. We'll separate these memories from the chat memories by putting them in a different collection." + "Now let's add these files to our VolatileMemoryStore using `SaveReferenceAsync`. We'll separate these memories from the chat memories by putting them in a different collection.\n" ] }, { @@ -352,15 +376,15 @@ "outputs": [], "source": [ "memory_collection_name = \"SKGitHub\"\n", - "print(\"Adding some GitHub file URLs and their descriptions to a volatile Semantic Memory.\");\n", + "print(\"Adding some GitHub file URLs and their descriptions to a volatile Semantic Memory.\")\n", "i = 0\n", "for entry, value in github_files.items():\n", - " await kernel.memory.save_reference_async(\n", + " await kernel.memory.save_reference(\n", " collection=memory_collection_name,\n", " description=value,\n", " text=value,\n", " external_id=entry,\n", - " external_source_name=\"GitHub\"\n", + " external_source_name=\"GitHub\",\n", " )\n", " i += 1\n", " print(\" URL {} saved\".format(i))" @@ -376,7 +400,7 @@ "ask = \"I love Jupyter notebooks, how should I get started?\"\n", "print(\"===========================\\n\" + \"Query: \" + ask + \"\\n\")\n", "\n", - "memories = await kernel.memory.search_async(memory_collection_name, ask, limit=5, min_relevance_score=0.77)\n", + "memories = await kernel.memory.search(memory_collection_name, ask, limit=5, min_relevance_score=0.77)\n", "\n", "i = 0\n", "for memory in memories:\n", @@ -394,9 +418,106 @@ "id": "59294dac", "metadata": {}, "source": [ - "Now you might be wondering what happens if you have so much data that it doesn't fit into your RAM? That's where you want to make use of an external Vector Database made specifically for storing and retrieving embeddings.\n", + "Now you might be wondering what happens if you have so much data that it doesn't fit into your RAM? That's where you want to make use of an external Vector Database made specifically for storing and retrieving embeddings. Fortunately, semantic kernel makes this easy thanks to an extensive list of available connectors. In the following section, we will connect to an existing Azure AI Search service that we will use as an external Vector Database to store and retrieve embeddings.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from semantic_kernel.connectors.memory.azure_cognitive_search import (\n", + " AzureCognitiveSearchMemoryStore,\n", + ")\n", + "\n", + "azure_ai_search_api_key, azure_ai_search_url = sk.azure_aisearch_settings_from_dot_env()\n", "\n", - "Stay tuned for that!" + "# text-embedding-ada-002 uses a 1536-dimensional embedding vector\n", + "kernel.register_memory_store(\n", + " memory_store=AzureCognitiveSearchMemoryStore(\n", + " vector_size=1536,\n", + " search_endpoint=azure_ai_search_url,\n", + " admin_key=azure_ai_search_api_key,\n", + " )\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The implementation of Semantic Kernel allows to easily swap memory store for another. 
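# An illustrative sketch of the .env entries sk.azure_aisearch_settings_from_dot_env()
# is expected to read before the AzureCognitiveSearchMemoryStore above can be
# registered. The exact variable names are an assumption; adjust them to your
# installed semantic-kernel version.
#
#   AZURE_AISEARCH_URL="https://<search-service>.search.windows.net"
#   AZURE_AISEARCH_API_KEY="<admin key>"
#
# Once the store is registered, the same kernel.memory.save_information / search calls
# used earlier against the VolatileMemoryStore work unchanged against Azure AI Search.
azure_ai_search_api_key, azure_ai_search_url = sk.azure_aisearch_settings_from_dot_env()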
Here, we will re-use the functions we initially created for `VolatileMemoryStore` with our new external Vector Store leveraging Azure AI Search\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "await populate_memory(kernel)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can see that our function created an \"About Me\" index and that our five pieces of information have been indexed (note that it can take a few minutes for the UI to reflect the document count and storage size).\n" + ] + }, + { + "attachments": { + "image.png": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAhoAAAE6CAYAAABQ/fuNAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAFiUAABYlAUlSJPAAACvLSURBVHhe7d0JfBXVwf7xB7HGqgF5BRVIVYJUAraCKCDFWMFYNFEwgAraPyBWxAW0IrSyVCVYWVrBhUVBaAu4sNQlvFDCItFqqEboWyG4JAGNgiQKJFTBQu9/ztxzk5s9wRwSwu/7+QyZObPcuffOvfPMOWcuDQIeAQAAOHCC/QsAAFDjCBoAAMAZggYAAHCmloNGpubc0FbtYhI152NbBAAA6o0SQYMTPwAAqDk0nQAAAGcIGvXA9u3b/QEAgLqGoFEPrFu7VuvXrbNTAADUHQSNemD9+nVa/8Z6OwUAQN1R5aCRMqqok+jBrGQlDY1XlwtNWVt1iR+spBU5dsky5KVp/qibFds5uHy7Tt01cNR8bdxj55fnYKaSJw1WQmi9CzsrYehEJWcdtAsY+Uq+v7M/v++sTFsWJn2yEsy6N0xXxmFbFhLar04l9ivPzi/moLJem6gh13dXR7OsGTqb5Zcqyy5RG/Ly8rR//37l79vnjwMAUJdUu0bjwEezNfD6UXo9P0pX9+6n2AsbqSArTYtHxWvk8ny7VJht3vK/GKypKzarIKKDYhP7qW+XKOWsn6whg6ZrU8mTf8i2RRpyVbxGL0xTbrOuSvDWS+jSSJ+/vUijr4/X+DWhx2qkhAmjFdtQynhmipYVCy+ZmvP4fC8ItNOw39+nGG+ZkIL1E5Xwc7NfGVKbK9TX235s9HfavMLbr1/crPnF7rrJ94JWTyWMWaSNu85UF/Mc4rsqOqLAWz7Ne5Tas2bNGsVecYUuvzxW69attaUAANQR5v86KfJJYHafCwIxbW8IzP7IFlmrHzDl3tD+Z4EHk3NtaVDu4juC83o+Hthky3yH3ghM7G7W+1lgxMuf2ULrwJbA87f9LLheyccLW6/kYx341xOBxPbevO6PBjYcsoWez/4yyN/W5Y+8YUsCgfyXg/uVOGOLLbFyFgYGm220vyEw/V8HbGFQbvIDgcvNPg1aGCjc4/ceD8SZsltmBTLDHtPI/fs7gUw77tprr74a+NXtQ4sNV13VM/Deu+8GNm7cGIjzxkvON+sAAFBbql2jET34aU2Jb2qngpoOGK4hLb2RzzdqY1g7Qt4L87U4T4rsP0kz+kfZUiuinYZMu8+viSgptF700NKPFXHhfRqZ2Mhb6HWl/t0WeqJuTdK4LmbdGcHfAClI0cQnNkht71TS3e2CC1mp82Zo42Ep9ncLNPLCCFsa1DT+Pg3p5I1sTFHqrmCZcnNlGoYifxyj6BL727RbV0Xbcdeuu/56XRufoB07dqh3nxs0bNidevz3j6tDx466+OKL9Xtv3JRdf31vffbZZ4pPSPDXqUl79uzRvffcrb1799oS6YXFi/Xiiy/aqbKXAQAcn6oZNBqpa+cOdjxcB8X8xPzNUW5YV4209DTv3yj1631FsKCkJh3VsY0dL5SvDW+b9drput5lPZbU8ScdvX/ztWlLeKNFlAaONc0jWzVj0nwlz5mu5D2lm0ykzdqYmi81TFBfE1hKiVLHDiYUpWnz5mCJvMfr4m2j4OUpGv/aVh0sr7nnKOjTp4+emTlTixb+RXlffaVOl1yihg0b6sQTT/THTdmLL76gp55+Wr1797Fr1awGJ5xgasLslFViusxlAADHnWoGjSg1a25HS4jwT+b5yv/Gn/Rk6nP/px1aK/rHfkEV5Spvp/nrBYbrbafLEkOXCRv8JUtpY2svNk7W6HmZihmepJFt7bxCucr93PtzOFkjbWfWksPAeSU6tra8RRN/F+c9+0wtG5Oojp2v0pBJi7S5lvpenn9+G82dO09PP/Wktm3LsKXSli1b9IwXMJ59bq6/jAtNmjTRk08+5f8NGTBwoG4eMMBOlb0MAOD4VO2mk+o7SRGn2NFqiQp2uqxgiG3T2C5bJOqcloq04zq5eLNIMU3aKa6MbYYPHcJCVVT/p7Q6fYVmjEhQTMMcbVw4UQMv764hC7faJY6yBg307bff6sc/vsBvSjFDTEyM9u8v8GY1sAsBAFDL/J4ahSrrDFq6PCTUWfTBlbbAbKufKbs2MD3DFpWyJTD9upLbzQ0sGmTLqtvL8uslgWGmk+egRwPjzPNoX9b+vhGYeJmZNzaw2pZU34HAZxseD3ZKbfuzwMQ0W3wUrVu3NvDb34wJpKWlBX5xdZw/mA6hY0aPDqxfv94uBQBA7XJYo9FaXbqYvg6ZSkkOdXYoIf11pZT6z9uaqt1Frb2/W7VhfQW/zVFKjhbfP06ph9tp2G/Ga+K4IYo+vFUzHppd4ncuOirGdPY8vFqpR/wbVxGKih2jpDvMfuZp0/tH/wbX1A2p/u9mPPHHP2jOnGf9wYx//fVXevPNVLsUAAC1y2nTSYfe/RTj/c1aMFZJa0p0aMhL1uj75iurjLtOOtw40O98ufmJEZr6dumOEOYHw8aPMr+PUSTr2RFK2mjuVJkQ7JfRaYxGD2gqfTBdDz4bHgQaqW//W7w4k69lE+7V4mI//hWU9/Zs3fl4ip3yplOTlfp56eXyC4JlUWc18/8eLV5AVGrqBv3onHP0578s1LnnnecPf/rzX/yyN9avpyMmAKBOaGCqNey4x/w38fGasa2dRr62XMPC+hOaXwYduaJ0eUhwvpTwxDZN6WULPRnP3qyBT2yWOSVHRnfVFeaOjl2btGFjphr3vk+xW6drcRmPV7BqlOLvT5aJGRFNO6hL7Plqplxlvr1Jm3flSxfep+QldwZvLd02W337TVdGk1s0+43xRbfM7knWyPhRSsn3tv/X4tvPmJWovk8G+1eE9iviYI42bdykrDxvb
xOf0tZJcf78LG/ZBG/ZyLM7qGM3ux+padpslvP2Y9mLd5a4s8Wtjz/+WNnZWbr66l/YkuJWr/6b3xk0Ovpo3XgLAEDZnHcGjbnjRaXMG6OEi5rqu6w0JS9fqg27mum6x1bode9EXl5dQGSvaUpZMU0Du7VW5Debleqtt2x5mnIaRithxFNKXmhDxuGtmjPe/Lx4U/X93cjiv8vRJEFJ91/hL1OyCSVm+HKl2v3SjuB+LVu1VflNOmrghBeV+mgwZBjNrhzo70fEv8P2w1uu76gFSj3KIcNo06ZNuSHDMPMIGQCAuqBEjQYAAEDNOQq3twIAgOMVQQMAADhD0AAAAM4QNAAAgDMEDQAA4AxBAwAAOEPQAAAAzhA0AACAMwQNAADgDEEDAAA4Q9AAAADOEDQAAIAzBA0AAOAMQQMAADhD0AAAAM4QNAAAgDMEDQAA4AxBAwAAOEPQAAAAzhA0AACAMwQNAADgDEEDAAA4Q9AAAADOEDQAAIAzBA0AAOAMQQMAADhD0AAAAM4QNAAAgDMEDQAA4AxBAwAAOEPQAAAAzhA0AACAMwQNAADgDEEDAAA4Q9AAAADOEDQAAIAzBA0AAOAMQQMAADhD0AAAAM4QNAAAgDMEDQAA4AxBAwAAONMg4LHjtS5jxYe6Ry21Nv40W4JjzfIFW9T3Qzvhubtfez3dyU54NqXu0LrIM3R3x9N0si1DJQ58pNXL3tKu/9hpT8OW3ZUY92P90E4DQF1FjQZqVOLgdspKPFU9TmqgVqWOrkM6+YwfKHvtp2o3ZbuW59aZjFu3FXylfRExumbwbfp/ZoiP0Un7vtJ+OxsA6jJqNFCDAtq08hP1TT9BScOidfKKrVr3k+I1GkEBZb+Vpb4p/9UtvzpfD0Q1sOXw5b6jJSsy9K2drJLTvCDS7zI1s5MAUFcQNFBztmxXu6UBJf26lRIjg80oZQeNoL3p2eqW3EBPjz9PPahbK86EjQ3SzysLD1VdDgBqCV/vvmzNGzBA8z6xkyWlPKROD621E8Wte6hjufMqk/3cAA18LttO1bDDB3XwsB2vzMGDduT72KdJr36ja/ud54cMI3Fw+SHDOL3TeXr6gm807vV9tsQd81p3uth7rwbM9d7t78+8787euzouN22J/rw6w04BQMXqddAInlwe0jo77UKPxzYp/bGedqruOJg6UQPvW6qcysLGp0s18uaJSv3GTh+pLXs075TTdXf76jSDNFCPuNPV5P09VXqPCsNC4VBBOCxmrebOku562XuvXrhdrWzpMcPUWix9R7kl/9rZ1ZehlQue15+LDUv09pFvsGxmPxes1FY7CeD4VI+Dhjm5tFKvXiu1NsUWHUcirkzSgvg0jawobJiQMTJNCc8nKfYUW3aENmUd1NmtT6v+SfyM09Tt1IN6J8dOV+KC4cuV/r4XGMwwuZVmjq9qDUUrtTrfjsLTXJeEOpf6HUyj9PmK57UkbbedDwA14ygHjX2atzBb95Qz/CHjsLIzvixzXnDYqU12S5VKWatVvXpq0pXXaNX60k0b4VfHA5/LsqUhazW28Kq54hqR8Cp0f5sPrQ02p5R1xf3JXA0MbbeMKvxiV+yh5hh/nbDtmGacKtbSRPaaVn7YKAwZ0xTXxJZ9D9lfHdLFLRrZqepopJgzD2nnl3ayOuJ6qteHWYWvY/mv3yit0ko96JWb9yr4Ps3VvAFm2dBrGf6eF3/fit7Pks0lWXYbpdc55jS7TP3jY6Rt6UU1EH6NRKjGo4KaibKW27ZSf/Y7tO7Ue155UYAJr01xUIsCoM6ptzUa69avVK8rewZPRqu8k0r4ScA7Wfdb11NL7ZXxJM3VzMLffjD9NUZJk+1V8/s9tXbMSjuvClaN0torg+suHa6wK27vRHbjWvU01fdmuxOlubO2+XN8JfZpqkYFT2rn365JZjvPmxOnt2/Pe89r8mPqEVyrUmWGjRoOGbUl+7m5fpj0X4sKXr/F709TL12jqV754l/ZOpdV3us50SxrXssS7/nL3nt+YzCAmFDyoKYFy8PX93w4a67dhvd4vbbZ9+gY1qy1Wp62Uzv8w9ILBCty1DLe1nh0ld4rs7mmnOXaXuPfhvtDW3PSv+uZ3rK79bY3T13tsn4tCk0rQH13lINGYw29tZWeLmd4IKahWsWcVea84NBcHe2WKuRdxc5ddY16xpmJnurpnQTWri+87g2erG8raqtv9avHdNcFdiLlOc3U3brdX9foqUmTr7HjVeCd2CfZdVt5QecCe8UdPCnerqGh6ns/QLS1E6X3qceV1+jD7GBNi79/2V5Yes7bt1ZF26+q8LCRleUmZLQ640S9/0W+naqOfGXsPlHNz7KTlfhwVmJh7cJYPWb7x1T8+pUp/L0o+Z6f7x0zF2Qr2wunrc7ztpjtjdtZ4S4Y/ljhNszjlbfcseNMnW4rpXLTPlBuiwvVLXQrS9vz1Gz/3lJBo6rL+bal6xPF6JLQYe8Hm/3aS60GUK/VyxqN7PVr9WHoStfT47a7pXVrw04CbRVddGFaWqtW1e9rUJbzi2/nglbRdqxsq8YUVdF3MrUohSeuVhp6WyvNnCVNPcKOp6GwMXiQm5qMjtER2pW5v/on2q/26+1/R+iyKDtdicI+Gl74+3DWc8WakMp//argw2fUL7TuxYma+eE2ZZmV4x7T0h5rg/MqumPFe29DWfXYtVt78yPV+Aw7+cU7tonDDKY2o5xQUNXljP3hTSfJ+mR/gfZ9ZecBqJfqYdDI1rp12/wmjMKTzo3P6EPvRDK3sFOoPYkUylJW2M9mlzxBZW+v8umqQiWvsLOzw5pOvPDj3xVhq+j9ofAOCXPFnq1evbL14BHeSmuYsJH6hqPmkvZNNPSbvXpmS3V+liWgdSl7tefiJlVuCirkBYCpvVZqbmGfiYpevyrwXpti63pDYc3Ur17wp/3A8T1e/zrPr3GIUhtbO/HDtglFnUX9oX9RzUWYqi7na3FZiWVv0zWhGg4A9VL9Cxq2GjzUVh8alg5vazuFtlKPHt7480VXp36zhh0PdjAMDyXm7pXwQHBk/GaU8L4ifvOOHff3Kbw/R3HZzz2kma1u16THbi/d36S6Gtq/Na6xxvY+Rf+7dLuWF9iiSuxN3657PjxFSdc1tiXV0+OxaWo16yHv9aj49auU349nlMZWcndSRc0oxzzTeTNtv86/IvjDX81aR3llGyrtrFnV5XymWeWLd7Ty+3+cABxD6l3QMJ1AL+jRs9SVbPiJ3lyhTm1VVFU+VrcX9dEwfTJevlvZhdXwa9WzOn00ymM6JZrbMW+02x0v3V7YR6P0Pvn7ZU58ppPjrFa2ycT0FzHbcPvbIEes/bla1PmQRs3I1LwvK/oBD/MT5Jnq8dohDR167vf4VdCe3mvoBQzv9cgu7/WrkpLvuTfYZpJid7KMkabWxu9wmDtCzC9/lvxrZx+Z4N0ghU0eH5yua8JrIsxjdD1Nn6wIW6asH+mqaDlvXvsW4XedxOia+BjtTwtb9nv9HgiAY0EDfoIcNcsLEe/u0O3J30htTtfY2P9Rt6iTdbIJEwcO
aFPm15q3cq/+97+naNqQc5XYjP/npEzmltE3Duvy/t11ti0q05dvacmbDfkJcgB1FkEDbhzar+V/y9OiLd/o7T0B7TJlJzRQjxan6NpuTflv4iu1W5uSV+tfed/Z6XKccJL+56KrlXCRuX0UAOqeOhU0AABA/VIP7zoBAAB1BUEDAAA4Q9AAAADOEDQAAIAzBA0AAOAMQQMAADhD0AAAAM4QNAAAgDMEDQAA4EyN/zJoenq6HQMAAMc7foIcAAA4Q9MJAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcKbGfxn0zTdTteSll1RQkG9LAKBuiYxspP433aTLL4+1JQBcqfGgMeKeu3X/rx9Qq+hoWwIAdUt2Vpae+OMf9OTTz9gSAK7UeNOJqckgZACoy8x3FLWuwNFBHw0AAOAMQQMAADhD0AAAAM7UmaBx4MvduuePW3Vx0jZv8P7+MVvPfHjQzgUAAMeiOhE09qZnq9sL3ypx0AV6f1xbb2in94c21oE123VPutuwkfrkLzXkybf98R1LxhSO16y39cSgMXrpUzsJAMBxovaDxn+/UlKKlHTnuepxRkNb6Gn8P3pg+Nk6PSVHiw/YsiO2Qy/9xgsUg8KG3yz1SqXYEX/R/BHdgosVE1zniTQ7CQAAqq32g8a/8rWp/Rm69mQ7He6Exrqn83+14r3vbMH30UK9Jnqh4k92eLyfzrVzAACAG7UeNDJyvlNMi0Z2qrSzG5+oXftqImiUzTSdjF5i6jbCmaaOcVq1U/q/WUW1H8HyUK1IWFPIp0s1etAMvWSaXrx55daCfGGWK16jElo31V8gqLwmnFC539xTch88ReVh2/cE11taVKvjzwur5Qlb1vCXD23HSVMSAOB4UetBIybqJL2/Y5+dKi3760Ne2DjJTh0t3XT/n5LUq7n00+Gh2g9zYp4lmWlTIzLxUqWPDw8I7yldd/nz7u9qi4r5Qqteke62NSpDWryqh81J/JxL1am5t25hONmhtHSpV5+ymnM86bOU3jm4jYcTpFUzi0JQuoYH983su17VM+EBKv1d6a6ieQ8PmllsunDZtBl6OP1SPRzaT80qI4gBAFA1td908pPG6vZRnpYX2Olw//1az6SfqL6X1ETQ8E704+1V+pFcqae9olXqrcRQiPADwhfaWVijcIkS+lfUGNNCve4qaq6J7dNbzbyTf6pX0rVTC+38zJ7MP33XCwyXqus5wclSOg0vDDLndrlUzXZ+boOGF44K+5oEt1lMp966yd+mnVdiOtfbjgk5L73ynn7aJ2w/O19i5wEAUH21HzROOFU9ov6joc9ka3HOIVtob3edtlsH4loosaz+G9VWoo9GmR1AK7HT1ASEwoppWvlCX3xh51XXOS3U3I6awGBqHExg2LHxXS8EXFp4oq9Q2DaM8CaPh5OPdMdsc1Hoec56z8toX9gwAwBA9dRy0Diod5Z/qmdOOUsZQxtp55rs4G9oeEP80m/VbVAbPd0pwi5bB3QKNU0UDWU3k1TBp19oZ/OWwUBxTj8ltHhXaZ8Gm006dalSzCjGhIyHd/Yu3K+HE0rUaFRZiUBmBjrOAgCOUC0GjWDISFJTLUpsorObnaEHBrcJ/oaGN6y9+1wNPCvsdtfa1vVS/TR91ve43dX00Qg11+zQSzNfLVZzEdu5hdJfeUXpLUJNGtWzY+cXauYFFzvlBZYjqdEwzSjh/T6KK+o4G3brbxmdWQEACKmloFE8ZJxuS+uWc3VTn0vC7jrppvsn9tbO8GaFEndrVKyFejV/1647TqtaDNeU8D4dXpBpnv6emnc+giYdT+yI4WqePM5uf6a+aHFkNRrn9p8c7Kgaeo7ewG+JAACOVIOAx47XCHNiMtXt5TsWQkZtMLfOvqtOfxqpWFsCwJ3Kv6sA1ISjXqOx960cQkYZdix5VTsT+hAyAAD1ylEPGqd3b60VhIwifh+HX/q/XXF3hbfHAgBw7Knlu05g7jiZwp0dAIB6iqABAACcqfGgERnZSNlZWXYKAOoe8x1lvqsAuFfjd528+Waqlrz0kgoK8m0JANQtJmT0v+kmXX453a8B12o8aAAAAITQRwMAADhD0AAAAM4QNAAAgDMEDQAA4AxBAwAAOEPQAAAAzhA0AACAMwQNAADgDEEDAAA4Q9AAAADOEDQAAIAzBA0AAOAMQQMAADhD0AAAAM4QNAAAgDMEDQAA4AxBAwAAOEPQAAAAzhA0AACAMwQNAADgDEEDAAA4Q9AAAADOEDQAAIAzBA0AAOAMQQMAADhD0AAA4KjKUcq0cZqzPt9O1291IGhkas4NbdUuJl5T022RlTUr0SsfpRQ7DdSe0HFaNHS58maNnpemvMN2kWNenlIm3KzYCRV/4g5mJStpaLy6XGhfi87xGv1anp3rzuZ5g5UweLay7DTqt4KN8zX65u7qaD9vHS+fqFR/TtWO07qg/OeQo42vLtWyjVv9qfquDtVoZGr+hMnaXG++tFEvXTVeqevf0rI503TnVd7J74nBir15tjLqxXG7T1n/2qy8b+xkGQpWjdN114/Ssq9jdNvYaZoyeZrGDYmRcvfZJdzJzUhTlvuHQV3w+SKNGDpZaaf205Qlbyl1yRyNuDLSzqz8OK0TKnwOXTXuzW1a/ZuudrqeC9S6TwKz+1wQiLnjnsCIyy4IJM7YYssDgcyZNwRi2j4QWO1P7Qts+OOgQPyl3rJtveHSGwIT1+3z5wQ+mhVI9MpGzFgYGHb1T735Pw3EjV8dyM0ITV8QiLtjYWDroeDigUO5gdVJNwQ6t/e20/7SQOL41wOfheYBZbLH6QPBozEkf+UDgcvNsbfMHovm2Jo6KBB3cfA47XztHYHZaXae8fU7gdnDrw0ee+Y4nfqOV1h628WO/Ro8vlc/4JX1eTTwfGj+xaHPkd0Hb1uh4cGVwXUKHXojMM77jMYMXxjILe/zUtHzt8+jaLvFn3fwOd8TmP6XO4Lrt+8ZGPaX4PeBv99h+5Y48xO/HPWU97mKaXttYHqGnS5UznFa4edudeBBc8w88GhgWE9vfp9ZgcyKziee/HWPBhL9ed5n7YEnAuOKfT4PBLbOtceomT98ViDtazsrXLnPwbD75B/HwfHw52SGcSlmuSo+Vh1Xd2o0Iq9X0tg45cwapxnbbFkxuco/NU5JL/5DG99eoGEtt2rx2Bm2Gioo5c0cDZy1XLNvba2cJfcq9p40xf5hrZaNvUL7Uidq9qvB9rDUSX00clVLjVz6T216baSarRmlO5/c7M8DqiOy1626rqV37KVv9Kf9Y2tBvuKeWKHU9cs1MnqLZgwdocWfezMPb9WcOwZrxr+ideefvPkrnlRcowJ/vaqoseN729+0OTpJK16bo4HneJ+jSbO0Wa01+LkFGtLGm29rbcZfGVy80PoULdsTpSFDb1HThrashAqff5WkKHXXQM1esVzjYvO97c3QMu8lih37lsZd5c1uM0TzvX2bM6h1cHHUTz/7hfo2zdSce4ZpTmqODtpilXOcVuW4y9i4TwkvbtP
Wv96p6IrOJ6Ym4t5Fyu0yXsvWr9WMTpnaEHZOylk4TH2fyPUea402vTlHsTuna8iEpSr1SS73OZQUq/He8zDPJfVvSUo4xXyvTNNo73gv77GONXWqM2hk/CSNvtL7Mv7t9DKqolsr4Y5b1CG6kSKbdNV1V7eT9uwr9ubFDR6j2OjWir3xasV40zF979PAC5sqZsB1MhVUOV/mev+mKOXlPMUM9Oa1jVBE9C3q203KejvNbAKopkg1MrWh/oEYPLaiBk/Qg7Gt1fTsdho4dri6HE5Tyvo86e/L9PwHURo47SkN6eTNj75CD94RZ1asku97fBf2bWjTT3cOaOc//kDzOfo8Rzne5y2iaTM1MgEi4kxv35sqMiK4eEhWlmlPjlFMh+B0aZU8/yqJ05D7r1C0Wff6WG/a2zfvZBHRpKmamf1p2FjNvH1r6n0Zox6LjNPEvy7QyNaZmjHsKnX9xTDN/yD4bV/6OK3acRfV+1YlNLUTFZxPslYs08bDV2jkY7coxtt+zIDh6muCjS9TK171zhW9hnuPFeXtS9fgZ2j9ZpU6g1TwHIqLUKQ5pr1h64LpSo6I0/gJCd43S/mPdaypY3edNFLfSUmK/Xi2xj27tUQCPKis1ybrzpuv0tVXdlbCk6U70USUuMpq3cpe9TT0vnCDY9LHmdrqfalmPBlf2Klv5Aqv/FBwNlA9Ocr90vsSO7tZ4bEVE34mPvtMeXOUX7BPWds2eVc9MepwSXBWddXY8d3w5KLlfZ/r8yr0sIxu2dL7N0uZHwenS6nk+VeN91yKPc9Mfb7djuL44p1Yh83xruT/Os27yk/T1JuHlV0zVsXjLvLUUP8Io/zzSeYOb7xNjNoVLh6hkwuPSe/4/8D7s+Lews+Xv+7hA8HZJVX1OXgK1ozTuBe+U9zYSUpoYkqq+Vh1WB0LGp4m/TT1d1co45lxWpxpyzwFy0cpYcxGRd01R4tX/EPJI7xkdyTatPayrNRh1BptzdhWNPz1zuB8oBoKlizW4j2tFR/nfcl5x1Y77wspIyPsimPXbpl6hmbeVVj0OdHeWIbCZxdzuCha5+Tm2LFqquD4No/+vXTqqi7eiX/Z3JTS1cRGJc8/5KB3UggKhjSgIhFtEzTx+dGKPZymZSvCTgohVTzuwlV0Pok6y/sEeeEls/A4LfACix31PkWt23qhpf+c4p+vjGmqqG6y0uewJ0UTH1kqxU9SUnwjW1j+Yx1r6l7Q8ET2n6qJsVu9N6QoZebmBWNgZERj7yDaoGXriuZVTxfFeW/k5gXjNCc1U3m78pSxaqLmr7ezgYoc3B08ZlKTNX9ComInbFDMiKkaeZGZGae4G5sqZ8GjmuofW8H+Dxub9lPfXt6Xh99mm+OtN07JH+Qp74MUJT1rbtFrrWiTDlKe14yNecpKnVzY36L6vs/x3ViR3sdLGWlK3ZVfokbRc/YtGu19Ied5V1jxN3vbfC1Zyd6weNYojZ5nvjwref7ek2ztnRBSFkzXxl2ZSp02S8uq8TQjGnnb+HiT0rLywsIK6qU1kzVw0nylpHvHUdZmLZ62WKlqqo4XmQ9KyeO0kuOuDBWdT2K6dPUeKUVPPpKiLP84fVTzC2shWivu6nZeUJmi8a9t9T9fWenzNWNJGeGhwucQLl8pkx5R8jdXaMjtHXXQ26bZbsHBajxWHVcng4bfhDJ2vLoUVld531H9R2rghTmaM7i74h7w3q4OR1ij4W07btICPfiTXD1/V7xir+yuwU/lKCLKzgYqsmaif8z0vcv78vkoWiPmvaVlw4uOxdixL2pK4kl6/X5zbCVqxq6fa8qiJMWZaljTZrtomvqeslqj+3dX7KDJymwYvOKKuz9JCS0z/eN7wPMR6jugNo7vprru5n6K2rVId17ZWUlrbHGYmOHLtXraLWpd8LqmjvEChjfMeDlHzdoEn0eFz79hnEY8mqCorNkactUAzT+pnwYWtn1XLvbG29TllA3eFV93DfSDDeqtZmcqYuM8jb7VO47ib9aMtyPVd9qLGtfFzCx9nFZ43JWhwvNJl/GaM/YKv8kiwRynDftqYFs7zxN9x7OaMSBSGx7xLjTMd8F9f9N3Z5VRc1LhcwizcYYmrsiTvtmgqTd43wveNs0w0bs4qPJj1XENzK0ndhwAAIQ7vEHjLx+m1b3maOMEL4Cg2upojQYAALXg4/kaPWGRNmflBZs8JkzRsj2tNbA3IeNIUaMBAEDIrhSNv+8RJf8zz++nFBndVf3un6YHryq8NxbVRNAAAADO0HQCAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcqfH/vXV/Qb4dAwAAxzv+m3gAAOAMTScAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAmTodNMau3q2+Cz+zUwAA4Fhz3NZoLNq8T11nZmnvgcO2BKjYzJnPqF1M28JhyOBBKigosHMB1LSdO3fqml6/0OjRD9qSoMo+i+WtV9L76enqdllXf3slhT9GeZ/1VStXFnuM8Mc1y5v1wvezrMc5HtB0AlTDPffeq60Z2/xh/oI/KTIy0s4BUJPMSfm2IYPV9bLLbElx5X0WK1svxISBp556Uh0vvtiWFDHb2L59e7U+6yZYPPTb3+i666/XlClT/bJTTj1VCxcu8rexdt16rV+3zg83x5s6ETRME0mD327xhx89/pG27/mPnRNkmk9C801NRLjweeHrbsj+t+LmbS+ssTB/zbQpN49360s52vjZt2ryyDZ/m6a8/ROfaPCSz/1tNX44o3DZ8OkQM27KSj4u6i/zxRPdKtpOAXDprrvu1spVf1OzZs1sSZGKPosVrRfOhAETIE71wkA4Uythtj9+/ARbUjUTJz6qSzt39h+/LKeddpo/7N6925YcP2o9aJgAkO8Ne37XVoHft1fnqB/qgRW77Fxp+ZZ8JV7YyJ/3xh3naczKLwtP+CYEfJ7/n8J1H+91lm5+4bNKm0MmXX2mFt4UpS4/+qG/7i0dGvvlOfv+o5aNf+Bv655uZ+jnz273y0PT5rHNtk2ouOuVnXpt0DnVelwc+3796/v9KlBTPWq+kADUDlefxX9u3qx///vfGnHvPYVNHqaJpCKh5pPyQobx8Ucf+TUcP+ve3ZYcP2o9aJx+ckM9dX1z/69hQkW4xPaNCoPAFa1O9cPB6o//7Z/sX/FCyORrzipcN77taYqMOEH/3HnAn66uKC9k/OrSJv741W1OVbszI4pNFxz8r/Z++1899+4etW0W4e+P8bNzT/H/mnmov8wVUKgq1VSPmmpS+mgAR5/Lz2JWdpY2vf++7r13hL990/Qxb97ccsNM8uuv61//93+6//5f25Ii33iB5dZbb/HDivl7XcJ1x2Vza51oOjFNF6HmD9OkUZG23sk/xISKc08/yU4FQ0ujiIbK2XfIlrhjalpC+9xqykfK2H1QO/Z+Z+eivvvlL/+ff3VirlIA1B4Xn8Xul1
+uizt18sfb/PjHfpOHqekoS8J115UbdsL7aJhh3fp1x2WH0FoPGiZkPPX2V4XNH6ZJoyLbvBO6qWkwTA1D+Mndb4Y5eFhRjU+0Je48dGUzf39Dw76HYwprOFD/7d+/X7u//NJOAagtNf1ZPJJ+WKbJpNmZZ/rNLRXVrPS4soff/+N4U+tBY6sXHFo2+kFh88fyD/L9vyFrPtlf2CfDhJJ/5HzrN1Wc1+QHfvNFqN+EsWLbfj98XNT8ZL+m44v8Q4XNKFNTv9I/PvvWH/++TDPKn9L3FuscivrNVJv+Ydo0OyX99a/L/ascc7UD4Og50s+iudujf7++lfbnuKhDB78pJNQv4+9vvaVdu3b55RUxzTkmbJhOoeUxNRrnnXeenTp+1HrQeDD2DL9DZ6gZoqTOP/qhHybMvLte+UILb2rphwxj2a0/8kOKuXPEzDc1I6tuO9cPLWaZPu0b+R06zTzT4dRsK8T05zBCd51Uh6m5MH1DQts2Az8sVv+tWZNS2Dns3X/8Q08+9TS3twK1wOVnsXnz5po06TE9+ugj/vZnzJiu5+cv8MsrY+5Uyd292//9DFPTEt5HwwxGRR1G66sGAY8dBwAAqFF1ojMoAAConwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcKbG/6+Tr/N22zEAAHC84z9VAwAAztB0AgAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwhqABAACcIWgAAABnCBoAAMAZggYAAHCGoAEAAJwhaAAAAGcIGgAAwBmCBgAAcIagAQAAnCFoAAAAZwgaAADAGYIGAABwRPr/EuE4d8kNtv0AAAAASUVORK5CYII=" + } + }, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![image.png](attachment:image.png)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And we can see that embeddings have been conveniently created to allow for semantic search.\n" + ] + }, + { + "attachments": { + "image.png": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAqwAAAJ1CAYAAAAc86LXAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAFiUAABYlAUlSJPAAAJaXSURBVHhe7d0LfFT1nTf+T+y/25bHkgckqzJsSQiSgLC1w0PkqcAGFEeFhxAuLqHVRXuJJip33NB2XauSVe7iBLO1SlGByjUUlBGVlEsfjGW0G0oSTEjaZRAahA3t0nb3qfP//c75nZlzzlwygVzOJJ/363V0fufMucwl4ZPv73fOSQkKICIiIiJyqGvU/4mIiIiIHImBlYiIiIgcjUMCutR/48zRCvx03y/wmwv/Jdp/BXy+FwaOn4kH7/4qUj+nP4uciJ8dERFRZ2Fg7Sp/aca+dStwzDUb3536VaR9Xs2XQajyJazZ/3n8/T9+GyO/rGaTc/CzIyIi6lQMrF2k5ieP42cDl2JJbqqaY9O4Dd97GfjuUzMwUM0iZ+BnR0RE1Lk4hrUrXHgTO343EYXmwHPsJTzz5hnVEDJm4Lt/+wF+WnlZzSBH4GdHRETU6RhYu8Dlj34FjPw6LPW5/9kfA/v9D9XQDRwzCi2/PqFa5AT87IiIiDrfFQbWRvy44GsY6bZO39uvFie7+pcw270U76lme2v+9D/Qq1cv1VKaP8a//eY/VEO5sT/6Xf5P/LdqOt17S83fhwL8uF7O1b8r3eW70V0/OyIiIie7igprNore+BDH/Ma0AnhcBJWl76rlFEuv/9EL/51Ikrl8GZe/8HmEzulxMBlWF2NF+Pvwxu0yq3Y73fGzIyIicrp2HBJwO54RofWufS+pypr0Lr5nqsBGhlnrcq0KJ6ubBS+Zso6s0BnVOvl88fhHS63r7Le1TcxVv9k/MraqtrNfVlLVcuPY5Lbu9aIOb2GxaZ3o27kyacNvxuWaX6pWbJerPsB/D75ZtZzvrvEipBoGfxvfmijf52koqwP2yT9mTJ+rtRprrmZbP2P9vbZ+T6zvv3mZ2I78/MzfM61ablquZl+p7vrZEREROZq8SkDbnQq+NGtW8KWPVdPk3ZJbggX/eko8ks8xHuvCy6R3gku/dktw6duq+fGPgi/Jx+L/BbN+JNY2mPelr+MueUdbEny7JOiOaJcE39VbwVP/Oiu8TB2Pvj+1ndB+Io+lIOZ22sN/Bt8qXRJ8Nfwig8H/+s/gf/yneixdOhJctXhN8P+a5zmZ9t5H+06Y33ed9n6aP2O5ru2zsLzfYrn1szH2Y9+2fV3ZNh2TZT9Xqht+dkRERA7X7iddZWRk6w/2/whlKMYz38nQ28KEB4uB997Vq2z738W+u1bgmYnaIlWRU4/jykbRg6qSN/F23BXRbkSjqsa+tB7hZcjAhAnZaGwyqnNivae+LeZKt+P2u2BaZpWRLp7VKLar2levF+4qzkPzC9/Ha//Wos/6fC+kGkMjL/wS5U+9if4Pz8Vo23BJx5q4TBsG8O69spJpVMSjUZ9L6L0XJn4HRWL+e6F1TJ+pJLYd/p6IzypLPVbfsW+Hvje345ln71aPxUf2o5fEd0x8rwarGfL7UXfqKj/HbvjZEREROVy7B9bGxlo94EkZGeFQIg0WbRUYZDjMyhikz+8wtSjTApQ+zVhfi7rGU2qZVShoRyMC07YJ72KG3I5luMJV+PLXMf9finDjkeWYt+RJPLd+PV4Q03M/XIAlL3yEoQufxt9b3rwkIP7o2KTGr8rgGvtEK/E9MEKkRrQzanEq5htrPslPH2IQYv+O2e1bFPr8R7oXYZ/Ylv4HzVXojp8dERGRg7VvYK1/CS/tuxu3GxUve1WyXrSzBoUCRqzw2H7uxvLQSWFqWmaq3LVBxnc2a+trwbW9Tiz7fH/c/vDTWFO6GIUzZ+DvxVS4aBWe+6dvY9z16jnJSAbXZ+/GvgOx3id7aBTtxmwMihryZFidhlMPGp/hDhQZFVbJ9h2zV8mzHt4R/uy1aXO44no1uutnR0RE5EDtF1jlyS33epHx7DJMkG2t+9WL75lOkHnvZS8w4XYtsGaMvx1Z+xaFq3Bi/R/Lx1oVNtw93PijpdaKWsJkN/9bWNzOVy1o/+EBwud6IbVfGtLEFOpaTioiVC61Vp7fO/CWemQnP5dalP3A9Hyta/92TIgaJE/hVJ0pzNa/i3eN74P6jr0UquTK4Qa16rH+HcP6pXGGJ7SDpP/siIiInO8qAqu1u33kvafwbf+H4bGGcjyhfwUy1k8LPeeljB3YZIxplVW4N4rRKM8e19YXwVJbdDu+/TBC2/4evm2tqLXBhGU7UNRo7hKON7bSRBzbt2XYFevIM9Ibf1QQ3sbjwPLNpvGXJGTgW+NP6UMm1KRd4kqrZotlD95tuUrAhGUfYnmGN/z8lwdhW8z3VI5LzQh/135wChmh74NYZv4Oud/F7aYxrHql17SunNr5DxgiIiLqeCnyzCv1mCjpyT8uvodl4T+MiIiIKOm1+0lXRF2m/iV8bz1w+3iGVSIiou6EFVZKYvKmAfLM/7C7njUPSyEiIqLugIGViIiIiByNQwKIiIiIyNEYWImIiIjI0RhYiYiIiMjRGFiJiIiIyNEYWImIiIjI0RhYiYiIiMjRGFiJiIiIyNEYWImIiIjI0RhYiYiIiMjRGFiJiIiIyNEYWImIiIjI0RhYiYiIiMjRriKwNqA8PxvDhkabpqH8Y/U06nHO/+JFPDA2G9PXN6g5yr5FUb4ralq0Xz3JKua24mh1nd/uR/miWRg3Uu17eA7GzVqDj9TimELHvwgRR1u7BtPVa5m+/oSauR9L1DzLJPY3+VtPYdPxS+p5REREFA8rrNQ+/vJn1Ox7EUvyv4px31qD98+r+VfiSraV4Dq/3/d93HnPo1i79yOc/3Nv9HMNQL9el3D+V6fRrJ7TZhdFMP3Oi6gRD/tNWocNDw/T55t8od8ADBD7GtDvC+JYL+HUL17H0zPvwQ/eYWglIiJqzVUEVvEP/Dn5/2GYu7sWJ2rM0w4U3qQ9iXqI/Y9/FdPnr8Ge2j8Dn1Mzbc6fVylShDrr90VMKybqy4REtmWX0Dofv4g5i7bh9F/6YeI/7cCHv6rCwXfewcEqsf9jz2Ccelqb/OUEyr/7KPbIlzZ8HsqfnYgv60ssJn7vHbwt9vX2oV/hxKEVmNxPzj2P7a/9TPyXiIiI4rmKwPpfuHRRPSTq1Q+3TJqHtXt3YG6MP1Yu/T7BamIC24rQ6jqXsP35Naj5CzD04X/F2oJh+II52Pb6Ar6gHrbFwWe+i7XHxYN+k7H2Xx/C0EQCtnjuAzMz9cctvxdHRkRERPFceWD9fYv6h3YQBrGa2uNN/OFhbFrxECYOih37Lv2n/o0ZmjlI+38siWzLrtV1fv829r8jH/wdZj8Q2WV/JX6/bxG+v/k88LlhmPujFZjYRy1IwOnfqvG1A/ohTX9EREREMVx5YD17Xo3524O56mSSr42dhAee2YNTf9YWEFk0nz2t/b/m+Umhk4/GzVqEV37RCZ3i73+Eg/L/2V/DgBMv4qFJOZYToPb8VntWwv5cK4cX7MF59MPkFR
tQmK0WtEaOtd38KJ7aKx7LoFs8I+oQAiIiIgq7upOuPqeftDLA1U/rTv3z+Qa8/9oiTP7mi1rXK5GdcfJRP5nS/iJPdtqD5d+aiiX7OrZj/NQpdeb+udew+FtrcPDsX+nfW3UC1JL8R7E94SEuNXhxoT68YNB3/xXP3dVbzY9tz3zjCgFfxfQf7gdGPoC1b+5IPOgSERH1YFceWG96CNuP6yetvP3OYXx4/FfYtGg0tHNJjq/Bc7KrlMhk4opafHhIP/lInuj04TvrMFsbHXAee364Vq+AdhTjD6iL4nt51wqx/8P699Y4Aeryfjy37uf6c1o1QIRP7ZuOUztexv4Egm7oKgE36OH2/LFXMPeeO/DQa8YlsIiIiCiWq6uwmn3uC7jlWxvwg0l68/2PPtQfEMXwBddEfH/NPAyVjYsfoqZTrt37d5j7xGT0M06O6jcZD83Wx7T+/sManNIetebLmPjsM5gtM+v5PZj73dZ7FEJXCThQhRPHD+OVb4l9/uU0Dj6zGGt/pZ5EREREUbVfYFUyM9vnhBbqIW7KhDpfvkMN+opLPfoyetsGjQ7KiH8SWFSf+zt8/0cibMvge3wNCh/fj9/rS1r3uX64ddHDmKw1GnDwF4nfFIGIiKgnaufAegkfnlBdnFdyjSDqeX71a4Q6xRO5JNSVusWNW7QHp3DKVsmtOSkv+S/c2MYz9rMfwoYVk7VhMOf3Poo5oTtcJeAvfwbPTSQiIkrMFQfWU7tf1C/SbvjLeby//jGs1S4d1A+zPeELwRMBR7Fp/c9x2vSV+fOpPfjB0hf1bvhbp2NSooXOj1/BbHlb1Tu+n9D4UY1rOqaPlw9OYO3SNahRx/HnU69j7Ra9wjnxjjvbfMb+l+/6J/xgmj6eteb57yZ28pj8WVn9srq9a28My+6MGjMREVHyuvIKa+Bt7TaYw3LG4M47xuDWr47BA88f1e7aM6jgGcy9otsGUff1e3z0fCHuvOWrGHfHHbhz7FfxtUmLsF2m1X6j8f1//AYG6E9s1al3foaPLosHgW3Y/74+r3W9MX3+47ill3h4/EVMH5mDceNzxDE8hYMi9Mpbqj49rfWz/SP1xsQf/ivmDpePz2PPojkor9UWWOx/Rrxm+brFz8rXhouflR/r1dhBBcuxRAvSREREFMsVB9a0m8fh1pv64Qu/P4/TgfMijvRGv69OxuIfH8aef/o7XluSbAbh1km3oN+X/4zzgdM4ff7P+EK/TNz6zRXY884GzG7D5Z0G3fF/9ODpmoGJt+rzEnLTA9jk24DF8ji+cAnnz14Sx3ALJi/agB0xbqmakM8NQ+G/rtOvNvCXE1j7nUURld8/nxevWb5u8bPyZ3k5OP6sEBERJSwlKKjHRERERESO0+5XCSAiIiIiak8MrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKxERERE5GhXH1jPH0V50SSMG5mNYUPFNHIM5m49rxZ2lv1YIve9aL9qdyUnHUtHaEB5vvqsQ9M0lH8MnFo/DcPyX8Qp9cx4tOfGeY9aW95l9i2yvfZF4hMP0447znJNxDayMX19g1qo2J/TFe9FJx1DW743jhJ6fxbhNctr0H9GluzTGklK/R4LTVG+xw7kzO9S+Hfm9PWvau+rk74bSfvzd4X2L4ry+1bTHX5u2+DjFzFd+9mW/37rP+/Ga3fsv7/Bq/H/KoNPjckKDnXnB7+/4WfBn1X8LPh62cLg4pfq1RM6y9vBxdniOBa+rdpdyUnH0hHqgy9OzQoufks1r1BDWX7c96i15V3h7YXic81eKD5hk7fE9129F7GWDxXfB8v7JedNXR9sUM3gyfXBaabnaK/dvFx4u8za7mhOOAZnkz/n+cEXT6qmRfv8jHQZ7ftoe21vrY/xWruW/JmbVtbZ/960jdN+lyXDe9ahtN/Jtt/Tkva9jzK/DZLnvY3/O8qJ//5KV1dhPVWDD88D4/5xA576h8mYPGUyZj+8As99K1M9gah7kH9xzm2Yhz01KzBRzdPcJb7vd4n/71uEuXsnY22U5SdWT8ae8jgVjJsewkOTxN/3jfpf/Q0NJzD0zokYpLV0Ex9+yNLuaE44Bkf7uAEN4t0YdJNqdycN9ajJvhMTza/trodQ2B1fayfQfpYy+ZPjGHd5IH4jY7+tknrqnbdRM8lj/f3dbZ1CQ+0wZCZZVLu6wPrlL6O3+N/BN15GzZ/1WRF+uwc/mDUGX5Ol55FjMPuZn+P32oJL+Oi1RZg99qt6l9PwHEwPLVMl6aHT8PSPn8J0OdxAlafP/+JF8Y97TmidhyzDD36H91fPwa3D9WUP/OsJNT+Ki/pQBuO5k4teR81fxPy//BxPjxXzJq3BR/ozcfCHY8RzCrH9Yvi41v5iP56eqR/H1zyFKH//knp2pNN7n8IDpmOeXPQi3hfb0qmut/lrsKnwDm3bsns95vEJsd6bLie7SC1dS7bhA/GO09L9HNn9KLtxQss7vfuqAfvfPoHJhbED2/539mDoYw9H/2Unf0HWvo398nNNQGbmMNS8vb9Lu+gSOQbLZ2L+zOINJVDfkXK1rtYFZfneyJ8H8TOwz+iukpP9+2DurjaeG6vLWm1PdXmF1gl9FqobcL3aX+g4bN9d8zryeKesQY34R2+uXCZfn+U1RNJ/Zo1tWY+1a7/bUWQOxtBWvq8xX496H/aHlqv3Tc43nm/7PWDdlrlLNt53Qf985u4Fap6fJOab9mN5D22fY7z313yMYgodh22+9fjbeozq+2YOS+btG++dsQ+t29b6fdHer9AxqP2vV9vQ5tu/u219z8w/K+b1JeP4Y/08xf+uO8NETJwE7HnHfGTqd/wdxm9w63vQ2pCtJftivLdRnmv5/qj33vL70Mb4vC3vayLbiLVf7Tv1qPjtdQJrp4j52mcf5XtpEfv96NTPW1Var9iJV78bnHhzVnDozbcHC57+WbDhT2qBdOHt4GI5ZGDGvwR/3tAcPLHpkeDY7KzgnFf/XSysD7688HvB139ZH2z+5NfB1x+5zdJ1qpWkZdf6jNXBE2qbl/Z/T1t/7IOrgz8T6514a3XwuTdk+V11w9/8t8GCJ98Onqh+O/jUDNk9+w/B1z/R17X4f78OviiXj3kk+Hp1c7Dh5/8SnCZew9gnK7XFxn4K32gJdddqjwX9uEYFc+55JPjiz38dPPHz9cHH5Gv83wuDb1+Sz7AOCbj01kJtW5OWvhz8eXV98MNN3wtOku/XDKN7VT0/+7bg4j3N2pzWji/ae9N54nQlWLq69eeFu0es6
1m6HOxdNOo9tyw3dU83vPV2J3dNx+v+lVrrArYtt72eaF1U+vCCru1eincM2jJTl1FD2Xr9+LXXYn6vbN8DbbntvbK8H+rnwfY9Cu9LXx5eXy2P2ZVn/HyZllveb7W++fOI2Kdg/4zs3YeW16Cvb/mum7Yf8d03L+v073YM2uu1vy+6Vl+PWM/4vLVlcjvG8ojhBuK9WmjbVmjbrX0X9O+h5fsZ5XMwL485rEU7buvn+6L8/LT5cb7PbT5Gfbnld0HE98r+fpmWC5b329i/aX9yncW2/Vneg7jvmb4983LrZ6Jen+mYtN8FxnL78Yrvs/nYHcP+vlva+nsQ/h0j26bvQJTPTPuuCFHf27b+PrQxfobC201gG63t1/6a1HLL7yzbdyzq+9HJn/dVn3Q19Jvl+NnudSgcJ/6yeG2R+AtlDjbV6stObV2PPedvweI1j2PcoH4YWvAEHroVeL9ij0j0mXhgxdOYPTIT/W4YhtmPfRNDxTpGt6iuN2Y/Og9DvyAfn8fPXtuG865vYPm/zsNksd7Qu+Zh8UxTTfumh/H0P03E0OETMffevxMzjuIjo0xqdmAT1h7vjek/XIfZw/th0LjHxfN74/wb+7W/Dr58xxL8YFo/HHz+h/jB6jWoufUH+P5MWUsOu3ORfM3DMHTcQ3j6MbGvi3tw8H21MKQBm368B+cHiec88wDGDc/ELQVP48X5twDHt2H7MfU06daHsWRSP/1xK8enM783nW/P/PBfW9a/zpWP9+Pt2sl46GHj88lEYeFk21+1uojq5E0PYfljw1RDqa0X76Zu0F3WruqkVLsGk4337x0PTtiGEkxcUSvm7cWdb9v+Wu9EMY9B/IX+ohz+sCJ8xIMefkg7fv2zXG7qPtY/d0u1NnseHpLDKGIahrnPGdVsfX00NOjr7/Nhj2V9sfw58XOgWtGJ7e02vb93PYy52dYuQUv1XH13za8v2jqJiazOD3r4YUze6wv/LDvxuy2HstTUYs+db2vf03BFJYHXIz6f5ernftAdd4rPRrz/heq9vGki7sw+IT9ORXx+K0zbks83vR9xvwut2bceaxE+Fin6sJYGlJfvweTVpu+I+B1UKL5jCX2fr+IYE/rd1yrT+yuJbTxn+r078c5hqGlI6B0L/XyZ3zPt87VV3M3v1cRC8fNn+cxO4ZTxXPF9Nh2Zc9iGBVg+h4jfMRPx0GPA2+/IVxj7uxJN+/w+FCatw3bbv6XxtpHY9zZBcd8PqfM+73a5rNUXBomAWPYOju79Acb95Sie/pfXRbwUH+1J2SX/EZbfYYSbMXhahrr/J9f6M07tfhZLvjUJd94xBl/TutjsBiDtRvUQH6Lml+J/t4zGrZ/T50TIzAz9Mvryl7+sHkU69bE8rkvYXmQcVzYe2nwJ+Muf9CeIMDhx/sO49fwebD8gPuhF3xBHYjYAA0wzjH39WXtdZqdwWr6ooTdDRNSQAS658mk0N+ttTb+/hoqrCRyfZH5vOt/k1TLMqGlnlH8E5Dg4o8vUmObvifKLXLTF9z4zI/wLMoI2DhRqW10R3gYh0/KPrJ343onDt/6xZRZlvJD4BbBHe//WiX/sH43RFSO+ezvFc1YPwtopXdW1FuUYtDGOg8USuxifpdbFbP4H7cqdahTfHtPP+ZXRP6+Yor6+1j7jWORnb/sDT+uOU7/ku/y7Hd+gh3do39HM5yep72grr8fupkz5zsUf62vuuoz678CVSfy7Ems8X0d/nxP43XeFzMNMJj8fZ2icTfT3rLXffyYivG3ffSfell3NYt+xu5i7mgxdw1QBZT/27x0mcoj+OWjvgbmgoN5DPfTH+q5E04HfH7kN9TBS++437vvRyZ93uwRWwxcGfQOTbxUPflkj4qV4f4bIvxRHY/Huwzh4wDT96AGk7ViEyY9vx6WvP4MXX9uFg1tbq5IMwgD5hJpfh8aWXqlBN8nj6ofp62zHdeCfME57xiXsX70e7w/6O4wbpFdJjbG1uku4ZBqyejpwWvy3N3r30tth0Y9Zf37sL33rx5cE5A9HKJSZpohwGz0IyBMVLFTF54T2w9HZ/7DrVYpo1WHDxDvi/PWq/YVqO4klZCKekydlzY8TSLVqQIxA0FnMxxDzF1+cUBc14F4h+x892h9HbaH/Qo8pzi/2tocL+Y+9rPDafg5qdoSrH1363U6EPuZP/1wTeD1tIcNq+eDw74ndrf07kLhBGeI3TUKVzliBrHO+z63+7msjGVZfzNwb+mz2tKFiG/s9a8MJOjLEaPteB4g/bJwaWrVqvuwZsP1+1t6DSetC719o0npc2hDeO/L7E/d3XvvuN/77IXTi5311gfWdpzC96Fm8snsP9ohp0+o5eE4cbL+8vxMxVbzQ8eIL8bmjeOVfXtOuJiBPivpw55N4/aMvoPl8QM4QKfev0PvyaWwrf62Vf3QyMSlPbPXUi1hSuAZ7jjWgZt8aLN+a0DfH6ta/w/R+57F99Rq8HWgRM1pw+sB6rD3QDNnD/vt9P8RTO/4Lkx9djuWPTha/VH+I7+81n1R1Gpue/j72HD+vHcMPNog4Omg2JkekyfAxf/97r+Dg8QZ8tPspzF0tnn/rdEyK9Qu+leNLCrLrD2uwOOr17qy0E3yeXx8ObFqXs3osnFq/KPyPuFax6Xx6t+ejkcMfxD+42g+oCB1rM8VfoVGWD5t/ytRlGIVcd9IezFWD4vcvsoVX+QtVrN2ZZ6THPQatWzd8vNKp9S9qz9eC+/OLTaGrAeVL1gC2Kw5cKb3LeA1eDP1S1Lvo4juBteXmY12sdRXH7IaL8vrk5yivAjGxta67CPKPHWDtkijDZgQnfLcjGN/pEFmBEq9EC+vxX09b2St62pna6vFV0052tP4O2i++p5HHrf4gNf/RKH4HlYv3oGO/z6q7Ps7vPv07YRqKIpYvjlsxtVfX9CEcCYvynuk/L7H+4LaxfHdkuFMPncj4OZ+/x3pFFPkexOn1ivVdiabdvj9715u2sR9LxDHHOwm4Xb+38d6PTv68ry6wpg1A79M/w4tLxUE/vghPb25G5jdXYNMPJ0LrJBfJe8OP52HY2U2YO3MMxo0vwFM//wIGDBQvbeZczB4OHHxmGsbNWoNLX/e0+pf1gG+WY/s/TUa/E69gyTcnYfo/7UfzF1LV0jb48kQ89foKTP9yJZaL7YwbPwmFrzWgnysNuLgfTz2zB+fHL8EP7uqNL981D3NvvYT9z3wPe0Jn9g/D9Hv7YfuDYzB9/os4ccM3sPYn8yzd/gZ5zHuenYHeB9fgoZmTMPtJH74wbQX2lNuHGZjEO76kIbuS9a5EoxshVpeB7Hbc85gIdcbzlkDrrjEMyhD/QKouB9n9iNVXWM25KhPxnPgrUgulxnHKqXxwKPjIMZ/GmL/Q8vni2BOoPk1coQ8NkGMFMzNN70VoG9Yxrh0t/jHIz3Yv5jaIAK+WT35bzhW0Lm45fMBYdxLevnOvafzVVZJ/zWsVaWP7iwE5hk4tjm4Y5mb61PNld5Y4vmjDWEIiX9/VfAby+x3xvVFh2BnfbZvMwWiwdfljda1+
+TYh3utpq9Afgmo7ixsGtanCKsdPwn5Wdoj8mbX+DprbEA7HZhG/g6aoL3QHf5+191L+sWrs1/a7T3sN5u97xHI78d0ttP58NGRan9/W92zy23diT9yfFxPLd0d/r4zvjfPo4VP+fjCGA+jEe7B7nu1nIPxexfyuCBHvbXt9fyaJvxKXGNt4FA2PtfK+tuv3Ns770cmfd4o880o9pgTISzhMfh6Yu9sB/7AQkfZXvtatHPUfVXk5lvXI5M8rJQmjS7/d/sijpKZljoaHw13wPVi7jmElIupceveY/SYHRElJG3pir/gRkcTASkRJxHoBa6N7jNUoSkqyd8D8fZbj3dkbQBQVhwQQERERkaOxwkpEREREjsbASkRERESOxsBKRERERI7GwEpEREREjsbASkRERESOxsBKRERERI7GwEpEREREjsbASkRERESOdkU3Dkgp+bV6RE4TLL1ZPSIiIiLqHlhhJSIiIiJH461ZiYiIiMjRWGElIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdjYCUiIiIiR2NgJSIiIiJHY2AlIiIiIkdLCQrqccKq/+3fcPDnB3D58mU1h4iIiIioY1xRYPWuW4vpM+7FDTfeqOYQEREREXWMKxoSICurDKtERERE1Bk4hpWIiIiIHI2BlYiIiIgcrVMD65/O/Q6PrDoB99O1YhL/X9UIb92f1VIiIiIiokidFlj/41gjvr75j5j2D1nwfz9bTMPg/1Yq/vROEx45xtBKRERERNF1TmD97FM8vR94+qGBmHDd59RMIbUvFj58A/7n/tPY9Cc1Lxn9dhuW/MNaHFRNIiIiImo/nRNYqy/hw5uvwz1fVG2za1LxSM5n2PvL/1IzroIWHO/DA8b0/C/UAiIiIiJKVp0SWGtO/xeG9u+tWpFuSP3/cLblKgOrDKs/+AAjn3oVr/xEnx648Qx+oxYTERERUXLqlMA6dMBfwf+bFtWK1Hjh/4nQ+leqdRVuHIXRX1GPhXEzZ2Cgegz8AqtD1dfH8dPfqtnCwedNVdl/3BYKuXL+kq3b9PVC883buQ+rj2ozdUfXhrfD6i4RERFRu+icIQEjUvH1k+ex4/eqbfbZBXiP/X+Y/r+uMrB+ZRRGogL/HDUo/gY//cf1wMOq+vrUKBz7gTHm9Bc4hodVVfZp3CW24d0arss27wlgpFz2LzL8yrBq3k4e+qvnAb/EK1WjQvPTjlVYQjERERERXZnOCazX/A9MGPDf+Ja3EZtO/z81U13masXv8KeJ/TEt2vjWNhmIv/+XV/EA1kdWOI/uwj7kYdpo1Zbh9sYz+EQLlF/H/Me+rs2W2xg9MhxBpbTJUzFOPcbRD/BvIx/G/NB2ZuDvjcf4X3jA2I7a/pkzepOIiIiIrlwnBNY/4//u+C28va5Hzbd645N3GvVrsIpp0rY/4uv/cBNeGPkF9dyrN+4xvfqpBVdT9z4+qcA/G931//B97PskHCh/s/XxUFf+P++JnTJ/8+9nkHajS7XiGYgbrbmXiIiIiK5QBwdWPaw+jX54fVof3JB2HRbOuUm/BquY3i0eiNnXmy5z1Y7GPSa79z/AUaNbfqTR7R+eZKVUhtV//iQvNO+fJ8dPms2fBNQjIiIiIuoMHRhYrWH1f6q5HeboWusJUL/9AMc+UY9Hj8LfHltvXa785hNz1fQ3OHosdoV14K2jkGbezm+34adRtklERERE7aeDAmsnh1Vp9FT032V0+YtJu8TVs/h77aoBX8f8p/LwyXrTcjVcYNxjD+PGPd9X88twpn+cCutXZuA583bEPkxnXRERERFRB0gJCupxwpY/W4rFj5eoVqT/ONyAb/yub+eFVSIiIiLqtjqkwvo/x2RiL8MqEREREbWDTrhKABERERHRlWNgJSIiIiJHY2AlIiIiIke7osDaq1cvnP3EuGYUEREREVHHuaKrBFT/27/h4M8P4PLly2oOEREREVHHuKLASkRERETUWTiGlYiIiIgcjYGViIiIiByNgZWIiIiIHI2BlYiIiIgcjYGViIiIiByNgZWIiIiIHI2BlYiIiIgcjYGViIiIiByNgZWIiIiIHI2BlYiIiIgcjYGViIiIiByNgZWIiIiIHI2BlYiIiIgcjYGViIiIiByNgZWIiIiIHI2BlYiIiIgcjYGViIiIiByNgZWIiIiIHI2BlYiIiIgcjYGViIiIiByNgZWIiIiIHI2BlYiIiIgcjYGViIiIiBwtCQLru/ieuwA/rldNIiIiIupRrjCwNuLHBV/DSLdpKnhJzO0E+5d23r6IiIiIqMtdRYU1G0VvfIhjfn1anuHF937EGElERERE7avdhgRMGH836hpPqRbQ+KOCcPV16btqrlD/EmaHKrNL8Z4xz1I1lRXcyGEA2jYffwuo82KGWP97++VcOWTA2B6HDhARERF1N+0UWEXAfPkt3DX+dr25fylmvHc7thnVVyzCbK36KsLlve/i9lBldhkm6GskJOM7m3Hs2buBrGJt289MlMF2EfCssb3N+NZg9WQiIiIi6hauIrDWouzecKUUT8kAKeer8Prgt5GhPc9cfR2EQVm1ONVuIwcykCF20tjEoQhERERE3VW7jGFdfpcIry+buv2FfY8bYVZMshu/sVFE2Qx8a/MKQC3Tu/SvzoRlO3D7e9O07elVXCIiIiLqTtplSMCEZStw176XTONHrSdkadNmo+J6O57R5sng2h5jTmUIltvTg2t7hGAiIiIico52GsMqQuizGSj7gTxxKgMTJkA9jsc0PGBwBjLq3sV7Krw2/mgpyur0x4nj8AAiIiKi7qidAqsw8Tsoghczlr6rnRwlL3Mlz+Q3hgVEntE/De9O2KHGvd6Obz8sQq4aE/s9fBtFWXJ+FMZ+tG1arwe7GCuw6TvGyFkiIiIi6g5SgoJ6TERERETkOO1XYSUiIiIi6gAMrERERETkaAysRERERORoDKxERERE5GgMrERERET
kaAysRERERORoDKxERERE5GhXdB3WM2fOqEdERERERB3rigNr//43oqWlRc3pWrXvH8ZWZOIHt96o5nS8Q4cOY+zYMarVMf5w6BU04FaknjyFL31rMq5X83HuMH71/MdIfewBpMuZJ/bgl68fxefvmouvjk3Tn6M5gdrvvY8vGs9T5HZrT96E7G+NwbXaczbhDxiNv3lG7qMZTT9ei/Nps/G/pgxTa7Tdr7bOw8uHx+LBtdPxVTUvMedw4IVSfDpuDWb8rZpFREREPdoVB9bU1N7a45SUFO3/nensLyvxxInLqiW4bkb57QNVo3Ps3fsmJk26R7U6yi/x4f3r0DLhUeTO+V9qnu70hn9A/XuqMXQWbrxxCz7961J8/Z7+wLFXUbn2HbXQMAw3LnscWQPk4zOoKy3BJzVq/ty/xadrz+MrG++DttiyXPdXs9S2E/R/X5yKf3nrHvxjxXfxv9W8xJzGru8/gnOTd6FwtJpFREREPdoVB9brruurWj3TG29sxb33zlQtaj+NeO2he3Hm7z/AkvFqFhEREfVoVxxY09L6dUl11Sl+8pON+Id/uF+1qP3U40cFE/HvcxrxQ4+aRURERD3aFQfW66//6x4dWH/0o5fwne98W7Wo/ZyEN+9/4weHVPPBn+DCismqQURERD3RFQfWG264vkcH1vXrX8TDDz+kWkRERETUUa44sMqrBPRkL7zgxSOPFKsWEREREXUU3jiAiIiIiByNgZWIiIiIHI2BlYiIiIgcjYGViIiIiByNgTXZ7XwUKSnXiOlR7FCziIiIiLqTJA+sZ1G2wovBJa+i7BM1q0fZgUemeVG84zMEg+swTc0lIiIi6k6SOrA27N2PVf0GYIpq9zjHG1GDYkzIV20iIiKibih5A+snBzH/MLDAM0jNICIiIqLuKEkD61mUvV4NjJmIop59/wIiIiKibi85A+ux97EKI7B60g1qRg/1cRPeG5+OoapJRERE1B0lYWCtxoJtl7DgG+OQqeb0OMdX4nZ5ZYD94xF8byEDKxEREXVrKUFBPU7YmTNn0L9/F/XFH6vA4G2nVcMmaxzq54xQjY71wgtePPJIsWp1EXlJq3XpOMHQSkRERN1Y8lVYR+ahvrTYNI3DFPTGgsfE404Kq45xUzomHGhCjWoSERERdUfJe5UAIiIiIuoRukFgHYFVpffxagFERERE3RQrrMlseAaGogaNx1WbiIiIqBtiYE1q0/DCjqFYNOIapKQ8ih1qLhEREVF3wsCa7PLXIRj8TEzrRHwlIiIi6n4YWImIiIjI0RhYk9Dx479Wj4iIiIi6PwZWIiIiInI0BlYiIiIicjQGViIiIiJyNAbWHmkvfH99PbZ7TwK1ZdguHvveVIsMav5r2lSSBLd/PYmj06K8ju7gzRLxGczA0Vqg2TsjST4PIiKi9pOcgfWTg5hS4sVg87ShWi2kqyYD0rh/x7DfncM35bQBODatDM1qMXU3RzC/dDZ6lW2FyMRERESOk7wV1utGwFdajHpjmjNCLaDW3YTUMcC1GUOA7IG4FmOROkgtkpXKlz7G4IOlGKrm4J4HMBjvoYFppmsM+ht8SX5m2UBaxk3AmL9BP7XoqvnXoFfpB0h3u9QMIiIi5+GQgB5pCEbvOAfPPfLxJHh+tw2jRRjSvPkK6gc/EG5rhiDzdiDw7km9qXVRG8MF1NACE73bOtZwAr3rPrTcUrkNd+vXLDGeo3eFh+nDGaKvL5mXt7HrvJXXFXffliEUYlqyVy0QtGXyWEzr24477nuWXYTpv1N/QNxTim/uKEKatsDk+ErcnnINbl/elld8BPOPuuAvmQePmkNEROREDKxkUVP5MtJyJ4lH1nAFWdnTiPlzZAVWDRcQ0/TiIWqZIEKf78mbMFIt8zzxsWU4QbP3FWCZse4hDMaTOGgPvHOux4mBh7TnjJxzCPUvG+FPHtOD+MMT+jJtsoW35jmvIFU7NrHtMS/jRETojKWV1xV332LZuCdx7QZjXfEebnjQFnhfxrG/No5NLD/8JPzGeNtW3rOOcxtWF82E5W8TIiIiB0rewPppNTyhMawV8KnZdDVOoqVeHx5Qs0SFMxG8MHigraJ3KFxttdEC74bwcIK04gdEOAsPJ0grLjVVb2Xldiz++JuPVVuZ83IoLA7NFfuv/40W3mTYbR7zBMZZgqRV2gajWhxj23HFfl3x9m0sc2sVa2kSPOJ9++O771hCZ/jYJuErc4A/NOr7au09S8jwhXg3+BneXRwayEFERNRtJGdgvXEcdpvGr/rGXEIxQ2s7kWMl9+K3InANE+GsufFjfGmgUV2VRBg7+ATw5Ngo3dcy8OoV0nD39oPWSqGt69z35CG1IEyv8CqmLvDzvxHPjQjP7SXe60pg360e14P4SijQiiD+nFHBTeA9IyIi6uG6xZCATPdADFOP6Wp8jJbD4n+1v8Ef9BlaUJMnZ8ngqp2kJWljKk3d17ZwlxbqGjcmo7Kod53D1K3ueWKstk4i+g1M/LlXJM7ranXfqgpskO9XW8R+z4iIiKgbBNazKHu9GieyBvHEkaumXz0gTFZa5f9PouFdmK4kEKadtR6id8M3z7GdNGQTCr61ZTgYpcIaS9rtE/ClDQ9e3bVWjROrzCdFRWF9XfH3rS0zj0kV75tfvK60b0c5OSpCYu9Zq67opKsE7XwUKWLbKUU71AwiIqLOlZSBtWHvq6ZrsG7HvqHTeVmrdvMxWnAHXGPUSUJPPCjC1NjwlQNsZ9K/pp2oZB5/uU1VJ03PCZ1ANAnuJ2Q4U/PH/TtcbaiwahXQg0/gD+bu8/Y6OamV1xV33xHL9PG/+lUYWhf/PetITVhbNhu9SmfD7Q8ALTvhjnY91vzxKJb/r228ulBNRER0hVKCgnqcsDNnzqB//xtVq2d64QUvHnlE+2e80x0//msMH36zarUveTkpeYa+9Qx56tlqsHLCzViUvQ3BsmlqHhERUefpFmNYqf0MfVCeeLTUdO3TvfC10n1O3VfN8juQksKwSkREXYuBlaxk9/aGm1A/Lty9DfNZ+9SjDF38DoLBzxhWiYioSzGwUiR5KSnTGeuJjsUkIiIi6ggMrERERETkaAysRERERORoDKxERERE5GgMrERERETkaAysRERERORoDKw90l74/vp6bPee1G6Pul08vuLbnWrrm28r2o7bjnAeqx/djM8/K/dWg8embcadr5/XF3Up/bgeS/wus+2q9vWd+Py0/dgpHu98Vrw/j1ZZ71RFRESU5JI6sPo2GLdnFdOGajWXrs5JHJ1mukWomrQAmrRiBUo5fydWN8rHegD+vHnqlOCn79d6bNHmERER9VxJGljPomyFF8UYh/rSYn2aM0Ito9bdhNQxwLUZQ4DsgbgWY5E6SC3CEIzeIa+/egiDxXO+9MQh7Vqsid+qNd62r1Y/pA8Axg9IE4/TkOESu/hKP31RAmp/21o1th/e2FGA/zamdTnIVkucLPsr1wKuVAwVj4cO+CIw4MtJcdxERESJSs7Aeux9rOonwipD6hXSQ6l+Q4BJ8PxuG0a3JeG8WRKuvo
zH5qlA4eiay1RKj+lvoiT+eVo437VW6VOv+97td0Su4UfedmFrfLq3Cy+oqERFR4hweWKux4PlqYMx0rBqpZimZk+4LjWP1pX2EwSsOxhgyQO1ND3XhyTyWU1ZZSzYuRdNofbzmardevcxKS6SzXGj0otdRF/xyrGeRCJznAqhMdVm797XxpjJ8mk+KktIxt8gYK7oM6UfDxybHoJb0LY57klO5bzbK++jjVGX3f50Iq7l9bMky5r4TIaurAVMll4iIiBLh4MAqwqpWPR2H3ZPid/dnugdi2KcXUa/a1LHsJxGFx3a6kJ6qn9kfDob6MIFETjDKTpMJMAebZVDVZ+nDBPr2D1dK1Zn4sOwjmnR4Mo0KaRN8DeL/MgyrkK1dxUBryxOz0pHeVzwto9hyRYKIoJ3wvqPTqqup+Si8gnWJiIh6MocG1nBYTeRyVT5fNU5kDYJHtamrqJDo94bOztdDmm0sqQx+Mjjarx7gHqWd6FSwRT/JKqIiaQqM4WAZi3ldc+VVn2R3v3ZpqhK9Upo3JEcLsMYVCYwTwHKNcNmmfUejjscyTMGsBisnXIOUlDuw8riaRURERBpHXodVjk/1HL6kWmFTZhRrQwMilvM6rI5iuQ5raj78poqpRoW/ymjLRLAzX4fVfK3S6NczNa6XKsevmq6TKsS7zql2jPLM/pjXYbVePzb+vq3HHGK6Hqu2/oVorzesZvkdGLbkPRTv+Awv5KuZRERE5NwbBzgdAyu1u52PImVaDVZUv4OFw9U8IiIi6g6XtSJKdjvwSMo1DKtEREQxMLASdblpeCH4GYJBhlUiIqJoGFiJiIiIyNEYWImIiIjI0RhYiYiIiMjRGFiJiIiIyNEYWImIiIjI0RhYiYiIiMjRGFiJiIiIyNEce6cr3wYviutU47oR8C0ah0zVDDuLshXbserT3ljw2H0o6sSbb/FOV7FZbs1qu8Vp66y3Oc213bs//rbtt0g1L7ffutW4rapqGreLVc1ot5S1357VeuvXeMcd5datUW9LS0RERFHJwNpWgUBA/Pezjpt+uTOY+cqvVPtM0Lt8nakdnur3/ETM3xmc/48/CXrPWJd19LRu3bqo8ztjqq6ujjrfEdOxVcEvLVsV3KXauzbPCn7J+9Ngjfk5MadTwTXeWcG7953S26d/Grx72azgvGNqedxt6+uGnmss33wo1LZMtm3ZJ/u6NfsWWrdlWb+V4w4eCs6Lsy9OnDhx4sSJU/zJmUMCRuahfs4I1bgBnqG9gfMX0aDmaD45iPmHgQWeQWoGdb0mrD1ahVz31FDVM29sPnJbquAziqLx+HehpCUHhUZl0jUThRlA+ckjotHatgNoanEh/XptkSarj1E+jeJ6F3LVw2js69ZdDCDXPM+8ftzjJiIioquVpGNYz6Ls9WpgzMROHQZArdFDo2e4qStc62YX88+pWXHUNovkmTEqFEhl97/WBX/hDGpb3fZtyM0IoGTjGlTIZmArFvgDKBxym2xFqD1ehUrTvqya4Guwrps3JAeV/qWY75ctEZ4rxL7V+vGPm4iIiK6W8wOrVkm9hCnjTWNYj72PVRiB1ZNuUDPIWeR40dnoVeoFPJuwOQOoa25SyxLgXyPWnQ13Qw7898sqagDGcOZ4286btQmXPUCBWLfXxip47jePMZXkWFK5rti2CJ6lY21hVu23V+lSlCAfheZ13fNwuaQY8OnLfZnLcHlW9PWjH3eVflzatBhrE6k4ExERkQD8/+3lMs0z1zWgAAAAAElFTkSuQmCC" + } + }, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![image.png](attachment:image.png)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's now try to query from Azure AI Search!\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "await search_memory_examples(kernel)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We have laid the foundation which will allow us to store an arbitrary amount of data in an external Vector Store above and beyond what could fit in memory at the expense of a little more latency.\n" ] } ], @@ -416,7 +537,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/python/notebooks/07-hugging-face-for-plugins.ipynb b/python/notebooks/07-hugging-face-for-plugins.ipynb new file mode 100644 index 000000000000..6bc97e9c226a --- /dev/null +++ b/python/notebooks/07-hugging-face-for-plugins.ipynb @@ -0,0 +1,171 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "id": "68e1c158", + "metadata": {}, + "source": [ + "# Using Hugging Face With Plugins\n", + "\n", + "In this notebook, we demonstrate using Hugging Face models for Plugins using both SemanticMemory and text completions. \n", + "\n", + "SK supports downloading models from the Hugging Face that can perform the following tasks: text-generation, text2text-generation, summarization, and sentence-similarity. You can search for models by task at https://huggingface.co/models." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a77bdf89", + "metadata": {}, + "outputs": [], + "source": [ + "!python -m pip install semantic-kernel==0.5.1.dev0\n", + "\n", + "# Note that additional dependencies are required for the Hugging Face connectors:\n", + "!python -m pip install torch==2.0.0\n", + "!python -m pip install transformers==^4.28.1\n", + "!python -m pip install sentence-transformers==^2.2.2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "508ad44f", + "metadata": {}, + "outputs": [], + "source": [ + "import semantic_kernel as sk\n", + "import semantic_kernel.connectors.ai.hugging_face as sk_hf" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "d8ddffc1", + "metadata": {}, + "source": [ + "First, we will create a kernel and add both text completion and embedding services. \n", + "\n", + "For text completion, we are choosing GPT2. This is a text-generation model. (Note: text-generation will repeat the input in the output, text2text-generation will not.)\n", + "For embeddings, we are using sentence-transformers/all-MiniLM-L6-v2. Vectors generated for this model are of length 384 (compared to a length of 1536 from OpenAI ADA).\n", + "\n", + "The following step may take a few minutes when run for the first time as the models will be downloaded to your local machine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f8dcbc6", + "metadata": {}, + "outputs": [], + "source": [ + "kernel = sk.Kernel()\n", + "\n", + "# Configure LLM service\n", + "kernel.add_text_completion_service(\n", + " service_id=\"gpt2\",\n", + " service=sk_hf.HuggingFaceTextCompletion(ai_model_id=\"gpt2\", task=\"text-generation\"),\n", + ")\n", + "kernel.add_text_embedding_generation_service(\n", + " service_id=\"sentence-transformers/all-MiniLM-L6-v2\",\n", + " service=sk_hf.HuggingFaceTextEmbedding(ai_model_id=\"sentence-transformers/all-MiniLM-L6-v2\"),\n", + ")\n", + "kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore())\n", + "kernel.import_plugin(sk.core_plugins.TextMemoryPlugin())" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "2a7e7ca4", + "metadata": {}, + "source": [ + "### Add Memories and Define a plugin to use them\n", + "\n", + "Most models available on huggingface.co are not as powerful as OpenAI GPT-3+. Your plugins will likely need to be simpler to accommodate this." 
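The note above, that a text-generation model repeats its prompt while a text2text-generation model does not, can be checked directly with the transformers pipeline, outside Semantic Kernel. This is only an illustrative sketch; the flan-t5 model name is an arbitrary example of a text2text model and is not used elsewhere in these notebooks:

from transformers import pipeline

# gpt2 is a text-generation model: its output begins with the prompt itself.
generator = pipeline("text-generation", model="gpt2")
print(generator("Penguins are", max_new_tokens=10)[0]["generated_text"])

# flan-t5-small is a text2text-generation model: it returns only the newly generated text.
seq2seq = pipeline("text2text-generation", model="google/flan-t5-small")
print(seq2seq("Complete the sentence: Penguins are", max_new_tokens=10)[0]["generated_text"])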
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d096504c", + "metadata": {}, + "outputs": [], + "source": [ + "await kernel.memory.save_information(collection=\"animal-facts\", id=\"info1\", text=\"Sharks are fish.\")\n", + "await kernel.memory.save_information(collection=\"animal-facts\", id=\"info2\", text=\"Whales are mammals.\")\n", + "await kernel.memory.save_information(collection=\"animal-facts\", id=\"info3\", text=\"Penguins are birds.\")\n", + "await kernel.memory.save_information(collection=\"animal-facts\", id=\"info4\", text=\"Dolphins are mammals.\")\n", + "await kernel.memory.save_information(collection=\"animal-facts\", id=\"info5\", text=\"Flies are insects.\")\n", + "\n", + "# Define semantic function using SK prompt template language\n", + "my_prompt = \"\"\"I know these animal facts: {{recall $query1}} {{recall $query2}} {{recall $query3}} and \"\"\"\n", + "\n", + "# Create the semantic function\n", + "my_function = kernel.create_semantic_function(prompt_template=my_prompt, max_tokens=45, temperature=0.5, top_p=0.5)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "2calf857", + "metadata": {}, + "source": [ + "Let's now see what the completion looks like! Remember, \"gpt2\" is nowhere near as large as ChatGPT, so expect a much simpler answer." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "628c843e", + "metadata": {}, + "outputs": [], + "source": [ + "context = kernel.create_new_context()\n", + "context[sk.core_plugins.TextMemoryPlugin.COLLECTION_PARAM] = \"animal-facts\"\n", + "context[sk.core_plugins.TextMemoryPlugin.RELEVANCE_PARAM] = \"0.3\"\n", + "\n", + "context[\"query1\"] = \"animal that swims\"\n", + "context[\"query2\"] = \"animal that flies\"\n", + "context[\"query3\"] = \"penguins are?\"\n", + "output = await kernel.run(my_function, input_vars=context.variables)\n", + "\n", + "output = str(output).strip()\n", + "\n", + "query_result1 = await kernel.memory.search(\n", + " collection=\"animal-facts\", query=context[\"query1\"], limit=1, min_relevance_score=0.3\n", + ")\n", + "query_result2 = await kernel.memory.search(\n", + " collection=\"animal-facts\", query=context[\"query2\"], limit=1, min_relevance_score=0.3\n", + ")\n", + "query_result3 = await kernel.memory.search(\n", + " collection=\"animal-facts\", query=context[\"query3\"], limit=1, min_relevance_score=0.3\n", + ")\n", + "\n", + "print(f\"gpt2 completed prompt with: '{output}'\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/python/notebooks/07-hugging-face-for-skills.ipynb b/python/notebooks/07-hugging-face-for-skills.ipynb deleted file mode 100644 index 057d44216944..000000000000 --- a/python/notebooks/07-hugging-face-for-skills.ipynb +++ /dev/null @@ -1,183 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "68e1c158", - "metadata": {}, - "source": [ - "# Using Hugging Face With Skills\n", - "\n", - "In this notebook, we demonstrate using Hugging Face models for Skills using both SemanticMemory and text completions. 
\n", - "\n", - "SK supports downloading models from the Hugging Face that can perform the following tasks: text-generation, text2text-generation, summarization, and sentence-similarity. You can search for models by task at https://huggingface.co/models." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a77bdf89", - "metadata": {}, - "outputs": [], - "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0\n", - "\n", - "# Note that additional dependencies are required for the Hugging Face connectors:\n", - "!python -m pip install torch==2.0.0\n", - "!python -m pip install transformers==^4.28.1\n", - "!python -m pip install sentence-transformers==^2.2.2" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "508ad44f", - "metadata": {}, - "outputs": [], - "source": [ - "import semantic_kernel as sk\n", - "import semantic_kernel.connectors.ai.hugging_face as sk_hf" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "d8ddffc1", - "metadata": {}, - "source": [ - "First, we will create a kernel and add both text completion and embedding services. \n", - "\n", - "For text completion, we are choosing GPT2. This is a text-generation model. (Note: text-generation will repeat the input in the output, text2text-generation will not.)\n", - "For embeddings, we are using sentence-transformers/all-MiniLM-L6-v2. Vectors generated for this model are of length 384 (compared to a length of 1536 from OpenAI ADA).\n", - "\n", - "The following step may take a few minutes when run for the first time as the models will be downloaded to your local machine." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8f8dcbc6", - "metadata": {}, - "outputs": [], - "source": [ - "kernel = sk.Kernel()\n", - "\n", - "# Configure LLM service\n", - "kernel.add_text_completion_service(\n", - " \"gpt2\", sk_hf.HuggingFaceTextCompletion(\"gpt2\", task=\"text-generation\")\n", - ")\n", - "kernel.add_text_embedding_generation_service(\n", - " \"sentence-transformers/all-MiniLM-L6-v2\",\n", - " sk_hf.HuggingFaceTextEmbedding(\"sentence-transformers/all-MiniLM-L6-v2\"),\n", - ")\n", - "kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore())\n", - "kernel.import_skill(sk.core_skills.TextMemorySkill())" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "2a7e7ca4", - "metadata": {}, - "source": [ - "### Add Memories and Define a skill to use them\n", - "\n", - "Most models available on huggingface.co are not as powerful as OpenAI GPT-3+. Your skills will likely need to be simpler to accommodate this." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d096504c", - "metadata": {}, - "outputs": [], - "source": [ - "await kernel.memory.save_information_async(\n", - " \"animal-facts\", id=\"info1\", text=\"Sharks are fish.\"\n", - ")\n", - "await kernel.memory.save_information_async(\n", - " \"animal-facts\", id=\"info2\", text=\"Whales are mammals.\"\n", - ")\n", - "await kernel.memory.save_information_async(\n", - " \"animal-facts\", id=\"info3\", text=\"Penguins are birds.\"\n", - ")\n", - "await kernel.memory.save_information_async(\n", - " \"animal-facts\", id=\"info4\", text=\"Dolphins are mammals.\"\n", - ")\n", - "await kernel.memory.save_information_async(\n", - " \"animal-facts\", id=\"info5\", text=\"Flies are insects.\"\n", - ")\n", - "\n", - "# Define semantic function using SK prompt template language\n", - "my_prompt = \"\"\"I know these animal facts: {{recall $query1}} {{recall $query2}} {{recall $query3}} and \"\"\"\n", - "\n", - "# Create the semantic function\n", - "my_function = kernel.create_semantic_function(\n", - " my_prompt, max_tokens=45, temperature=0.5, top_p=0.5\n", - ")\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "2calf857", - "metadata": {}, - "source": [ - "Let's now see what the completion looks like! Remember, \"gpt2\" is nowhere near as large as ChatGPT, so expect a much simpler answer." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "628c843e", - "metadata": {}, - "outputs": [], - "source": [ - "context = kernel.create_new_context()\n", - "context[sk.core_skills.TextMemorySkill.COLLECTION_PARAM] = \"animal-facts\"\n", - "context[sk.core_skills.TextMemorySkill.RELEVANCE_PARAM] = 0.3\n", - "\n", - "context[\"query1\"] = \"animal that swims\"\n", - "context[\"query2\"] = \"animal that flies\"\n", - "context[\"query3\"] = \"penguins are?\"\n", - "output = await kernel.run_async(my_function, input_vars=context.variables)\n", - "\n", - "output = str(output).strip()\n", - "\n", - "\n", - "query_result1 = await kernel.memory.search_async(\n", - " \"animal-facts\", context[\"query1\"], limit=1, min_relevance_score=0.3\n", - ")\n", - "query_result2 = await kernel.memory.search_async(\n", - " \"animal-facts\", context[\"query2\"], limit=1, min_relevance_score=0.3\n", - ")\n", - "query_result3 = await kernel.memory.search_async(\n", - " \"animal-facts\", context[\"query3\"], limit=1, min_relevance_score=0.3\n", - ")\n", - "\n", - "print(f\"gpt2 completed prompt with: '{output}'\")" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/python/notebooks/08-native-function-inline.ipynb b/python/notebooks/08-native-function-inline.ipynb index 484f43a9a27a..afaa72e2bcff 100644 --- a/python/notebooks/08-native-function-inline.ipynb +++ b/python/notebooks/08-native-function-inline.ipynb @@ -6,7 +6,7 @@ "id": "3c93ac5b", "metadata": {}, "source": [ - "# Running Native Functions" + "# Running Native Functions\n" ] }, { @@ -21,13 +21,13 @@ "\n", "This can be useful in a few scenarios:\n", "\n", - "* Writing logic around how to run a prompt that changes the prompt's outcome.\n", - "* Using external data sources to 
gather data to concatenate into your prompt.\n", - "* Validating user input data prior to sending it to the LLM prompt.\n", + "- Writing logic around how to run a prompt that changes the prompt's outcome.\n", + "- Using external data sources to gather data to concatenate into your prompt.\n", + "- Validating user input data prior to sending it to the LLM prompt.\n", "\n", "Native functions are defined using standard Python code. The structure is simple, but not well documented at this point.\n", "\n", - "The following examples are intended to help guide new users towards successful native & semantic function use with the SK Python framework." + "The following examples are intended to help guide new users towards successful native & semantic function use with the SK Python framework.\n" ] }, { @@ -36,7 +36,7 @@ "id": "d90b0c13", "metadata": {}, "source": [ - "Prepare a semantic kernel instance first, loading also the AI service settings defined in the [Setup notebook](00-getting-started.ipynb):" + "Prepare a semantic kernel instance first, loading also the AI service settings defined in the [Setup notebook](00-getting-started.ipynb):\n" ] }, { @@ -46,7 +46,7 @@ "metadata": {}, "outputs": [], "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" + "!python -m pip install semantic-kernel==0.5.1.dev0" ] }, { @@ -56,23 +56,27 @@ "metadata": {}, "outputs": [], "source": [ - "import os\n", - "import sys\n", "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureChatCompletion,\n", + " OpenAIChatCompletion,\n", + ")\n", "\n", "kernel = sk.Kernel()\n", "\n", - "\n", "useAzureOpenAI = False\n", "\n", "# Configure AI service used by the kernel\n", "if useAzureOpenAI:\n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat_completion\", AzureChatCompletion(deployment, endpoint, api_key))\n", + " azure_chat_service = AzureChatCompletion(\n", + " deployment_name=\"turbo\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your chat model\n", + " kernel.add_chat_service(\"chat_completion\", azure_chat_service)\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))" + " oai_chat_service = OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id)\n", + " kernel.add_chat_service(\"chat-gpt\", oai_chat_service)" ] }, { @@ -81,7 +85,7 @@ "id": "186767f8", "metadata": {}, "source": [ - "Let's create a **native** function that gives us a random number between 3 and a user input as the upper limit. We'll use this number to create 3-x paragraphs of text when passed to a semantic function." + "Let's create a **native** function that gives us a random number between 3 and a user input as the upper limit. We'll use this number to create 3-x paragraphs of text when passed to a semantic function.\n" ] }, { @@ -90,7 +94,7 @@ "id": "589733c5", "metadata": {}, "source": [ - "First, let's create our native function." 
+ "First, let's create our native function.\n" ] }, { @@ -101,16 +105,17 @@ "outputs": [], "source": [ "import random\n", - "from semantic_kernel.skill_definition import sk_function\n", + "from semantic_kernel.plugin_definition import kernel_function\n", + "\n", "\n", - "class GenerateNumberSkill:\n", + "class GenerateNumberPlugin:\n", " \"\"\"\n", " Description: Generate a number between 3-x.\n", " \"\"\"\n", "\n", - " @sk_function(\n", + " @kernel_function(\n", " description=\"Generate a random number between 3-x\",\n", - " name=\"GenerateNumberThreeOrHigher\"\n", + " name=\"GenerateNumberThreeOrHigher\",\n", " )\n", " def generate_number_three_or_higher(self, input: str) -> str:\n", " \"\"\"\n", @@ -123,7 +128,7 @@ " int value\n", " \"\"\"\n", " try:\n", - " return str(random.randint(3, int(input))) \n", + " return str(random.randint(3, int(input)))\n", " except ValueError as e:\n", " print(f\"Invalid input {input}\")\n", " raise e" @@ -135,7 +140,7 @@ "id": "f26b90c4", "metadata": {}, "source": [ - "Next, let's create a semantic function that accepts a number as `{{$input}}` and generates that number of paragraphs about two Corgis on an adventure. `$input` is a default variable semantic functions can use. " + "Next, let's create a semantic function that accepts a number as `{{$input}}` and generates that number of paragraphs about two Corgis on an adventure. `$input` is a default variable semantic functions can use.\n" ] }, { @@ -154,15 +159,17 @@ "- Be exactly {{$input}} paragraphs long\n", "\"\"\"\n", "\n", - "corgi_story = kernel.create_semantic_function(prompt_template=sk_prompt,\n", - " function_name=\"CorgiStory\",\n", - " skill_name=\"CorgiSkill\",\n", - " description=\"Write a short story about two Corgis on an adventure\",\n", - " max_tokens=500,\n", - " temperature=0.5,\n", - " top_p=0.5)\n", + "corgi_story = kernel.create_semantic_function(\n", + " prompt_template=sk_prompt,\n", + " function_name=\"CorgiStory\",\n", + " plugin_name=\"CorgiPlugin\",\n", + " description=\"Write a short story about two Corgis on an adventure\",\n", + " max_tokens=500,\n", + " temperature=0.5,\n", + " top_p=0.5,\n", + ")\n", "\n", - "generate_number_skill = kernel.import_skill(GenerateNumberSkill())" + "generate_number_plugin = kernel.import_plugin(GenerateNumberPlugin(), \"GenerateNumberPlugin\")" ] }, { @@ -173,8 +180,8 @@ "outputs": [], "source": [ "# Run the number generator\n", - "generate_number_three_or_higher = generate_number_skill[\"GenerateNumberThreeOrHigher\"]\n", - "number_result = generate_number_three_or_higher(6)\n", + "generate_number_three_or_higher = generate_number_plugin[\"GenerateNumberThreeOrHigher\"]\n", + "number_result = await generate_number_three_or_higher(6)\n", "print(number_result)" ] }, @@ -185,7 +192,7 @@ "metadata": {}, "outputs": [], "source": [ - "story = await corgi_story.invoke_async(input=number_result.result)" + "story = await corgi_story.invoke(input=number_result.result)" ] }, { @@ -208,37 +215,41 @@ "source": [ "## Context Variables\n", "\n", - "That works! But let's expand on our example to make it more generic. \n", + "That works! But let's expand on our example to make it more generic.\n", "\n", "For the native function, we'll introduce the lower limit variable. This means that a user will input two numbers and the number generator function will pick a number between the first and second input.\n", "\n", - "We'll make use of the `semantic_kernel.ContextVariables` class to do hold these variables." 
+ "We'll make use of the `semantic_kernel.ContextVariables` class to do hold these variables.\n" ] }, { "cell_type": "code", "execution_count": null, - "id": "fa5087bd", + "id": "d54983d8", "metadata": {}, "outputs": [], "source": [ - "import os\n", - "import sys\n", "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureChatCompletion,\n", + " OpenAIChatCompletion,\n", + ")\n", "\n", "kernel = sk.Kernel()\n", "\n", - "\n", "useAzureOpenAI = False\n", "\n", "# Configure AI service used by the kernel\n", "if useAzureOpenAI:\n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat_completion\", AzureChatCompletion(deployment, endpoint, api_key))\n", + " azure_chat_service = AzureChatCompletion(\n", + " deployment_name=\"turbo\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your chat model\n", + " kernel.add_chat_service(\"chat_completion\", azure_chat_service)\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))\n" + " oai_chat_service = OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id)\n", + " kernel.add_chat_service(\"chat-gpt\", oai_chat_service)" ] }, { @@ -247,7 +258,7 @@ "id": "091f45e4", "metadata": {}, "source": [ - "Let's start with the native function. Notice that we're also adding `@sk_function_context_parameter` decorators to the function here to provide context about what variables need to be provided to the function, and any defaults for those inputs. Using the `@sk_function_context_parameter` decorator provides the name, description and default values for a function's inputs to the [planner.](./05-using-the-planner.ipynb)" + "Let's start with the native function. Notice that we're also adding `@kernel_function_context_parameter` decorators to the function here to provide context about what variables need to be provided to the function, and any defaults for those inputs. 
Using the `@kernel_function_context_parameter` decorator provides the name, description and default values for a function's inputs to the [planner.](./05-using-the-planner.ipynb)\n" ] }, { @@ -258,21 +269,22 @@ "outputs": [], "source": [ "import random\n", - "from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter\n", - "from semantic_kernel import SKContext\n", + "from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter\n", + "from semantic_kernel import KernelContext\n", + "\n", "\n", - "class GenerateNumberSkill:\n", + "class GenerateNumberPlugin:\n", " \"\"\"\n", " Description: Generate a number between a min and a max.\n", " \"\"\"\n", "\n", - " @sk_function(\n", + " @kernel_function(\n", " description=\"Generate a random number between min and max\",\n", - " name=\"GenerateNumber\"\n", + " name=\"GenerateNumber\",\n", " )\n", - " @sk_function_context_parameter(name=\"min\", description=\"Minimum number of paragraphs.\")\n", - " @sk_function_context_parameter(name=\"max\", description=\"Maximum number of paragraphs.\", default_value=10)\n", - " def generate_number(self, context: SKContext) -> str:\n", + " @kernel_function_context_parameter(name=\"min\", description=\"Minimum number of paragraphs.\")\n", + " @kernel_function_context_parameter(name=\"max\", description=\"Maximum number of paragraphs.\", default_value=\"10\")\n", + " def generate_number(self, context: KernelContext) -> str:\n", " \"\"\"\n", " Generate a number between min-max\n", " Example:\n", @@ -284,7 +296,7 @@ " int value\n", " \"\"\"\n", " try:\n", - " return str(random.randint(int(context[\"min\"]), int(context[\"max\"]))) \n", + " return str(random.randint(int(context[\"min\"]), int(context[\"max\"])))\n", " except ValueError as e:\n", " print(f\"Invalid input {context['min']} {context['max']}\")\n", " raise e" @@ -297,8 +309,8 @@ "metadata": {}, "outputs": [], "source": [ - "generate_number_skill = kernel.import_skill(GenerateNumberSkill())\n", - "generate_number = generate_number_skill[\"GenerateNumber\"]" + "generate_number_plugin = kernel.import_plugin(GenerateNumberPlugin(), \"GenerateNumberPlugin\")\n", + "generate_number = generate_number_plugin[\"GenerateNumber\"]" ] }, { @@ -307,7 +319,7 @@ "id": "6ad068d6", "metadata": {}, "source": [ - "Now let's also allow the semantic function to take in additional arguments. In this case, we're going to allow the our CorgiStory function to be written in a specified language. We'll need to provide a `paragraph_count` and a `language`." + "Now let's also allow the semantic function to take in additional arguments. In this case, we're going to allow the our CorgiStory function to be written in a specified language. 
We'll need to provide a `paragraph_count` and a `language`.\n" ] }, { @@ -327,13 +339,15 @@ "- Be written in this language: {{$language}}\n", "\"\"\"\n", "\n", - "corgi_story = kernel.create_semantic_function(prompt_template=sk_prompt,\n", - " function_name=\"CorgiStory\",\n", - " skill_name=\"CorgiSkill\",\n", - " description=\"Write a short story about two Corgis on an adventure\",\n", - " max_tokens=500,\n", - " temperature=0.5,\n", - " top_p=0.5)" + "corgi_story = kernel.create_semantic_function(\n", + " prompt_template=sk_prompt,\n", + " function_name=\"CorgiStory\",\n", + " plugin_name=\"CorgiPlugin\",\n", + " description=\"Write a short story about two Corgis on an adventure\",\n", + " max_tokens=500,\n", + " temperature=0.5,\n", + " top_p=0.5,\n", + ")" ] }, { @@ -342,7 +356,7 @@ "id": "fdce1872", "metadata": {}, "source": [ - "Now we can call this using our `invoke` function by passing in our `context_variables` in the `variables` parameter. " + "Now we can call this using our `invoke` function by passing in our `context_variables` in the `variables` parameter.\n" ] }, { @@ -352,11 +366,7 @@ "metadata": {}, "outputs": [], "source": [ - "context_variables = sk.ContextVariables(variables={\n", - " \"min\": 1,\n", - " \"max\": 5,\n", - " \"language\": \"Spanish\"\n", - "})" + "context_variables = sk.ContextVariables(variables={\"min\": \"1\", \"max\": \"5\", \"language\": \"Spanish\"})" ] }, { @@ -365,7 +375,7 @@ "id": "c8778bad", "metadata": {}, "source": [ - "Let's add a paragraph count to our context variables " + "Let's add a paragraph count to our context variables\n" ] }, { @@ -375,7 +385,8 @@ "metadata": {}, "outputs": [], "source": [ - "context_variables['paragraph_count'] = generate_number.invoke(variables=context_variables).result" + "num = await generate_number.invoke(variables=context_variables)\n", + "context_variables[\"paragraph_count\"] = num.result" ] }, { @@ -386,7 +397,7 @@ "outputs": [], "source": [ "# Pass the output to the semantic story function\n", - "story = await corgi_story.invoke_async(variables=context_variables)" + "story = await corgi_story.invoke(variables=context_variables)" ] }, { @@ -398,8 +409,11 @@ }, "outputs": [], "source": [ - "print(\"Generating a corgi story exactly {} paragraphs long in {} language: \".format(context_variables[\"paragraph_count\"],\n", - " context_variables[\"language\"]))\n", + "print(\n", + " \"Generating a corgi story exactly {} paragraphs long in {} language: \".format(\n", + " context_variables[\"paragraph_count\"], context_variables[\"language\"]\n", + " )\n", + ")\n", "print(\"=====================================================\")\n", "print(story)" ] @@ -416,7 +430,7 @@ "\n", "We will make our CorgiStory semantic function call a native function `GenerateNames` which will return names for our Corgi characters.\n", "\n", - "We do this using the syntax `{{skill_name.function_name}}`. You can read more about our prompte templating syntax [here](../../../docs/PROMPT_TEMPLATE_LANGUAGE.md). " + "We do this using the syntax `{{plugin_name.function_name}}`. 
You can read more about our prompte templating syntax [here](../../../docs/PROMPT_TEMPLATE_LANGUAGE.md).\n" ] }, { @@ -427,36 +441,25 @@ "outputs": [], "source": [ "import random\n", - "from semantic_kernel.skill_definition import sk_function\n", + "from semantic_kernel.plugin_definition import kernel_function\n", "\n", "\n", - "class GenerateNamesSkill:\n", + "class GenerateNamesPlugin:\n", " \"\"\"\n", " Description: Generate character names.\n", " \"\"\"\n", "\n", " # The default function name will be the name of the function itself, however you can override this\n", - " # by setting the name= in the @sk_function decorator. In this case, we're using\n", + " # by setting the name= in the @kernel_function decorator. In this case, we're using\n", " # the same name as the function name for simplicity.\n", - " @sk_function(\n", - " description=\"Generate character names\",\n", - " name=\"generate_names\"\n", - " )\n", + " @kernel_function(description=\"Generate character names\", name=\"generate_names\")\n", " def generate_names(self) -> str:\n", " \"\"\"\n", " Generate two names.\n", " Returns:\n", " str\n", " \"\"\"\n", - " names = {\n", - " \"Hoagie\",\n", - " \"Hamilton\",\n", - " \"Bacon\",\n", - " \"Pizza\",\n", - " \"Boots\",\n", - " \"Shorts\",\n", - " \"Tuna\"\n", - " }\n", + " names = {\"Hoagie\", \"Hamilton\", \"Bacon\", \"Pizza\", \"Boots\", \"Shorts\", \"Tuna\"}\n", " first_name = random.choice(list(names))\n", " names.remove(first_name)\n", " second_name = random.choice(list(names))\n", @@ -470,8 +473,8 @@ "metadata": {}, "outputs": [], "source": [ - "generate_names_skill = kernel.import_skill(GenerateNamesSkill(), skill_name=\"GenerateNames\")\n", - "generate_names = generate_names_skill[\"generate_names\"]" + "generate_names_plugin = kernel.import_plugin(GenerateNamesPlugin(), plugin_name=\"GenerateNames\")\n", + "generate_names = generate_names_plugin[\"generate_names\"]" ] }, { @@ -493,22 +496,6 @@ "\"\"\"" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "73aca517", - "metadata": {}, - "outputs": [], - "source": [ - "corgi_story = kernel.create_semantic_function(prompt_template=sk_prompt,\n", - " function_name=\"CorgiStory\",\n", - " skill_name=\"CorgiSkill\",\n", - " description=\"Write a short story about two Corgis on an adventure\",\n", - " max_tokens=500,\n", - " temperature=0.5,\n", - " top_p=0.5)" - ] - }, { "cell_type": "code", "execution_count": null, @@ -516,12 +503,9 @@ "metadata": {}, "outputs": [], "source": [ - "context_variables = sk.ContextVariables(variables={\n", - " \"min\": 1,\n", - " \"max\": 5,\n", - " \"language\": \"Spanish\"\n", - "})\n", - "context_variables['paragraph_count'] = generate_number.invoke(variables=context_variables).result" + "context_variables = sk.ContextVariables(variables={\"min\": \"1\", \"max\": \"5\", \"language\": \"Spanish\"})\n", + "num = await generate_number.invoke(variables=context_variables)\n", + "context_variables[\"paragraph_count\"] = str(num.result)" ] }, { @@ -532,7 +516,7 @@ "outputs": [], "source": [ "# Pass the output to the semantic story function\n", - "story = await corgi_story.invoke_async(variables=context_variables)" + "story = await corgi_story.invoke(variables=context_variables)" ] }, { @@ -542,8 +526,11 @@ "metadata": {}, "outputs": [], "source": [ - "print(\"Generating a corgi story exactly {} paragraphs long in {} language: \".format(context_variables[\"paragraph_count\"],\n", - " context_variables[\"language\"]))\n", + "print(\n", + " \"Generating a corgi story exactly {} 
paragraphs long in {} language: \".format(\n", + " context_variables[\"paragraph_count\"], context_variables[\"language\"]\n", + " )\n", + ")\n", "print(\"=====================================================\")\n", "print(story)" ] @@ -560,7 +547,7 @@ "\n", "- We've learned how to create native and semantic functions and register them to the kernel\n", "- We've seen how we can use context variables to pass in more custom variables into our prompt\n", - "- We've seen how we can call native functions within semantic function prompts. \n" + "- We've seen how we can call native functions within semantic function prompts.\n" ] } ], @@ -580,7 +567,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/python/notebooks/09-groundedness-checking.ipynb b/python/notebooks/09-groundedness-checking.ipynb index 8dd15ca7e453..f704be497b1d 100644 --- a/python/notebooks/09-groundedness-checking.ipynb +++ b/python/notebooks/09-groundedness-checking.ipynb @@ -5,7 +5,7 @@ "id": "f5c76c5f", "metadata": {}, "source": [ - "# Groundedness Checking Skills\n", + "# Groundedness Checking Plugins\n", "\n", "A well-known problem with large language models (LLMs) is that they make things up. These are sometimes called 'hallucinations' but a safer (and less anthropomorphic) term is 'ungrounded addition' - something in the text which cannot be firmly established. When attempting to establish whether or not something in an LLM response is 'true' we can either check for it in the supplied prompt (this is called 'narrow grounding') or use our general knowledge ('broad grounding'). Note that narrow grounding can lead to things being classified as 'true, but ungrounded.' For example \"I live in Switzerland\" is **not** _narrowly_ grounded in \"I live in Geneva\" even though it must be true (it **is** _broadly_ grounded).\n", "\n", @@ -75,6 +75,16 @@ "We prepare our kernel in the usual way:" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "7b22d324", + "metadata": {}, + "outputs": [], + "source": [ + "!python -m pip install semantic-kernel==0.5.1.dev0" + ] + }, { "cell_type": "code", "execution_count": null, @@ -83,19 +93,26 @@ "outputs": [], "source": [ "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureChatCompletion,\n", + " OpenAIChatCompletion,\n", + ")\n", "\n", "kernel = sk.Kernel()\n", "\n", - "useAzureOpenAI = True\n", + "useAzureOpenAI = False\n", "\n", "# Configure AI service used by the kernel\n", "if useAzureOpenAI:\n", " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat_completion\", AzureChatCompletion(deployment, endpoint, api_key))\n", + " azure_chat_service = AzureChatCompletion(\n", + " deployment_name=\"turbo\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your chat model\n", + " kernel.add_chat_service(\"chat_completion\", azure_chat_service)\n", "else:\n", " api_key, org_id = sk.openai_settings_from_dot_env()\n", - " kernel.add_chat_service(\"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))" + " oai_chat_service = OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id)\n", + " kernel.add_chat_service(\"chat-gpt\", oai_chat_service)" ] }, { @@ -103,9 +120,9 @@ "id": "0c65f786", 
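With the async API, each stage of the grounding plugin is awaited directly. Assuming the `entity_extraction`, `reference_check`, and `entity_excision` functions pulled from the imported `GroundingPlugin` later in this notebook, plus the `summary_text` and grounding `context` prepared below, the end-to-end flow condenses to roughly:

```python
async def run_grounding_pipeline(entity_extraction, reference_check, entity_excision, summary_text, context):
    """Condensed sketch of the three stages this notebook walks through cell by cell."""
    extraction = await entity_extraction(summary_text, context=context)       # 1. extract entities from the summary
    grounding = await reference_check(extraction.result, context=context)     # 2. reference-check them against the grounding text
    excision = await entity_excision(summary_text, context=context)           # 3. excise ungrounded additions
    return excision.result  # any context updates between stages follow the notebook cells
```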
"metadata": {}, "source": [ - "## Import the Skills\n", + "## Import the Plugins\n", "\n", - "We are going to be using the grounding skill, to check its quality, and remove ungrounded additions:" + "We are going to be using the grounding plugin, to check its quality, and remove ungrounded additions:" ] }, { @@ -115,12 +132,10 @@ "metadata": {}, "outputs": [], "source": [ - "from semantic_kernel.core_skills.text_skill import TextSkill\n", + "# note: using plugins from the samples folder\n", + "plugins_directory = \"../../samples/plugins\"\n", "\n", - "# note: using skills from the samples folder\n", - "skills_directory = \"../../samples/skills\"\n", - "\n", - "groundingSemanticFunctions = kernel.import_semantic_skill_from_directory(skills_directory, \"GroundingSkill\")" + "groundingSemanticFunctions = kernel.import_semantic_plugin_from_directory(plugins_directory, \"GroundingPlugin\")" ] }, { @@ -185,7 +200,7 @@ "- A reference to Rome has been added\n", "\n", "\n", - "The grounding skill has three stages:\n", + "The grounding plugin has three stages:\n", "\n", "1. Extract entities from a summary text\n", "2. Perform a reference check against the grounding text\n", @@ -201,7 +216,7 @@ "source": [ "### Preparing the Context\n", "\n", - "Semantic functions operate in a context, which provides extra parameters for their operation. For the grounding skill, the context is expected to supply the topic for the reference checking, and some particular examples of them:" + "Semantic functions operate in a context, which provides extra parameters for their operation. For the grounding plugin, the context is expected to supply the topic for the reference checking, and some particular examples of them:" ] }, { @@ -233,7 +248,7 @@ "metadata": {}, "outputs": [], "source": [ - "extraction_result = entity_extraction(summary_text, context=context)\n", + "extraction_result = await entity_extraction(summary_text, context=context)\n", "\n", "print(extraction_result.result)" ] @@ -281,7 +296,7 @@ "metadata": {}, "outputs": [], "source": [ - "grounding_result = reference_check(extraction_result.result, context=context)\n", + "grounding_result = await reference_check(extraction_result.result, context=context)\n", "\n", "print(grounding_result.result)" ] @@ -321,18 +336,10 @@ "metadata": {}, "outputs": [], "source": [ - "excision_result = entity_excision(summary_text, context=context)\n", + "excision_result = await entity_excision(summary_text, context=context)\n", "\n", "print(excision_result.result)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "78ddf011", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -351,7 +358,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.3" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/python/notebooks/10-multiple-results-per-prompt.ipynb b/python/notebooks/10-multiple-results-per-prompt.ipynb index 9795fdb8c667..018c8892c169 100644 --- a/python/notebooks/10-multiple-results-per-prompt.ipynb +++ b/python/notebooks/10-multiple-results-per-prompt.ipynb @@ -1,326 +1,404 @@ { - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "68e1c158", - "metadata": {}, - "source": [ - "# Multiple Results" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "fb81bacd", - "metadata": {}, - "source": [ - "In this notebook we show how you can in a single request, have the LLM model return multiple results per prompt. 
This is useful for running experiments where you want to evaluate the robustness of your prompt and the parameters of your config against a particular large language model." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a77bdf89", - "metadata": {}, - "outputs": [], - "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "508ad44f", - "metadata": {}, - "outputs": [], - "source": [ - "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai import ChatRequestSettings, CompleteRequestSettings\n", - "from semantic_kernel.connectors.ai.open_ai import AzureTextCompletion, AzureChatCompletion, OpenAITextCompletion, OpenAIChatCompletion\n", - "from semantic_kernel.connectors.ai.hugging_face import HuggingFaceTextCompletion" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "d8ddffc1", - "metadata": {}, - "source": [ - "First, we will set up the text and chat services we will be submitting prompts to." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8f8dcbc6", - "metadata": {}, - "outputs": [], - "source": [ - "kernel = sk.Kernel()\n", - "\n", - "# Configure Azure LLM service\n", - "deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - "azure_text_service = AzureTextCompletion(deployment, endpoint, api_key)\n", - "azure_chat_service = AzureChatCompletion(deployment, endpoint, api_key)\n", - "\n", - "# Configure OpenAI service\n", - "api_key, org_id = sk.openai_settings_from_dot_env()\n", - "oai_text_service = OpenAITextCompletion(\"text-davinci-003\", api_key, org_id)\n", - "oai_chat_service = OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id)\n", - "\n", - "# Configure Hugging Face service\n", - "hf_text_service = HuggingFaceTextCompletion(\"distilgpt2\", task=\"text-generation\")" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "50561d82", - "metadata": {}, - "source": [ - "Next, we'll set up the completion request settings for text completion services." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "628c843e", - "metadata": {}, - "outputs": [], - "source": [ - "request_settings = CompleteRequestSettings(\n", - " max_tokens=80,\n", - " temperature=0.7,\n", - " top_p=1,\n", - " frequency_penalty=0.5,\n", - " presence_penalty=0.5,\n", - " number_of_responses=3\n", - ")" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "857a9c89", - "metadata": {}, - "source": [ - "## Multiple Open AI Text Completions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e2979db8", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"what is the purpose of a rubber duck?\"\n", - "results = await oai_text_service.complete_async(prompt, request_settings)\n", - "i = 1\n", - "for result in results:\n", - " print(f\"Result {i}: {result}\")\n", - " i += 1" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "4288d09f", - "metadata": {}, - "source": [ - "## Multiple Azure Open AI Text Completions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5319f14d", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"provide me a list of possible meanings for the acronym 'ORLD'\"\n", - "results = await azure_text_service.complete_async(prompt, request_settings)\n", - "i = 1\n", - "for result in results:\n", - " print(f\"Result {i}: {result}\")\n", - " i += 1" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "eb548f9c", - "metadata": {}, - "source": [ - "## Multiple Hugging Face Text Completions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9525e4f3", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"The purpose of a rubber duck is\"\n", - "results = await hf_text_service.complete_async(prompt, request_settings)\n", - "i = 1\n", - "for result in results:\n", - " print(f\"Result {i}: {result}\")\n", - " i += 1" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "da632e12", - "metadata": {}, - "source": [ - "Here, we're setting up the settings for Chat completions." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e5f11e46", - "metadata": {}, - "outputs": [], - "source": [ - "chat_request_settings = ChatRequestSettings(\n", - " max_tokens=80,\n", - " temperature=0.7,\n", - " top_p=1,\n", - " frequency_penalty=0.5,\n", - " presence_penalty=0.5,\n", - " number_of_responses=3\n", - ")" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "d6bf238e", - "metadata": {}, - "source": [ - "## Multiple OpenAI Chat Completions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dabc6a4c", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"It's a beautiful day outside, birds are singing, flowers are blooming. On days like these, kids like you...\"\n", - "results = await oai_chat_service.complete_chat_async([(\"user\", prompt)], chat_request_settings)\n", - "i = 0\n", - "for result in results:\n", - " print(f\"Result {i}: {result}\")\n", - " i += 1" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "cdb8f740", - "metadata": {}, - "source": [ - "## Multiple Azure OpenAI Chat Completions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b74a64a9", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"Tomorow is going to be a great day, I can feel it. 
I'm going to wake up early, go for a run, and then...\"\n", - "results = await azure_chat_service.complete_chat_async([(\"user\", prompt)], chat_request_settings)\n", - "i = 0\n", - "for result in results:\n", - " print(f\"Result {i}: {result}\")\n", - " i += 1" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "98c8191d", - "metadata": {}, - "source": [ - "## Streaming Multiple Results\n", - "\n", - "Here is an example pattern if you want to stream your multiple results. Note that this is not supported for Hugging Face text completions at this time." - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "id": "26a37702", - "metadata": {}, - "outputs": [ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "id": "68e1c158", + "metadata": {}, + "source": [ + "# Multiple Results\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "fb81bacd", + "metadata": {}, + "source": [ + "In this notebook we show how you can in a single request, have the LLM model return multiple results per prompt. This is useful for running experiments where you want to evaluate the robustness of your prompt and the parameters of your config against a particular large language model.\n" + ] + }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "Result 1: \n", - "\n", - "A rubber duck is a toy shaped like a bath duck, often yellow with an orange bill and white eyes. It is often used as a bath time companion for children to play games with. Rubber ducks can also be used as decorations in the bathroom or around the house, and for other various games and activities.\n", - "Result 2: \n", - "\n", - "A rubber duck is a type of toy shaped like a duck, often used in bath time play. It can also be used as a decorative item or to relieve stress.\n", - "Result 3: \n", - "\n", - "A rubber duck is a toy shaped like a duck, often yellow and made of rubber. 
Its purpose is to provide children with entertainment and promote imaginative play.\n", - "----------------------------------------\n" - ] + "cell_type": "code", + "execution_count": null, + "id": "a77bdf89", + "metadata": {}, + "outputs": [], + "source": [ + "!python -m pip install semantic-kernel==0.5.1.dev0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3f4bfee4", + "metadata": {}, + "outputs": [], + "source": [ + "from services import Service\n", + "\n", + "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n", + "selectedService = Service.AzureOpenAI" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "508ad44f", + "metadata": {}, + "outputs": [], + "source": [ + "import semantic_kernel as sk\n", + "\n", + "if selectedService == Service.OpenAI or selectedService == Service.AzureOpenAI:\n", + " from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import (\n", + " OpenAITextPromptExecutionSettings,\n", + " OpenAIChatPromptExecutionSettings,\n", + " )\n", + " from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import (\n", + " AzureChatPromptExecutionSettings,\n", + " )\n", + " from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureTextCompletion,\n", + " AzureChatCompletion,\n", + " OpenAITextCompletion,\n", + " OpenAIChatCompletion,\n", + " )\n", + "if selectedService == Service.HuggingFace:\n", + " from semantic_kernel.connectors.ai.hugging_face import HuggingFaceTextCompletion" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "d8ddffc1", + "metadata": {}, + "source": [ + "First, we will set up the text and chat services we will be submitting prompts to.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f8dcbc6", + "metadata": {}, + "outputs": [], + "source": [ + "kernel = sk.Kernel()\n", + "\n", + "# Configure Azure LLM service\n", + "if selectedService == Service.AzureOpenAI:\n", + " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", + " azure_text_service = AzureTextCompletion(\n", + " deployment_name=\"gpt-35-turbo-instruct\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your text model (e.g. 
gpt-35-turbo-instruct or text-davinci-003)\n", + " azure_chat_service = AzureChatCompletion(\n", + " deployment_name=\"gpt-35-turbo\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your chat model\n", + "\n", + "# Configure OpenAI service\n", + "if selectedService == Service.OpenAI:\n", + " api_key, org_id = sk.openai_settings_from_dot_env()\n", + " oai_text_service = OpenAITextCompletion(ai_model_id=\"gpt-3.5-turbo-instruct\", api_key=api_key, org_id=org_id)\n", + " oai_chat_service = OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id)\n", + "\n", + "# Configure Hugging Face service\n", + "if selectedService == Service.HuggingFace:\n", + " hf_text_service = HuggingFaceTextCompletion(ai_model_id=\"distilgpt2\", task=\"text-generation\")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "50561d82", + "metadata": {}, + "source": [ + "Next, we'll set up the completion request settings for text completion services.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "628c843e", + "metadata": {}, + "outputs": [], + "source": [ + "oai_text_prompt_execution_settings = OpenAITextPromptExecutionSettings(\n", + " extension_data={\n", + " \"max_tokens\": 80,\n", + " \"temperature\": 0.7,\n", + " \"top_p\": 1,\n", + " \"frequency_penalty\": 0.5,\n", + " \"presence_penalty\": 0.5,\n", + " \"number_of_responses\": 3,\n", + " }\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "857a9c89", + "metadata": {}, + "source": [ + "## Multiple Open AI Text Completions\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e2979db8", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.OpenAI:\n", + " prompt = \"what is the purpose of a rubber duck?\"\n", + " results = await oai_text_service.complete(prompt=prompt, settings=oai_text_prompt_execution_settings)\n", + " i = 1\n", + " for result in results:\n", + " print(f\"Result {i}: {result}\")\n", + " i += 1" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "4288d09f", + "metadata": {}, + "source": [ + "## Multiple Azure Open AI Text Completions\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5319f14d", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.AzureOpenAI:\n", + " prompt = \"provide me a list of possible meanings for the acronym 'ORLD'\"\n", + " results = await azure_text_service.complete(prompt=prompt, settings=oai_text_prompt_execution_settings)\n", + " i = 1\n", + " for result in results:\n", + " print(f\"Result {i}: {result}\")\n", + " i += 1" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "eb548f9c", + "metadata": {}, + "source": [ + "## Multiple Hugging Face Text Completions\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a148709", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.HuggingFace:\n", + " from semantic_kernel.connectors.ai.hugging_face.hf_prompt_execution_settings import (\n", + " HuggingFacePromptExecutionSettings,\n", + " )\n", + "\n", + " hf_prompt_execution_settings = HuggingFacePromptExecutionSettings(\n", + " extension_data={\"max_new_tokens\": 80, \"temperature\": 0.7, \"top_p\": 1}\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9525e4f3", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.HuggingFace:\n", + " prompt 
= \"The purpose of a rubber duck is\"\n", + " results = await hf_text_service.complete(prompt=prompt, prompt_execution_settings=hf_prompt_execution_settings)\n", + " print(\"\".join(results))" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "da632e12", + "metadata": {}, + "source": [ + "Here, we're setting up the settings for Chat completions.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5f11e46", + "metadata": {}, + "outputs": [], + "source": [ + "oai_chat_prompt_execution_settings = OpenAIChatPromptExecutionSettings(\n", + " max_tokens=80,\n", + " temperature=0.7,\n", + " top_p=1,\n", + " frequency_penalty=0.5,\n", + " presence_penalty=0.5,\n", + " number_of_responses=3,\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "d6bf238e", + "metadata": {}, + "source": [ + "## Multiple OpenAI Chat Completions\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dabc6a4c", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.OpenAI:\n", + " role = \"user\"\n", + " content = (\n", + " \"It's a beautiful day outside, birds are singing, flowers are blooming. On days like these, kids like you...\"\n", + " )\n", + " message = {\"role\": role, \"content\": content}\n", + " results = await oai_chat_service.complete_chat(messages=[message], settings=oai_chat_prompt_execution_settings)\n", + " i = 0\n", + " for result in results:\n", + " print(f\"Result {i}: {result[0]}\")\n", + " i += 1" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "cdb8f740", + "metadata": {}, + "source": [ + "## Multiple Azure OpenAI Chat Completions\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "66ba4767", + "metadata": {}, + "outputs": [], + "source": [ + "az_oai_prompt_execution_settings = AzureChatPromptExecutionSettings(\n", + " max_tokens=80,\n", + " temperature=0.7,\n", + " top_p=1,\n", + " frequency_penalty=0.5,\n", + " presence_penalty=0.5,\n", + " number_of_responses=3,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b74a64a9", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.AzureOpenAI:\n", + " role = \"user\"\n", + " content = \"Tomorow is going to be a great day, I can feel it. I'm going to wake up early, go for a run, and then...\"\n", + " message = {\"role\": role, \"content\": content}\n", + " results = await azure_chat_service.complete_chat(messages=[message], settings=az_oai_prompt_execution_settings)\n", + " i = 0\n", + " for result in results:\n", + " print(f\"Result {i}: {result[0]}\")\n", + " i += 1" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "98c8191d", + "metadata": {}, + "source": [ + "## Streaming Multiple Results\n", + "\n", + "Here is an example pattern if you want to stream your multiple results. 
Note that this is not supported for Hugging Face text completions at this time.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26a37702", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.OpenAI:\n", + " import os\n", + " from IPython.display import clear_output\n", + " import time\n", + "\n", + " # Determine the clear command based on OS\n", + " clear_command = \"cls\" if os.name == \"nt\" else \"clear\"\n", + "\n", + " prompt = \"what is the purpose of a rubber duck?\"\n", + " stream = oai_text_service.complete_stream(prompt=prompt, settings=oai_text_prompt_execution_settings)\n", + " number_of_responses = oai_text_prompt_execution_settings.number_of_responses\n", + " texts = [\"\"] * number_of_responses\n", + "\n", + " last_clear_time = time.time()\n", + " clear_interval = 0.5 # seconds\n", + "\n", + " # Note: there are some quirks with displaying the output, which sometimes flashes and disappears.\n", + " # This could be influenced by a few factors specific to Jupyter notebooks and asynchronous processing.\n", + " # The following code attempts to buffer the results to avoid the output flashing on/off the screen.\n", + "\n", + " async for results in stream:\n", + " current_time = time.time()\n", + "\n", + " # Update texts with new results\n", + " for idx, result in enumerate(results):\n", + " if idx < number_of_responses:\n", + " texts[idx] += result\n", + "\n", + " # Clear and display output at intervals\n", + " if current_time - last_clear_time > clear_interval:\n", + " clear_output(wait=True)\n", + " for idx, text in enumerate(texts):\n", + " print(f\"Result {idx + 1}: {text}\")\n", + " last_clear_time = current_time\n", + "\n", + " print(\"----------------------------------------\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" } - ], - "source": [ - "from IPython.display import clear_output\n", - "\n", - "if os.name == \"nt\":\n", - " clear = \"cls\"\n", - "else:\n", - " clear = \"clear\"\n", - "\n", - "prompt = \"what is the purpose of a rubber duck?\"\n", - "stream = oai_text_service.complete_stream_async(prompt, request_settings)\n", - "texts = [\"\"] * request_settings.number_of_responses\n", - "async for results in stream:\n", - " i = 1\n", - " clear_output(wait=True)\n", - " for result in results:\n", - " texts[i - 1] += result\n", - " print(f\"Result {i}: {texts[i - 1]}\")\n", - " i += 1\n", - "\n", - "print(\"----------------------------------------\")\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/python/notebooks/11-streaming-completions.ipynb b/python/notebooks/11-streaming-completions.ipynb index 8179e69e7e36..c089fe9864e6 100644 --- a/python/notebooks/11-streaming-completions.ipynb +++ b/python/notebooks/11-streaming-completions.ipynb 
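One API change worth calling out before the streaming notebook: chat prompts are now passed as role/content dictionaries rather than `(role, text)` tuples, for both the batch and streaming chat calls. A minimal sketch, assuming a chat service configured as in the cells above:

```python
from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import (
    OpenAIChatPromptExecutionSettings,
)


async def ask(chat_service, text: str):
    # Messages are plain dicts with "role" and "content" keys.
    settings = OpenAIChatPromptExecutionSettings(max_tokens=80)
    return await chat_service.complete_chat(messages=[{"role": "user", "content": text}], settings=settings)
```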
@@ -1,249 +1,331 @@ { - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "id": "68e1c158", - "metadata": {}, - "source": [ - "# Streaming Results" - ] + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "id": "68e1c158", + "metadata": {}, + "source": [ + "# Streaming Results\n", + "\n", + "Here is an example pattern if you want to stream your multiple results. Note that this is not supported for Hugging Face text completions at this time.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a77bdf89", + "metadata": {}, + "outputs": [], + "source": [ + "!python -m pip install semantic-kernel==0.5.1.dev0" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from services import Service\n", + "\n", + "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n", + "selectedService = Service.AzureOpenAI" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "508ad44f", + "metadata": {}, + "outputs": [], + "source": [ + "import semantic_kernel as sk\n", + "\n", + "if selectedService == Service.OpenAI or selectedService == Service.AzureOpenAI:\n", + " from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import (\n", + " OpenAITextPromptExecutionSettings,\n", + " OpenAIChatPromptExecutionSettings,\n", + " )\n", + " from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import (\n", + " AzureChatPromptExecutionSettings,\n", + " )\n", + " from semantic_kernel.connectors.ai.open_ai import (\n", + " AzureTextCompletion,\n", + " AzureChatCompletion,\n", + " OpenAITextCompletion,\n", + " OpenAIChatCompletion,\n", + " )\n", + "if selectedService == Service.HuggingFace:\n", + " from semantic_kernel.connectors.ai.hugging_face import HuggingFaceTextCompletion" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "d8ddffc1", + "metadata": {}, + "source": [ + "First, we will set up the text and chat services we will be submitting prompts to.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f8dcbc6", + "metadata": {}, + "outputs": [], + "source": [ + "kernel = sk.Kernel()\n", + "\n", + "# Configure Azure LLM service\n", + "if selectedService == Service.AzureOpenAI:\n", + " deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", + " azure_text_service = AzureTextCompletion(\n", + " deployment_name=\"text-davinci-003\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your text model (e.g. 
gpt-35-turbo-instruct or text-davinci-003)\n", + " azure_chat_service = AzureChatCompletion(\n", + " deployment_name=\"gpt-35-turbo\", endpoint=endpoint, api_key=api_key\n", + " ) # set the deployment name to the value of your chat model\n", + "\n", + "# Configure OpenAI service\n", + "if selectedService == Service.OpenAI:\n", + " api_key, org_id = sk.openai_settings_from_dot_env()\n", + " oai_text_service = OpenAITextCompletion(ai_model_id=\"gpt-3.5-turbo-instruct\", api_key=api_key, org_id=org_id)\n", + " oai_chat_service = OpenAIChatCompletion(ai_model_id=\"gpt-3.5-turbo\", api_key=api_key, org_id=org_id)\n", + "\n", + "# Configure Hugging Face service\n", + "if selectedService == Service.HuggingFace:\n", + " hf_text_service = HuggingFaceTextCompletion(ai_model_id=\"distilgpt2\", task=\"text-generation\")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "50561d82", + "metadata": {}, + "source": [ + "Next, we'll set up the completion request settings for text completion services.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "628c843e", + "metadata": {}, + "outputs": [], + "source": [ + "oai_prompt_execution_settings = OpenAITextPromptExecutionSettings(\n", + " max_tokens=150,\n", + " temperature=0.7,\n", + " top_p=1,\n", + " frequency_penalty=0.5,\n", + " presence_penalty=0.5,\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "857a9c89", + "metadata": {}, + "source": [ + "## Streaming Open AI Text Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e2979db8", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.OpenAI:\n", + " prompt = \"what is the purpose of a rubber duck?\"\n", + " stream = oai_text_service.complete_stream(prompt=prompt, settings=oai_prompt_execution_settings)\n", + " async for text in stream:\n", + " print(text, end=\"\") # end = \"\" to avoid newlines" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "4288d09f", + "metadata": {}, + "source": [ + "## Streaming Azure Open AI Text Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5319f14d", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.AzureOpenAI:\n", + " prompt = \"provide me a list of possible meanings for the acronym 'ORLD'\"\n", + " stream = azure_text_service.complete_stream(prompt=prompt, settings=oai_prompt_execution_settings)\n", + " async for text in stream:\n", + " print(text, end=\"\") # end = \"\" to avoid newlines" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "eb548f9c", + "metadata": {}, + "source": [ + "## Streaming Hugging Face Text Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "be7b1c2e", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.HuggingFace:\n", + " from semantic_kernel.connectors.ai.hugging_face.hf_prompt_execution_settings import (\n", + " HuggingFacePromptExecutionSettings,\n", + " )\n", + "\n", + " hf_prompt_execution_settings = HuggingFacePromptExecutionSettings(\n", + " extension_data={\n", + " \"max_new_tokens\": 80,\n", + " \"top_p\": 1,\n", + " \"eos_token_id\": 11,\n", + " \"pad_token_id\": 0,\n", + " }\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9525e4f3", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.HuggingFace:\n", + " prompt = \"The purpose of a rubber duck is\"\n", + " stream = 
hf_text_service.complete_stream(prompt=prompt, prompt_execution_settings=hf_prompt_execution_settings)\n", + " async for text in stream:\n", + " print(text, end=\"\") # end = \"\" to avoid newlines" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "da632e12", + "metadata": {}, + "source": [ + "Here, we're setting up the settings for Chat completions.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5f11e46", + "metadata": {}, + "outputs": [], + "source": [ + "oai_chat_prompt_execution_settings = OpenAIChatPromptExecutionSettings(\n", + " max_tokens=150,\n", + " temperature=0.7,\n", + " top_p=1,\n", + " frequency_penalty=0.5,\n", + " presence_penalty=0.5,\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "d6bf238e", + "metadata": {}, + "source": [ + "## Streaming OpenAI Chat Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dabc6a4c", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.OpenAI:\n", + " role = \"system\"\n", + " content = \"You are an AI assistant that helps people find information.\"\n", + " message = {\"role\": role, \"content\": content}\n", + " stream = oai_chat_service.complete_chat_stream(messages=[message], settings=oai_chat_prompt_execution_settings)\n", + " async for text in stream:\n", + " print(text, end=\"\") # end = \"\" to avoid newlines" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "cdb8f740", + "metadata": {}, + "source": [ + "## Streaming Azure OpenAI Chat Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "da1e9f59", + "metadata": {}, + "outputs": [], + "source": [ + "az_oai_chat_prompt_execution_settings = AzureChatPromptExecutionSettings(\n", + " max_tokens=150,\n", + " temperature=0.7,\n", + " top_p=1,\n", + " frequency_penalty=0.5,\n", + " presence_penalty=0.5,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b74a64a9", + "metadata": {}, + "outputs": [], + "source": [ + "if selectedService == Service.AzureOpenAI:\n", + " role = \"system\"\n", + " content = \"You are an AI assistant that helps people find information.\"\n", + " message = {\"role\": role, \"content\": content}\n", + " stream = azure_chat_service.complete_chat_stream(messages=[message], settings=az_oai_chat_prompt_execution_settings)\n", + " async for text in stream:\n", + " print(text, end=\"\") # end = \"\" to avoid newlines" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } }, - { - "cell_type": "code", - "execution_count": null, - "id": "a77bdf89", - "metadata": {}, - "outputs": [], - "source": [ - "!python -m pip install semantic-kernel==0.3.10.dev0" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "508ad44f", - "metadata": {}, - "outputs": [], - "source": [ - "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai import ChatRequestSettings, CompleteRequestSettings\n", - "from semantic_kernel.connectors.ai.open_ai import AzureTextCompletion, AzureChatCompletion, OpenAITextCompletion, OpenAIChatCompletion\n", - "from semantic_kernel.connectors.ai.hugging_face import 
HuggingFaceTextCompletion" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "d8ddffc1", - "metadata": {}, - "source": [ - "First, we will set up the text and chat services we will be submitting prompts to." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8f8dcbc6", - "metadata": {}, - "outputs": [], - "source": [ - "kernel = sk.Kernel()\n", - "\n", - "# Configure Azure LLM service\n", - "deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", - "azure_text_service = AzureTextCompletion(deployment, endpoint, api_key)\n", - "azure_chat_service = AzureChatCompletion(deployment, endpoint, api_key)\n", - "\n", - "# Configure OpenAI service\n", - "api_key, org_id = sk.openai_settings_from_dot_env()\n", - "oai_text_service = OpenAITextCompletion(\"text-davinci-003\", api_key, org_id)\n", - "oai_chat_service = OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id)\n", - "\n", - "# Configure Hugging Face service\n", - "hf_text_service = HuggingFaceTextCompletion(\"distilgpt2\", task=\"text-generation\")" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "50561d82", - "metadata": {}, - "source": [ - "Next, we'll set up the completion request settings for text completion services." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "628c843e", - "metadata": {}, - "outputs": [], - "source": [ - "request_settings = CompleteRequestSettings(\n", - " max_tokens=150,\n", - " temperature=0.7,\n", - " top_p=1,\n", - " frequency_penalty=0.5,\n", - " presence_penalty=0.5\n", - ")" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "857a9c89", - "metadata": {}, - "source": [ - "## Streaming Open AI Text Completion" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e2979db8", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"what is the purpose of a rubber duck?\"\n", - "stream = oai_text_service.complete_stream_async(prompt, request_settings)\n", - "async for text in stream:\n", - " print(text, end = \"\") # end = \"\" to avoid newlines" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "4288d09f", - "metadata": {}, - "source": [ - "## Streaming Azure Open AI Text Completion" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5319f14d", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"provide me a list of possible meanings for the acronym 'ORLD'\"\n", - "stream = azure_text_service.complete_stream_async(prompt, request_settings)\n", - "async for text in stream:\n", - " print(text, end = \"\") # end = \"\" to avoid newlines" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "eb548f9c", - "metadata": {}, - "source": [ - "## Streaming Hugging Face Text Completion" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9525e4f3", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"The purpose of a rubber duck is\"\n", - "stream = hf_text_service.complete_stream_async(prompt, request_settings)\n", - "async for text in stream:\n", - " print(text, end = \"\") # end = \"\" to avoid newlines" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "da632e12", - "metadata": {}, - "source": [ - "Here, we're setting up the settings for Chat completions." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e5f11e46", - "metadata": {}, - "outputs": [], - "source": [ - "chat_request_settings = ChatRequestSettings(\n", - " max_tokens=150,\n", - " temperature=0.7,\n", - " top_p=1,\n", - " frequency_penalty=0.5,\n", - " presence_penalty=0.5,\n", - ")" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "d6bf238e", - "metadata": {}, - "source": [ - "## Streaming OpenAI Chat Completion" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dabc6a4c", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"It's a beautiful day outside, birds are singing, flowers are blooming. On days like these, kids like you...\"\n", - "stream = oai_chat_service.complete_chat_stream_async([(\"user\", prompt)], chat_request_settings)\n", - "async for text in stream:\n", - " print(text, end = \"\") # end = \"\" to avoid newlines" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "cdb8f740", - "metadata": {}, - "source": [ - "## Streaming Azure OpenAI Chat Completion" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b74a64a9", - "metadata": {}, - "outputs": [], - "source": [ - "prompt = \"Tomorow is going to be a great day, I can feel it. I'm going to wake up early, go for a run, and then...\"\n", - "stream = azure_chat_service.complete_chat_stream_async([(\"user\", prompt)], chat_request_settings)\n", - "async for text in stream:\n", - " print(text, end = \"\") # end = \"\" to avoid newlines" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.10" - } - }, - "nbformat": 4, - "nbformat_minor": 5 + "nbformat": 4, + "nbformat_minor": 5 } diff --git a/python/notebooks/services.py b/python/notebooks/services.py new file mode 100644 index 000000000000..689be7ed5d5d --- /dev/null +++ b/python/notebooks/services.py @@ -0,0 +1,18 @@ +""" +This module defines an enumeration representing different services. +""" + +from enum import Enum + + +class Service(Enum): + """ + Attributes: + OpenAI (str): Represents the OpenAI service. + AzureOpenAI (str): Represents the Azure OpenAI service. + HuggingFace (str): Represents the HuggingFace service. + """ + + OpenAI = "openai" + AzureOpenAI = "azureopenai" + HuggingFace = "huggingface" diff --git a/python/notebooks/third_party/weaviate-persistent-memory.ipynb b/python/notebooks/third_party/weaviate-persistent-memory.ipynb index 58a3fcfd1f4a..aa72aa3450f7 100644 --- a/python/notebooks/third_party/weaviate-persistent-memory.ipynb +++ b/python/notebooks/third_party/weaviate-persistent-memory.ipynb @@ -1,536 +1,512 @@ { - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Introduction" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This notebook shows how to replace the `VolatileMemoryStore` memory storage used in a [previous notebook](./06-memory-and-embeddings.ipynb) with a `WeaviateMemoryStore`.\n", - "\n", - "`WeaviateMemoryStore` is an example of a persistent (i.e. long-term) memory store backed by the Weaviate vector database." 
- ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# About Weaviate" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "[Weaviate](https://weaviate.io/) is an open-source vector database designed to scale seamlessly into billions of data objects. This implementation supports hybrid search out-of-the-box (meaning it will perform better for keyword searches)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can run Weaviate in 5 ways:\n", - "\n", - "- **SaaS** – with [Weaviate Cloud Services (WCS)](https://weaviate.io/pricing).\n", - "\n", - " WCS is a fully managed service that takes care of hosting, scaling, and updating your Weaviate instance. You can try it out for free with a sandbox that lasts for 14 days.\n", - "\n", - " To set up a SaaS Weaviate instance with WCS:\n", - "\n", - " 1. Navigate to [Weaviate Cloud Console](https://console.weaviate.cloud/).\n", - " 2. Register or sign in to your WCS account.\n", - " 3. Create a new cluster with the following settings:\n", - " - `Subscription Tier` – Free sandbox for a free trial, or contact [hello@weaviate.io](mailto:hello@weaviate.io) for other options.\n", - " - `Cluster name` – a unique name for your cluster. The name will become part of the URL used to access this instance.\n", - " - `Enable Authentication?` – Enabled by default. This will generate a static API key that you can use to authenticate.\n", - " 4. Wait for a few minutes until your cluster is ready. You will see a green tick ✔️ when it's done. Copy your cluster URL.\n", - "\n", - "- **Hybrid SaaS**\n", - "\n", - " > If you need to keep your data on-premise for security or compliance reasons, Weaviate also offers a Hybrid SaaS option: Weaviate runs within your cloud instances, but the cluster is managed remotely by Weaviate. This gives you the benefits of a managed service without sending data to an external party.\n", - "\n", - " The Weaviate Hybrid SaaS is a custom solution. If you are interested in this option, please reach out to [hello@weaviate.io](mailto:hello@weaviate.io).\n", - "\n", - "- **Self-hosted** – with a Docker container\n", - "\n", - " To set up a Weaviate instance with Docker:\n", - "\n", - " 1. [Install Docker](https://docs.docker.com/engine/install/) on your local machine if it is not already installed.\n", - " 2. [Install the Docker Compose Plugin](https://docs.docker.com/compose/install/)\n", - " 3. Download a `docker-compose.yml` file with this `curl` command:\n", - "\n", - " ```\n", - " curl -o docker-compose.yml \"https://configuration.weaviate.io/v2/docker-compose/docker-compose.yml?modules=standalone&runtime=docker-compose&weaviate_version=v1.19.6\"\n", - " ```\n", - "\n", - " Alternatively, you can use Weaviate's docker compose [configuration tool](https://weaviate.io/developers/weaviate/installation/docker-compose) to generate your own `docker-compose.yml` file.\n", - "\n", - " 4. 
Run `docker compose up -d` to spin up a Weaviate instance.\n", - "\n", - " > To shut it down, run `docker compose down`.\n", - "\n", - "- **Self-hosted** – with a Kubernetes cluster\n", - "\n", - " To configure a self-hosted instance with Kubernetes, follow Weaviate's [documentation](https://weaviate.io/developers/weaviate/installation/kubernetes).|\n", - "\n", - "- **Embedded** - start a weaviate instance right from your application code using the client library\n", - " \n", - " This code snippet shows how to instantiate an embedded weaviate instance and upload a document:\n", - "\n", - " ```python\n", - " import weaviate\n", - " from weaviate.embedded import EmbeddedOptions\n", - "\n", - " client = weaviate.Client(\n", - " embedded_options=EmbeddedOptions()\n", - " )\n", - "\n", - " data_obj = {\n", - " \"name\": \"Chardonnay\",\n", - " \"description\": \"Goes with fish\"\n", - " }\n", - "\n", - " client.data_object.create(data_obj, \"Wine\")\n", - " ```\n", - " \n", - " Refer to the [documentation](https://weaviate.io/developers/weaviate/installation/embedded) for more details about this deployment method." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Setup" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!pip install semantic-kernel==0.3.8.dev0\n", - "!pip install weaviate-client\n", - "!pip install python-dotenv" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## OS-specific notes:\n", - "* if you run into SSL errors when connecting to OpenAI on macOS, see this issue for a [potential solution](https://github.com/microsoft/semantic-kernel/issues/627#issuecomment-1580912248)\n", - "* on Windows, you may need to run Docker Desktop as administrator" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from typing import Tuple\n", - "\n", - "import semantic_kernel as sk\n", - "from semantic_kernel.connectors.ai.open_ai import (\n", - " OpenAIChatCompletion,\n", - " OpenAITextEmbedding,\n", - ")\n", - "\n", - "import os" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "First, we instantiate the Weaviate memory store. Uncomment ONE of the options below, depending on how you want to use Weaviate:\n", - "* from a Docker instance\n", - "* from WCS\n", - "* directly from the client (embedded Weaviate), which works on Linux only at the moment" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from semantic_kernel.connectors.memory.weaviate import weaviate_memory_store\n", - "from dotenv import load_dotenv\n", - "\n", - "load_dotenv(override=True)\n", - "\n", - "# Using Docker\n", - "config = weaviate_memory_store.WeaviateConfig(url=\"http://localhost:8080\")\n", - "\n", - "# Using WCS. 
Make sure the environment variables `WEAVIATE_URL` and `WEAVIATE_API_KEY`\n", - "# were set in the `.env` file.\n", - "#\n", - "#weaviate_api, weaviate_url = sk.weaviate_settings_from_dot_env()\n", - "#\n", - "#config = weaviate_memory_store.WeaviateConfig(\n", - "# url=weaviate_url,\n", - "# api_key=weaviate_api\n", - "#)\n", - "\n", - "# Using Embedded Weaviate\n", - "#config = weaviate_memory_store.WeaviateConfig(use_embed=True)\n", - "\n", - "store = weaviate_memory_store.WeaviateMemoryStore(config=config)\n", - "store.client.schema.delete_all()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Then, we register the memory store to the kernel:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "kernel = sk.Kernel()\n", - "\n", - "api_key, org_id = sk.openai_settings_from_dot_env()\n", - "kernel.add_chat_service(\n", - " \"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id)\n", - ")\n", - "kernel.add_text_embedding_generation_service(\n", - " \"ada\", OpenAITextEmbedding(\"text-embedding-ada-002\", api_key, org_id)\n", - ")\n", - "\n", - "kernel.register_memory_store(memory_store=store)\n", - "kernel.import_skill(sk.core_skills.TextMemorySkill())" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Manually adding memories\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's create some initial memories \"About Me\". We can add memories to our weaviate memory store by using `save_information_async`" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "COLLECTION = \"AboutMe\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "async def populate_memory(kernel: sk.Kernel) -> None:\n", - " # Add some documents to the semantic memory\n", - " await kernel.memory.save_information_async(\n", - " COLLECTION, id=\"info1\", text=\"My name is Andrea\"\n", - " )\n", - " await kernel.memory.save_information_async(\n", - " COLLECTION, id=\"info2\", text=\"I currently work as a tour guide\"\n", - " )\n", - " await kernel.memory.save_information_async(\n", - " COLLECTION, id=\"info3\", text=\"I've been living in Seattle since 2005\"\n", - " )\n", - " await kernel.memory.save_information_async(\n", - " COLLECTION, id=\"info4\", text=\"I visited France and Italy five times since 2015\"\n", - " )\n", - " await kernel.memory.save_information_async(\n", - " COLLECTION, id=\"info5\", text=\"My family is from New York\"\n", - " )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Searching is done through `search_async`:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "async def search_memory_examples(kernel: sk.Kernel) -> None:\n", - " questions = [\n", - " \"what's my name\",\n", - " \"where do I live?\",\n", - " \"where's my family from?\",\n", - " \"where have I traveled?\",\n", - " \"what do I do for work\",\n", - " ]\n", - "\n", - " for question in questions:\n", - " print(f\"Question: {question}\")\n", - " result = await kernel.memory.search_async(COLLECTION, question)\n", - " print(f\"Answer: {result[0].text}\\n\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's see the results of the functions:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], 
- "source": [ - "print(\"Populating memory...\")\n", - "await populate_memory(kernel)\n", - "\n", - "print(\"Asking questions... (manually)\")\n", - "await search_memory_examples(kernel)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Here's how to use the weaviate memory store in a chat application:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "async def setup_chat_with_memory(\n", - " kernel: sk.Kernel,\n", - ") -> Tuple[sk.SKFunctionBase, sk.SKContext]:\n", - " sk_prompt = \"\"\"\n", - " ChatBot can have a conversation with you about any topic.\n", - " It can give explicit instructions or say 'I don't know' if\n", - " it does not have an answer.\n", - "\n", - " Information about me, from previous conversations:\n", - " - {{$fact1}} {{recall $fact1}}\n", - " - {{$fact2}} {{recall $fact2}}\n", - " - {{$fact3}} {{recall $fact3}}\n", - " - {{$fact4}} {{recall $fact4}}\n", - " - {{$fact5}} {{recall $fact5}}\n", - "\n", - " Chat:\n", - " {{$chat_history}}\n", - " User: {{$user_input}}\n", - " ChatBot: \"\"\".strip()\n", - "\n", - " chat_func = kernel.create_semantic_function(\n", - " sk_prompt, max_tokens=200, temperature=0.8\n", - " )\n", - "\n", - " context = kernel.create_new_context()\n", - " context[\"fact1\"] = \"what is my name?\"\n", - " context[\"fact2\"] = \"where do I live?\"\n", - " context[\"fact3\"] = \"where's my family from?\"\n", - " context[\"fact4\"] = \"where have I traveled?\"\n", - " context[\"fact5\"] = \"what do I do for work?\"\n", - "\n", - " context[sk.core_skills.TextMemorySkill.COLLECTION_PARAM] = COLLECTION\n", - " context[sk.core_skills.TextMemorySkill.RELEVANCE_PARAM] = 0.8\n", - "\n", - " context[\"chat_history\"] = \"\"\n", - "\n", - " return chat_func, context" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "async def chat(\n", - " kernel: sk.Kernel, chat_func: sk.SKFunctionBase, context: sk.SKContext\n", - ") -> bool:\n", - " try:\n", - " user_input = input(\"User:> \")\n", - " context[\"user_input\"] = user_input\n", - " except KeyboardInterrupt:\n", - " print(\"\\n\\nExiting chat...\")\n", - " return False\n", - " except EOFError:\n", - " print(\"\\n\\nExiting chat...\")\n", - " return False\n", - "\n", - " if user_input == \"exit\":\n", - " print(\"\\n\\nExiting chat...\")\n", - " return False\n", - "\n", - " answer = await kernel.run_async(chat_func, input_vars=context.variables)\n", - " context[\"chat_history\"] += f\"\\nUser:> {user_input}\\nChatBot:> {answer}\\n\"\n", - "\n", - " print(f\"ChatBot:> {answer}\")\n", - " return True" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "print(\"Setting up a chat (with memory!)\")\n", - "chat_func, context = await setup_chat_with_memory(kernel)\n", - "\n", - "print(\"Begin chatting (type 'exit' to exit):\\n\")\n", - "chatting = True\n", - "while chatting:\n", - " chatting = await chat(kernel, chat_func, context)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Adding documents to your memory" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Create a dictionary to hold some files. 
The key is the hyperlink to the file and the value is the file's content:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "github_files = {}\n", - "github_files[\n", - " \"https://github.com/microsoft/semantic-kernel/blob/main/README.md\"\n", - "] = \"README: Installation, getting started, and how to contribute\"\n", - "github_files[\n", - " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/02-running-prompts-from-file.ipynb\"\n", - "] = \"Jupyter notebook describing how to pass prompts from a file to a semantic skill or function\"\n", - "github_files[\n", - " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/00-getting-started.ipynb\"\n", - "] = \"Jupyter notebook describing how to get started with the Semantic Kernel\"\n", - "github_files[\n", - " \"https://github.com/microsoft/semantic-kernel/tree/main/samples/skills/ChatSkill/ChatGPT\"\n", - "] = \"Sample demonstrating how to create a chat skill interfacing with ChatGPT\"\n", - "github_files[\n", - " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel/Memory/Volatile/VolatileMemoryStore.cs\"\n", - "] = \"C# class that defines a volatile embedding store\"\n", - "github_files[\n", - " \"https://github.com/microsoft/semantic-kernel/tree/main/samples/dotnet/KernelHttpServer/README.md\"\n", - "] = \"README: How to set up a Semantic Kernel Service API using Azure Function Runtime v4\"\n", - "github_files[\n", - " \"https://github.com/microsoft/semantic-kernel/tree/main/samples/apps/chat-summary-webapp-react/README.md\"\n", - "] = \"README: README associated with a sample starter react-based chat summary webapp\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Use `save_reference_async` to save the file:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "COLLECTION = \"SKGitHub\"\n", - "\n", - "print(\n", - " \"Adding some GitHub file URLs and their descriptions to a volatile Semantic Memory.\"\n", - ")\n", - "i = 0\n", - "for entry, value in github_files.items():\n", - " await kernel.memory.save_reference_async(\n", - " collection=COLLECTION,\n", - " description=value,\n", - " text=value,\n", - " external_id=entry,\n", - " external_source_name=\"GitHub\",\n", - " )\n", - " i += 1\n", - " print(\" URL {} saved\".format(i))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Use `search_async` to ask a question:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ask = \"I love Jupyter notebooks, how should I get started?\"\n", - "print(\"===========================\\n\" + \"Query: \" + ask + \"\\n\")\n", - "\n", - "memories = await kernel.memory.search_async(\n", - " COLLECTION, ask, limit=5, min_relevance_score=0.77\n", - ")\n", - "\n", - "i = 0\n", - "for memory in memories:\n", - " i += 1\n", - " print(f\"Result {i}:\")\n", - " print(\" URL: : \" + memory.id)\n", - " print(\" Title : \" + memory.description)\n", - " print(\" Relevance: \" + str(memory.relevance))\n", - " print()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - 
"pygments_lexer": "ipython3", - "version": "3.9.13" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Introduction\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook shows how to replace the `VolatileMemoryStore` memory storage used in a [previous notebook](./06-memory-and-embeddings.ipynb) with a `WeaviateMemoryStore`.\n", + "\n", + "`WeaviateMemoryStore` is an example of a persistent (i.e. long-term) memory store backed by the Weaviate vector database.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# About Weaviate\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[Weaviate](https://weaviate.io/) is an open-source vector database designed to scale seamlessly into billions of data objects. This implementation supports hybrid search out-of-the-box (meaning it will perform better for keyword searches).\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can run Weaviate in 5 ways:\n", + "\n", + "- **SaaS** – with [Weaviate Cloud Services (WCS)](https://weaviate.io/pricing).\n", + "\n", + " WCS is a fully managed service that takes care of hosting, scaling, and updating your Weaviate instance. You can try it out for free with a sandbox that lasts for 14 days.\n", + "\n", + " To set up a SaaS Weaviate instance with WCS:\n", + "\n", + " 1. Navigate to [Weaviate Cloud Console](https://console.weaviate.cloud/).\n", + " 2. Register or sign in to your WCS account.\n", + " 3. Create a new cluster with the following settings:\n", + " - `Subscription Tier` – Free sandbox for a free trial, or contact [hello@weaviate.io](mailto:hello@weaviate.io) for other options.\n", + " - `Cluster name` – a unique name for your cluster. The name will become part of the URL used to access this instance.\n", + " - `Enable Authentication?` – Enabled by default. This will generate a static API key that you can use to authenticate.\n", + " 4. Wait for a few minutes until your cluster is ready. You will see a green tick ✔️ when it's done. Copy your cluster URL.\n", + "\n", + "- **Hybrid SaaS**\n", + "\n", + " > If you need to keep your data on-premise for security or compliance reasons, Weaviate also offers a Hybrid SaaS option: Weaviate runs within your cloud instances, but the cluster is managed remotely by Weaviate. This gives you the benefits of a managed service without sending data to an external party.\n", + "\n", + " The Weaviate Hybrid SaaS is a custom solution. If you are interested in this option, please reach out to [hello@weaviate.io](mailto:hello@weaviate.io).\n", + "\n", + "- **Self-hosted** – with a Docker container\n", + "\n", + " To set up a Weaviate instance with Docker:\n", + "\n", + " 1. [Install Docker](https://docs.docker.com/engine/install/) on your local machine if it is not already installed.\n", + " 2. [Install the Docker Compose Plugin](https://docs.docker.com/compose/install/)\n", + " 3. Download a `docker-compose.yml` file with this `curl` command:\n", + "\n", + " ```\n", + " curl -o docker-compose.yml \"https://configuration.weaviate.io/v2/docker-compose/docker-compose.yml?modules=standalone&runtime=docker-compose&weaviate_version=v1.19.6\"\n", + " ```\n", + "\n", + " Alternatively, you can use Weaviate's docker compose [configuration tool](https://weaviate.io/developers/weaviate/installation/docker-compose) to generate your own `docker-compose.yml` file.\n", + "\n", + " 4. 
Run `docker compose up -d` to spin up a Weaviate instance.\n", + "\n", + " > To shut it down, run `docker compose down`.\n", + "\n", + "- **Self-hosted** – with a Kubernetes cluster\n", + "\n", + " To configure a self-hosted instance with Kubernetes, follow Weaviate's [documentation](https://weaviate.io/developers/weaviate/installation/kubernetes).\n", + "\n", + "- **Embedded** – start a Weaviate instance right from your application code using the client library\n", + "\n", + " This code snippet shows how to instantiate an embedded Weaviate instance and upload a document:\n", + "\n", + " ```python\n", + " import weaviate\n", + " from weaviate.embedded import EmbeddedOptions\n", + "\n", + " client = weaviate.Client(\n", + " embedded_options=EmbeddedOptions()\n", + " )\n", + "\n", + " data_obj = {\n", + " \"name\": \"Chardonnay\",\n", + " \"description\": \"Goes with fish\"\n", + " }\n", + "\n", + " client.data_object.create(data_obj, \"Wine\")\n", + " ```\n", + "\n", + " Refer to the [documentation](https://weaviate.io/developers/weaviate/installation/embedded) for more details about this deployment method.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Setup\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install semantic-kernel==0.3.8.dev0\n", + "!pip install weaviate-client\n", + "!pip install python-dotenv" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## OS-specific notes:\n", + "\n", + "- if you run into SSL errors when connecting to OpenAI on macOS, see this issue for a [potential solution](https://github.com/microsoft/semantic-kernel/issues/627#issuecomment-1580912248)\n", + "- on Windows, you may need to run Docker Desktop as administrator\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Tuple\n", + "\n", + "import semantic_kernel as sk\n", + "from semantic_kernel.connectors.ai.open_ai import (\n", + " OpenAIChatCompletion,\n", + " OpenAITextEmbedding,\n", + ")\n", + "\n", + "import os" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First, we instantiate the Weaviate memory store. Uncomment ONE of the options below, depending on how you want to use Weaviate:\n", + "\n", + "- from a Docker instance\n", + "- from WCS\n", + "- directly from the client (embedded Weaviate), which works on Linux only at the moment\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from semantic_kernel.connectors.memory.weaviate import weaviate_memory_store\n", + "from dotenv import load_dotenv\n", + "\n", + "load_dotenv(override=True)\n", + "\n", + "# Using Docker\n", + "config = weaviate_memory_store.WeaviateConfig(url=\"http://localhost:8080\")\n", + "\n", + "# Using WCS. 
Make sure the environment variables `WEAVIATE_URL` and `WEAVIATE_API_KEY`\n", + "# were set in the `.env` file.\n", + "#\n", + "# weaviate_api, weaviate_url = sk.weaviate_settings_from_dot_env()\n", + "#\n", + "# config = weaviate_memory_store.WeaviateConfig(\n", + "# url=weaviate_url,\n", + "# api_key=weaviate_api\n", + "# )\n", + "\n", + "# Using Embedded Weaviate\n", + "# config = weaviate_memory_store.WeaviateConfig(use_embed=True)\n", + "\n", + "store = weaviate_memory_store.WeaviateMemoryStore(config=config)\n", + "store.client.schema.delete_all()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then, we register the memory store to the kernel:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "kernel = sk.Kernel()\n", + "\n", + "api_key, org_id = sk.openai_settings_from_dot_env()\n", + "kernel.add_chat_service(\"chat-gpt\", OpenAIChatCompletion(\"gpt-3.5-turbo\", api_key, org_id))\n", + "kernel.add_text_embedding_generation_service(\"ada\", OpenAITextEmbedding(\"text-embedding-ada-002\", api_key, org_id))\n", + "\n", + "kernel.register_memory_store(memory_store=store)\n", + "kernel.import_plugin(sk.core_plugins.TextMemoryPlugin())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Manually adding memories\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's create some initial memories \"About Me\". We can add memories to our weaviate memory store by using `save_information`\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "COLLECTION = \"AboutMe\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async def populate_memory(kernel: sk.Kernel) -> None:\n", + " # Add some documents to the semantic memory\n", + " await kernel.memory.save_information(COLLECTION, id=\"info1\", text=\"My name is Andrea\")\n", + " await kernel.memory.save_information(COLLECTION, id=\"info2\", text=\"I currently work as a tour guide\")\n", + " await kernel.memory.save_information(COLLECTION, id=\"info3\", text=\"I've been living in Seattle since 2005\")\n", + " await kernel.memory.save_information(\n", + " COLLECTION, id=\"info4\", text=\"I visited France and Italy five times since 2015\"\n", + " )\n", + " await kernel.memory.save_information(COLLECTION, id=\"info5\", text=\"My family is from New York\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Searching is done through `search`:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async def search_memory_examples(kernel: sk.Kernel) -> None:\n", + " questions = [\n", + " \"what's my name\",\n", + " \"where do I live?\",\n", + " \"where's my family from?\",\n", + " \"where have I traveled?\",\n", + " \"what do I do for work\",\n", + " ]\n", + "\n", + " for question in questions:\n", + " print(f\"Question: {question}\")\n", + " result = await kernel.memory.search(COLLECTION, question)\n", + " print(f\"Answer: {result[0].text}\\n\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's see the results of the functions:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(\"Populating memory...\")\n", + "await populate_memory(kernel)\n", + "\n", + "print(\"Asking questions... 
(manually)\")\n", + "await search_memory_examples(kernel)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here's how to use the weaviate memory store in a chat application:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async def setup_chat_with_memory(\n", + " kernel: sk.Kernel,\n", + ") -> Tuple[sk.KernelFunction, sk.KernelContext]:\n", + " sk_prompt = \"\"\"\n", + " ChatBot can have a conversation with you about any topic.\n", + " It can give explicit instructions or say 'I don't know' if\n", + " it does not have an answer.\n", + "\n", + " Information about me, from previous conversations:\n", + " - {{$fact1}} {{recall $fact1}}\n", + " - {{$fact2}} {{recall $fact2}}\n", + " - {{$fact3}} {{recall $fact3}}\n", + " - {{$fact4}} {{recall $fact4}}\n", + " - {{$fact5}} {{recall $fact5}}\n", + "\n", + " Chat:\n", + " {{$chat_history}}\n", + " User: {{$user_input}}\n", + " ChatBot: \"\"\".strip()\n", + "\n", + " chat_func = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0.8)\n", + "\n", + " context = kernel.create_new_context()\n", + " context[\"fact1\"] = \"what is my name?\"\n", + " context[\"fact2\"] = \"where do I live?\"\n", + " context[\"fact3\"] = \"where's my family from?\"\n", + " context[\"fact4\"] = \"where have I traveled?\"\n", + " context[\"fact5\"] = \"what do I do for work?\"\n", + "\n", + " context[sk.core_plugins.TextMemoryPlugin.COLLECTION_PARAM] = COLLECTION\n", + " context[sk.core_plugins.TextMemoryPlugin.RELEVANCE_PARAM] = 0.8\n", + "\n", + " context[\"chat_history\"] = \"\"\n", + "\n", + " return chat_func, context" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async def chat(kernel: sk.Kernel, chat_func: sk.KernelFunction, context: sk.KernelContext) -> bool:\n", + " try:\n", + " user_input = input(\"User:> \")\n", + " context[\"user_input\"] = user_input\n", + " except KeyboardInterrupt:\n", + " print(\"\\n\\nExiting chat...\")\n", + " return False\n", + " except EOFError:\n", + " print(\"\\n\\nExiting chat...\")\n", + " return False\n", + "\n", + " if user_input == \"exit\":\n", + " print(\"\\n\\nExiting chat...\")\n", + " return False\n", + "\n", + " answer = await kernel.run(chat_func, input_vars=context.variables)\n", + " context[\"chat_history\"] += f\"\\nUser:> {user_input}\\nChatBot:> {answer}\\n\"\n", + "\n", + " print(f\"ChatBot:> {answer}\")\n", + " return True" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(\"Setting up a chat (with memory!)\")\n", + "chat_func, context = await setup_chat_with_memory(kernel)\n", + "\n", + "print(\"Begin chatting (type 'exit' to exit):\\n\")\n", + "chatting = True\n", + "while chatting:\n", + " chatting = await chat(kernel, chat_func, context)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Adding documents to your memory\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a dictionary to hold some files. 
The key is the hyperlink to the file and the value is the file's content:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "github_files = {}\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/blob/main/README.md\"\n", + "] = \"README: Installation, getting started, and how to contribute\"\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/02-running-prompts-from-file.ipynb\"\n", + "] = \"Jupyter notebook describing how to pass prompts from a file to a semantic plugin or function\"\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/notebooks/00-getting-started.ipynb\"\n", + "] = \"Jupyter notebook describing how to get started with the Semantic Kernel\"\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/tree/main/samples/plugins/ChatPlugin/ChatGPT\"\n", + "] = \"Sample demonstrating how to create a chat plugin interfacing with ChatGPT\"\n", + "github_files[\n", + " \"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel/Memory/Volatile/VolatileMemoryStore.cs\"\n", + "] = \"C# class that defines a volatile embedding store\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Use `save_reference` to save the file:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "COLLECTION = \"SKGitHub\"\n", + "\n", + "print(\"Adding some GitHub file URLs and their descriptions to a volatile Semantic Memory.\")\n", + "i = 0\n", + "for entry, value in github_files.items():\n", + " await kernel.memory.save_reference(\n", + " collection=COLLECTION,\n", + " description=value,\n", + " text=value,\n", + " external_id=entry,\n", + " external_source_name=\"GitHub\",\n", + " )\n", + " i += 1\n", + " print(\" URL {} saved\".format(i))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Use `search` to ask a question:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "ask = \"I love Jupyter notebooks, how should I get started?\"\n", + "print(\"===========================\\n\" + \"Query: \" + ask + \"\\n\")\n", + "\n", + "memories = await kernel.memory.search(COLLECTION, ask, limit=5, min_relevance_score=0.77)\n", + "\n", + "i = 0\n", + "for memory in memories:\n", + " i += 1\n", + " print(f\"Result {i}:\")\n", + " print(\" URL: : \" + memory.id)\n", + " print(\" Title : \" + memory.description)\n", + " print(\" Relevance: \" + str(memory.relevance))\n", + " print()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/python/poetry.lock b/python/poetry.lock index 79f7f3583e57..61d0e8da4389 100644 --- a/python/poetry.lock +++ b/python/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -13,111 +13,99 @@ files = [ [[package]] name = "aiohttp" -version = "3.8.5" +version = "3.9.3" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = 
"aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = 
"aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = 
"aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = 
"aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, ] [package.dependencies] aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<4.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +speedups = ["Brotli", "aiodns", "brotlicffi"] [[package]] name = "aiosignal" @@ -133,6 +121,20 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + [[package]] name = "anyio" version = "3.7.1" @@ -165,6 +167,63 @@ files = [ {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + [[package]] name = "asgiref" version = "3.7.2" @@ -184,20 +243,21 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "asttokens" -version = "2.2.1" +version = "2.4.1" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, + {file = 
"asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ] [package.dependencies] -six = "*" +six = ">=1.12.0" [package.extras] -test = ["astroid", "pytest"] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "async-timeout" @@ -230,17 +290,17 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "authlib" -version = "1.2.1" +version = "1.3.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "Authlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:c88984ea00149a90e3537c964327da930779afa4564e354edfd98410bea01911"}, - {file = "Authlib-1.2.1.tar.gz", hash = "sha256:421f7c6b468d907ca2d9afede256f068f87e34d23dd221c07d13d4c234726afb"}, + {file = "Authlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:9637e4de1fb498310a56900b3e2043a206b03cb11c05422014b0302cbc814be3"}, + {file = "Authlib-1.3.0.tar.gz", hash = "sha256:959ea62a5b7b5123c5059758296122b57cd2585ae2ed1c0622c21b371ffdae06"}, ] [package.dependencies] -cryptography = ">=3.2" +cryptography = "*" [[package]] name = "azure-common" @@ -255,17 +315,18 @@ files = [ [[package]] name = "azure-core" -version = "1.29.4" +version = "1.29.6" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-core-1.29.4.tar.gz", hash = "sha256:500b3aa9bf2e90c5ccc88bb105d056114ca0ce7d0ce73afb8bc4d714b2fc7568"}, - {file = "azure_core-1.29.4-py3-none-any.whl", hash = "sha256:b03261bcba22c0b9290faf9999cedd23e849ed2577feee90515694cea6bc74bf"}, + {file = "azure-core-1.29.6.tar.gz", hash = "sha256:13b485252ecd9384ae624894fe51cfa6220966207264c360beada239f88b738a"}, + {file = "azure_core-1.29.6-py3-none-any.whl", hash = "sha256:604a005bce6a49ba661bb7b2be84a9b169047e52fcfcd0a4e4770affab4178f7"}, ] [package.dependencies] -requests = ">=2.18.4" +anyio = ">=3.0,<5.0" +requests = ">=2.21.0" six = ">=1.11.0" typing-extensions = ">=4.6.0" @@ -274,19 +335,19 @@ aio = ["aiohttp (>=3.0)"] [[package]] name = "azure-identity" -version = "1.14.0" +version = "1.15.0" description = "Microsoft Azure Identity Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-identity-1.14.0.zip", hash = "sha256:72441799f8c5c89bfe21026965e266672a7c5d050c2c65119ef899dd5362e2b1"}, - {file = "azure_identity-1.14.0-py3-none-any.whl", hash = "sha256:edabf0e010eb85760e1dd19424d5e8f97ba2c9caff73a16e7b30ccbdbcce369b"}, + {file = "azure-identity-1.15.0.tar.gz", hash = "sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8"}, + {file = "azure_identity-1.15.0-py3-none-any.whl", hash = "sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912"}, ] [package.dependencies] -azure-core = ">=1.11.0,<2.0.0" +azure-core = ">=1.23.0,<2.0.0" cryptography = ">=2.5" -msal = ">=1.20.0,<2.0.0" +msal = ">=1.24.0,<2.0.0" msal-extensions = ">=0.3.0,<2.0.0" [[package]] @@ -357,32 +418,38 @@ tzdata = ["tzdata"] [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.2" description = "Modern password hashing for your software and your servers" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = 
"bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = 
"bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = 
"sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, ] [package.extras] @@ -391,29 +458,33 @@ typecheck = ["mypy"] [[package]] name = "black" -version = "23.10.1" +version = "23.12.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, - {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, - {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, - {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, - {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, - {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, - {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, - {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, - {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, - {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, - {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, - {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, - {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, - {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, - {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, + {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"}, + {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"}, + {file = "black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"}, + {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"}, + {file = 
"black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"}, + {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"}, + {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"}, + {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"}, + {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"}, + {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"}, + {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"}, + {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"}, + {file = "black-23.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcf91b01ddd91a2fed9a8006d7baa94ccefe7e518556470cf40213bd3d44bbbc"}, + {file = "black-23.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:996650a89fe5892714ea4ea87bc45e41a59a1e01675c42c433a35b490e5aa3f0"}, + {file = "black-23.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbff34c487239a63d86db0c9385b27cdd68b1bfa4e706aa74bb94a435403672"}, + {file = "black-23.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:97af22278043a6a1272daca10a6f4d36c04dfa77e61cbaaf4482e08f3640e9f0"}, + {file = "black-23.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ead25c273adfad1095a8ad32afdb8304933efba56e3c1d31b0fee4143a1e424a"}, + {file = "black-23.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c71048345bdbced456cddf1622832276d98a710196b842407840ae8055ade6ee"}, + {file = "black-23.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a832b6e00eef2c13b3239d514ea3b7d5cc3eaa03d0474eedcbbda59441ba5d"}, + {file = "black-23.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:6a82a711d13e61840fb11a6dfecc7287f2424f1ca34765e70c909a35ffa7fb95"}, + {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"}, + {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"}, ] [package.dependencies] @@ -427,103 +498,91 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" -version = "5.3.1" +version = "5.3.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash 
= "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, ] [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = 
"cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + 
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -553,86 +612,101 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = 
"sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -688,19 +762,19 @@ numpy = "*" [[package]] name = "chromadb" -version = "0.4.10" +version = "0.4.13" description = "Chroma." optional = false python-versions = ">=3.7" files = [ - {file = "chromadb-0.4.10-py3-none-any.whl", hash = "sha256:69e8c627cebb093cb211cd2e33959ec6edf66c9cdfcddf9f30902bd3c9bd23ac"}, - {file = "chromadb-0.4.10.tar.gz", hash = "sha256:1bbb72f5f69b7a0fa9c7f1d74c6ca6197d2991a4333598aa97fd90d89a8bd112"}, + {file = "chromadb-0.4.13-py3-none-any.whl", hash = "sha256:6959dc4aaa6278c7491dd1911724981a0e46816b19e9f86945b9bd875e6a252a"}, + {file = "chromadb-0.4.13.tar.gz", hash = "sha256:99d330b9ac8f2ec81f4b34798d34f2ea9f4656bef1da951efa7e93957ef7e706"}, ] [package.dependencies] bcrypt = ">=4.0.1" chroma-hnswlib = "0.7.3" -fastapi = ">=0.95.2,<0.100.0" +fastapi = ">=0.95.2" graphlib-backport = {version = ">=1.0.3", markers = "python_version < \"3.9\""} importlib-resources = "*" numpy = {version = ">=1.22.5", markers = "python_version >= \"3.8\""} @@ -708,11 +782,12 @@ onnxruntime = ">=1.14.1" overrides = ">=7.3.1" posthog = ">=2.4.0" pulsar-client = ">=3.1.0" -pydantic = ">=1.9,<2.0" +pydantic = ">=1.9" pypika = ">=0.48.9" requests = ">=2.28" tokenizers = ">=0.13.2" tqdm = ">=4.65.0" +typer = ">=0.9.0" typing-extensions = ">=4.5.0" uvicorn = {version = ">=0.18.3", extras = ["standard"]} @@ -730,35 +805,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "cmake" -version = "3.27.2" -description = "CMake is an open-source, cross-platform family of tools designed to build, test and package software" -optional = false -python-versions = "*" -files = [ - {file = "cmake-3.27.2-py2.py3-none-macosx_10_10_universal2.macosx_10_10_x86_64.macosx_11_0_arm64.macosx_11_0_universal2.whl", hash = "sha256:96ac856c4d6b2104408848f0005a8ab2229d4135b171ea9a03e8c33039ede420"}, - {file = "cmake-3.27.2-py2.py3-none-manylinux2010_i686.manylinux_2_12_i686.whl", hash = "sha256:11fe6129d07982721c5965fd804a4056b8c6e9c4f482ac9e0fe41bb3abc1ab5f"}, - {file = "cmake-3.27.2-py2.py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:f0c64e89e2ea59592980c4fe3821d712fee0e74cf87c2aaec5b3ab9aa809a57c"}, - {file = "cmake-3.27.2-py2.py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ca7650477dff2a1138776b28b79c0e99127be733d3978922e8f87b56a433eed6"}, - {file = "cmake-3.27.2-py2.py3-none-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ab2e40fe09e76a7ef67da2bbbf7a4cd1f52db4f1c7b6ccdda2539f918830343a"}, - {file = "cmake-3.27.2-py2.py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:980ee19f12c808cb8ddb56fdcee832501a9f9631799d8b4fc625c0a0b5fb4c55"}, - {file = "cmake-3.27.2-py2.py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:115d30ca0760e3861d9ad6b3288cd11ee72a785b81227da0c1765d3b84e2c009"}, - {file = "cmake-3.27.2-py2.py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efc338c939d6d435890a52458a260bf0942bd8392b648d7532a72c1ec0764e18"}, - {file = "cmake-3.27.2-py2.py3-none-musllinux_1_1_aarch64.whl", hash = 
"sha256:7f7438c60ccc01765b67abfb1797787c3b9459d500a804ed70a4cc181bc02204"}, - {file = "cmake-3.27.2-py2.py3-none-musllinux_1_1_i686.whl", hash = "sha256:294f008734267e0eee1574ad1b911bed137bc907ab19d60a618dab4615aa1fca"}, - {file = "cmake-3.27.2-py2.py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:197a34dc62ee149ced343545fac67e5a30b93fda65250b065726f86ce92bdada"}, - {file = "cmake-3.27.2-py2.py3-none-musllinux_1_1_s390x.whl", hash = "sha256:afb46ad883b174fb64347802ba5878423551dbd5847bb64669c39a5957c06eb7"}, - {file = "cmake-3.27.2-py2.py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:83611ffd155e270a6b13bbf0cfd4e8688ebda634f448aa2e3734006c745bf33f"}, - {file = "cmake-3.27.2-py2.py3-none-win32.whl", hash = "sha256:53e12deb893da935e236f93accd47dbe2806620cd7654986234dc4487cc49652"}, - {file = "cmake-3.27.2-py2.py3-none-win_amd64.whl", hash = "sha256:611f9722c68c40352d38a6c01960ab038c3d0419e7aee3bf18f95b23031e0dfe"}, - {file = "cmake-3.27.2-py2.py3-none-win_arm64.whl", hash = "sha256:30620326b51ac2ce0d8f476747af6367a7ea21075c4d065fad9443904b07476a"}, - {file = "cmake-3.27.2.tar.gz", hash = "sha256:7cd6e2d7d5a1125f8c26c4f65214f8c942e3f276f98c16cb62ae382c35609f25"}, -] - -[package.extras] -test = ["coverage (>=4.2)", "flake8 (>=3.0.4)", "path.py (>=11.5.0)", "pytest (>=3.0.3)", "pytest-cov (>=2.4.0)", "pytest-runner (>=2.9)", "pytest-virtualenv (>=1.7.0)", "scikit-build (>=0.10.0)", "setuptools (>=28.0.0)", "virtualenv (>=15.0.3)", "wheel"] - [[package]] name = "colorama" version = "0.4.6" @@ -789,93 +835,167 @@ cron = ["capturer (>=2.4)"] [[package]] name = "comm" -version = "0.1.4" +version = "0.2.0" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "comm-0.1.4-py3-none-any.whl", hash = "sha256:6d52794cba11b36ed9860999cd10fd02d6b2eac177068fdd585e1e2f8a96e67a"}, - {file = "comm-0.1.4.tar.gz", hash = "sha256:354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15"}, + {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, + {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, ] [package.dependencies] traitlets = ">=4" [package.extras] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] test = ["pytest"] -typing = ["mypy (>=0.990)"] + +[[package]] +name = "coverage" +version = "7.3.3" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d874434e0cb7b90f7af2b6e3309b0733cde8ec1476eb47db148ed7deeb2a9494"}, + {file = "coverage-7.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6621dccce8af666b8c4651f9f43467bfbf409607c604b840b78f4ff3619aeb"}, + {file = "coverage-7.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1367aa411afb4431ab58fd7ee102adb2665894d047c490649e86219327183134"}, + {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f0f8f0c497eb9c9f18f21de0750c8d8b4b9c7000b43996a094290b59d0e7523"}, + {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db0338c4b0951d93d547e0ff8d8ea340fecf5885f5b00b23be5aa99549e14cfd"}, + {file = 
"coverage-7.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d31650d313bd90d027f4be7663dfa2241079edd780b56ac416b56eebe0a21aab"}, + {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9437a4074b43c177c92c96d051957592afd85ba00d3e92002c8ef45ee75df438"}, + {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17d9cb06c13b4f2ef570355fa45797d10f19ca71395910b249e3f77942a837"}, + {file = "coverage-7.3.3-cp310-cp310-win32.whl", hash = "sha256:eee5e741b43ea1b49d98ab6e40f7e299e97715af2488d1c77a90de4a663a86e2"}, + {file = "coverage-7.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:593efa42160c15c59ee9b66c5f27a453ed3968718e6e58431cdfb2d50d5ad284"}, + {file = "coverage-7.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c944cf1775235c0857829c275c777a2c3e33032e544bcef614036f337ac37bb"}, + {file = "coverage-7.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eda7f6e92358ac9e1717ce1f0377ed2b9320cea070906ece4e5c11d172a45a39"}, + {file = "coverage-7.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c854c1d2c7d3e47f7120b560d1a30c1ca221e207439608d27bc4d08fd4aeae8"}, + {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:222b038f08a7ebed1e4e78ccf3c09a1ca4ac3da16de983e66520973443b546bc"}, + {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff4800783d85bff132f2cc7d007426ec698cdce08c3062c8d501ad3f4ea3d16c"}, + {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fc200cec654311ca2c3f5ab3ce2220521b3d4732f68e1b1e79bef8fcfc1f2b97"}, + {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:307aecb65bb77cbfebf2eb6e12009e9034d050c6c69d8a5f3f737b329f4f15fb"}, + {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ffb0eacbadb705c0a6969b0adf468f126b064f3362411df95f6d4f31c40d31c1"}, + {file = "coverage-7.3.3-cp311-cp311-win32.whl", hash = "sha256:79c32f875fd7c0ed8d642b221cf81feba98183d2ff14d1f37a1bbce6b0347d9f"}, + {file = "coverage-7.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:243576944f7c1a1205e5cd658533a50eba662c74f9be4c050d51c69bd4532936"}, + {file = "coverage-7.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a2ac4245f18057dfec3b0074c4eb366953bca6787f1ec397c004c78176a23d56"}, + {file = "coverage-7.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9191be7af41f0b54324ded600e8ddbcabea23e1e8ba419d9a53b241dece821d"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c0b1b8b5a4aebf8fcd227237fc4263aa7fa0ddcd4d288d42f50eff18b0bac4"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee453085279df1bac0996bc97004771a4a052b1f1e23f6101213e3796ff3cb85"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1191270b06ecd68b1d00897b2daddb98e1719f63750969614ceb3438228c088e"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:007a7e49831cfe387473e92e9ff07377f6121120669ddc39674e7244350a6a29"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:af75cf83c2d57717a8493ed2246d34b1f3398cb8a92b10fd7a1858cad8e78f59"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:811ca7373da32f1ccee2927dc27dc523462fd30674a80102f86c6753d6681bc6"}, + {file = "coverage-7.3.3-cp312-cp312-win32.whl", hash = "sha256:733537a182b5d62184f2a72796eb6901299898231a8e4f84c858c68684b25a70"}, + {file = "coverage-7.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:e995efb191f04b01ced307dbd7407ebf6e6dc209b528d75583277b10fd1800ee"}, + {file = "coverage-7.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbd8a5fe6c893de21a3c6835071ec116d79334fbdf641743332e442a3466f7ea"}, + {file = "coverage-7.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:50c472c1916540f8b2deef10cdc736cd2b3d1464d3945e4da0333862270dcb15"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e9223a18f51d00d3ce239c39fc41410489ec7a248a84fab443fbb39c943616c"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f501e36ac428c1b334c41e196ff6bd550c0353c7314716e80055b1f0a32ba394"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:475de8213ed95a6b6283056d180b2442eee38d5948d735cd3d3b52b86dd65b92"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afdcc10c01d0db217fc0a64f58c7edd635b8f27787fea0a3054b856a6dff8717"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fff0b2f249ac642fd735f009b8363c2b46cf406d3caec00e4deeb79b5ff39b40"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a1f76cfc122c9e0f62dbe0460ec9cc7696fc9a0293931a33b8870f78cf83a327"}, + {file = "coverage-7.3.3-cp38-cp38-win32.whl", hash = "sha256:757453848c18d7ab5d5b5f1827293d580f156f1c2c8cef45bfc21f37d8681069"}, + {file = "coverage-7.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ad2453b852a1316c8a103c9c970db8fbc262f4f6b930aa6c606df9b2766eee06"}, + {file = "coverage-7.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b15e03b8ee6a908db48eccf4e4e42397f146ab1e91c6324da44197a45cb9132"}, + {file = "coverage-7.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:89400aa1752e09f666cc48708eaa171eef0ebe3d5f74044b614729231763ae69"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c59a3e59fb95e6d72e71dc915e6d7fa568863fad0a80b33bc7b82d6e9f844973"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ede881c7618f9cf93e2df0421ee127afdfd267d1b5d0c59bcea771cf160ea4a"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3bfd2c2f0e5384276e12b14882bf2c7621f97c35320c3e7132c156ce18436a1"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f3bad1a9313401ff2964e411ab7d57fb700a2d5478b727e13f156c8f89774a0"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:65d716b736f16e250435473c5ca01285d73c29f20097decdbb12571d5dfb2c94"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a702e66483b1fe602717020a0e90506e759c84a71dbc1616dd55d29d86a9b91f"}, + {file = "coverage-7.3.3-cp39-cp39-win32.whl", hash = "sha256:7fbf3f5756e7955174a31fb579307d69ffca91ad163467ed123858ce0f3fd4aa"}, + {file = "coverage-7.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cad9afc1644b979211989ec3ff7d82110b2ed52995c2f7263e7841c846a75348"}, + {file = "coverage-7.3.3-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:d299d379b676812e142fb57662a8d0d810b859421412b4d7af996154c00c31bb"}, + {file = "coverage-7.3.3.tar.gz", hash = "sha256:df04c64e58df96b4427db8d0559e95e2df3138c9916c96f9f6a4dd220db2fdb7"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.4" +version = "42.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, - {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, - {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, - {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, - {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, - {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, - {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, + {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434"}, + {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc"}, + {file = "cryptography-42.0.0-cp37-abi3-win32.whl", hash = "sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4"}, + {file = "cryptography-42.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0"}, + {file = "cryptography-42.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221"}, + {file = "cryptography-42.0.0-cp39-abi3-win32.whl", hash = "sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b"}, + {file = "cryptography-42.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce"}, + {file = "cryptography-42.0.0.tar.gz", hash = "sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "debugpy" -version = "1.6.7.post1" +version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "debugpy-1.6.7.post1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:903bd61d5eb433b6c25b48eae5e23821d4c1a19e25c9610205f5aeaccae64e32"}, - {file = "debugpy-1.6.7.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16882030860081e7dd5aa619f30dec3c2f9a421e69861125f83cc372c94e57d"}, - {file = "debugpy-1.6.7.post1-cp310-cp310-win32.whl", hash = "sha256:eea8d8cfb9965ac41b99a61f8e755a8f50e9a20330938ad8271530210f54e09c"}, - {file = 
"debugpy-1.6.7.post1-cp310-cp310-win_amd64.whl", hash = "sha256:85969d864c45f70c3996067cfa76a319bae749b04171f2cdeceebe4add316155"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:890f7ab9a683886a0f185786ffbda3b46495c4b929dab083b8c79d6825832a52"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ac7a4dba28801d184b7fc0e024da2635ca87d8b0a825c6087bb5168e3c0d28"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-win32.whl", hash = "sha256:3370ef1b9951d15799ef7af41f8174194f3482ee689988379763ef61a5456426"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-win_amd64.whl", hash = "sha256:65b28435a17cba4c09e739621173ff90c515f7b9e8ea469b92e3c28ef8e5cdfb"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:92b6dae8bfbd497c90596bbb69089acf7954164aea3228a99d7e43e5267f5b36"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72f5d2ecead8125cf669e62784ef1e6300f4067b0f14d9f95ee00ae06fc7c4f7"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-win32.whl", hash = "sha256:f0851403030f3975d6e2eaa4abf73232ab90b98f041e3c09ba33be2beda43fcf"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-win_amd64.whl", hash = "sha256:3de5d0f97c425dc49bce4293df6a04494309eedadd2b52c22e58d95107e178d9"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38651c3639a4e8bbf0ca7e52d799f6abd07d622a193c406be375da4d510d968d"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038c51268367c9c935905a90b1c2d2dbfe304037c27ba9d19fe7409f8cdc710c"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-win32.whl", hash = "sha256:4b9eba71c290852f959d2cf8a03af28afd3ca639ad374d393d53d367f7f685b2"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-win_amd64.whl", hash = "sha256:973a97ed3b434eab0f792719a484566c35328196540676685c975651266fccf9"}, - {file = "debugpy-1.6.7.post1-py2.py3-none-any.whl", hash = "sha256:1093a5c541af079c13ac8c70ab8b24d1d35c8cacb676306cf11e57f699c02926"}, - {file = "debugpy-1.6.7.post1.zip", hash = "sha256:fe87ec0182ef624855d05e6ed7e0b7cb1359d2ffa2a925f8ec2d22e98b75d0ca"}, + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = 
"debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, ] [[package]] @@ -891,13 +1011,24 @@ files = [ [[package]] name = "distlib" -version = "0.3.7" +version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "distro" +version = "1.8.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.8.0-py3-none-any.whl", hash = "sha256:99522ca3e365cac527b44bde033f64c6945d90eb9f769703caaec52b09bbd3ff"}, + {file = "distro-1.8.0.tar.gz", hash = "sha256:02e111d1dc6a50abb8eed6bf31c3e48ed8b0830d1ea2a1b78c61765c2513fdd8"}, ] [[package]] @@ -942,13 +1073,13 @@ tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -956,51 +1087,52 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "1.2.0" +version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = 
"sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, ] [package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "fastapi" -version = "0.99.1" +version = "0.109.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "fastapi-0.99.1-py3-none-any.whl", hash = "sha256:976df7bab51ac7beda9f68c4513b8c4490b5c1135c72aafd0a5ee4023ec5282e"}, - {file = "fastapi-0.99.1.tar.gz", hash = "sha256:ac78f717cd80d657bd183f94d33b9bda84aa376a46a9dab513586b8eef1dc6fc"}, + {file = "fastapi-0.109.1-py3-none-any.whl", hash = "sha256:510042044906b17b6d9149135d90886ade170bf615efcfb5533f568ae6d88534"}, + {file = "fastapi-0.109.1.tar.gz", hash = "sha256:5402389843a3561918634eb327e86b9ae98645a9e7696bede9074449c48d610a"}, ] [package.dependencies] -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" -starlette = ">=0.27.0,<0.28.0" -typing-extensions = ">=4.5.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.35.0,<0.36.0" +typing-extensions = ">=4.8.0" [package.extras] -all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" -version = "3.12.2" +version = "3.13.1" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "flatbuffers" @@ -1015,83 +1147,99 @@ files = [ [[package]] name = "frozenlist" -version = "1.4.0" +version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" files = [ - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, - {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, - {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, - {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, - {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, - {file = 
"frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, - {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, - {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, - {file = 
"frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, - {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, - {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, - {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + 
{file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] [[package]] name = "fsspec" -version = "2023.6.0" +version = "2023.12.2" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.6.0-py3-none-any.whl", hash = "sha256:1cbad1faef3e391fba6dc005ae9b5bdcbf43005c9167ce78c915549c352c869a"}, - {file = "fsspec-2023.6.0.tar.gz", hash = "sha256:d0b2f935446169753e7a5c5c55681c54ea91996cc67be93c39a154fb3a2742af"}, + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, ] [package.extras] @@ -1139,13 +1287,13 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 [[package]] name = "google-api-core" -version = "2.11.1" +version = "2.15.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = 
"sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, ] [package.dependencies] @@ -1169,21 +1317,19 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.22.0" +version = "2.25.2" description = "Google Authentication Library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, + {file = "google-auth-2.25.2.tar.gz", hash = "sha256:42f707937feb4f5e5a39e6c4f343a17300a459aaf03141457ba505812841cc40"}, + {file = "google_auth-2.25.2-py2.py3-none-any.whl", hash = "sha256:473a8dfd0135f75bb79d878436e568f2695dce456764bf3a02b6f8c540b1d256"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -six = ">=1.9.0" -urllib3 = "<2.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] @@ -1194,12 +1340,12 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-generativeai" -version = "0.2.1" +version = "0.2.2" description = "Google Generative AI High level API client library and tools." optional = false python-versions = ">=3.9" files = [ - {file = "google_generativeai-0.2.1-py3-none-any.whl", hash = "sha256:892c80f33fda68f531e97de67c7796f9c10f68708599506bba2388c53d1d332e"}, + {file = "google_generativeai-0.2.2-py3-none-any.whl", hash = "sha256:0fc3e61fbaeddaca590d30cfa1a4b2945db85d2a782f31eef20982457f4cb31f"}, ] [package.dependencies] @@ -1214,13 +1360,13 @@ dev = ["absl-py", "black", "nose2", "pandas", "pytype", "pyyaml"] [[package]] name = "googleapis-common-protos" -version = "1.60.0" +version = "1.62.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, - {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, ] [package.dependencies] @@ -1242,133 +1388,133 @@ files = [ [[package]] name = "grpcio" -version = "1.56.0" +version = "1.58.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.56.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:fb34ace11419f1ae321c36ccaa18d81cd3f20728cd191250be42949d6845bb2d"}, - {file = "grpcio-1.56.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:008767c0aed4899e657b50f2e0beacbabccab51359eba547f860e7c55f2be6ba"}, - {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:17f47aeb9be0da5337f9ff33ebb8795899021e6c0741ee68bd69774a7804ca86"}, - {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43c50d810cc26349b093bf2cfe86756ab3e9aba3e7e681d360930c1268e1399a"}, - {file = "grpcio-1.56.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187b8f71bad7d41eea15e0c9812aaa2b87adfb343895fffb704fb040ca731863"}, - 
{file = "grpcio-1.56.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:881575f240eb5db72ddca4dc5602898c29bc082e0d94599bf20588fb7d1ee6a0"}, - {file = "grpcio-1.56.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c243b158dd7585021d16c50498c4b2ec0a64a6119967440c5ff2d8c89e72330e"}, - {file = "grpcio-1.56.0-cp310-cp310-win32.whl", hash = "sha256:8b3b2c7b5feef90bc9a5fa1c7f97637e55ec3e76460c6d16c3013952ee479cd9"}, - {file = "grpcio-1.56.0-cp310-cp310-win_amd64.whl", hash = "sha256:03a80451530fd3b8b155e0c4480434f6be669daf7ecba56f73ef98f94222ee01"}, - {file = "grpcio-1.56.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:64bd3abcf9fb4a9fa4ede8d0d34686314a7075f62a1502217b227991d9ca4245"}, - {file = "grpcio-1.56.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:fdc3a895791af4addbb826808d4c9c35917c59bb5c430d729f44224e51c92d61"}, - {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:4f84a6fd4482e5fe73b297d4874b62a535bc75dc6aec8e9fe0dc88106cd40397"}, - {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14e70b4dda3183abea94c72d41d5930c333b21f8561c1904a372d80370592ef3"}, - {file = "grpcio-1.56.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b5ce42a5ebe3e04796246ba50357f1813c44a6efe17a37f8dc7a5c470377312"}, - {file = "grpcio-1.56.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8219f17baf069fe8e42bd8ca0b312b875595e43a70cabf397be4fda488e2f27d"}, - {file = "grpcio-1.56.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:defdd14b518e6e468466f799aaa69db0355bca8d3a5ea75fb912d28ba6f8af31"}, - {file = "grpcio-1.56.0-cp311-cp311-win32.whl", hash = "sha256:50f4daa698835accbbcc60e61e0bc29636c0156ddcafb3891c987e533a0031ba"}, - {file = "grpcio-1.56.0-cp311-cp311-win_amd64.whl", hash = "sha256:59c4e606993a47146fbeaf304b9e78c447f5b9ee5641cae013028c4cca784617"}, - {file = "grpcio-1.56.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:b1f4b6f25a87d80b28dd6d02e87d63fe1577fe6d04a60a17454e3f8077a38279"}, - {file = "grpcio-1.56.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:c2148170e01d464d41011a878088444c13413264418b557f0bdcd1bf1b674a0e"}, - {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:0409de787ebbf08c9d2bca2bcc7762c1efe72eada164af78b50567a8dfc7253c"}, - {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66f0369d27f4c105cd21059d635860bb2ea81bd593061c45fb64875103f40e4a"}, - {file = "grpcio-1.56.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38fdf5bd0a1c754ce6bf9311a3c2c7ebe56e88b8763593316b69e0e9a56af1de"}, - {file = "grpcio-1.56.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:79d4c5911d12a7aa671e5eb40cbb50a830396525014d2d6f254ea2ba180ce637"}, - {file = "grpcio-1.56.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5d2fc471668a7222e213f86ef76933b18cdda6a51ea1322034478df8c6519959"}, - {file = "grpcio-1.56.0-cp37-cp37m-win_amd64.whl", hash = "sha256:991224fd485e088d3cb5e34366053691a4848a6b7112b8f5625a411305c26691"}, - {file = "grpcio-1.56.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:c6f36621aabecbaff3e70c4d1d924c76c8e6a7ffec60c331893640a4af0a8037"}, - {file = "grpcio-1.56.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:1eadd6de258901929223f422ffed7f8b310c0323324caf59227f9899ea1b1674"}, - {file = "grpcio-1.56.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:72836b5a1d4f508ffbcfe35033d027859cc737972f9dddbe33fb75d687421e2e"}, - {file = 
"grpcio-1.56.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f92a99ab0c7772fb6859bf2e4f44ad30088d18f7c67b83205297bfb229e0d2cf"}, - {file = "grpcio-1.56.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa08affbf672d051cd3da62303901aeb7042a2c188c03b2c2a2d346fc5e81c14"}, - {file = "grpcio-1.56.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2db108b4c8e29c145e95b0226973a66d73ae3e3e7fae00329294af4e27f1c42"}, - {file = "grpcio-1.56.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8674fdbd28266d8efbcddacf4ec3643f76fe6376f73283fd63a8374c14b0ef7c"}, - {file = "grpcio-1.56.0-cp38-cp38-win32.whl", hash = "sha256:bd55f743e654fb050c665968d7ec2c33f03578a4bbb163cfce38024775ff54cc"}, - {file = "grpcio-1.56.0-cp38-cp38-win_amd64.whl", hash = "sha256:c63bc5ac6c7e646c296fed9139097ae0f0e63f36f0864d7ce431cce61fe0118a"}, - {file = "grpcio-1.56.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c0bc9dda550785d23f4f025be614b7faa8d0293e10811f0f8536cf50435b7a30"}, - {file = "grpcio-1.56.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:d596408bab632ec7b947761e83ce6b3e7632e26b76d64c239ba66b554b7ee286"}, - {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76b6e6e1ee9bda32e6e933efd61c512e9a9f377d7c580977f090d1a9c78cca44"}, - {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7beb84ebd0a3f732625124b73969d12b7350c5d9d64ddf81ae739bbc63d5b1ed"}, - {file = "grpcio-1.56.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83ec714bbbe9b9502177c842417fde39f7a267031e01fa3cd83f1ca49688f537"}, - {file = "grpcio-1.56.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4feee75565d1b5ab09cb3a5da672b84ca7f6dd80ee07a50f5537207a9af543a4"}, - {file = "grpcio-1.56.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b4638a796778329cc8e142e4f57c705adb286b3ba64e00b0fa91eeb919611be8"}, - {file = "grpcio-1.56.0-cp39-cp39-win32.whl", hash = "sha256:437af5a7673bca89c4bc0a993382200592d104dd7bf55eddcd141cef91f40bab"}, - {file = "grpcio-1.56.0-cp39-cp39-win_amd64.whl", hash = "sha256:4241a1c2c76e748023c834995cd916570e7180ee478969c2d79a60ce007bc837"}, - {file = "grpcio-1.56.0.tar.gz", hash = "sha256:4c08ee21b3d10315b8dc26f6c13917b20ed574cdbed2d2d80c53d5508fdcc0f2"}, + {file = "grpcio-1.58.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:3e6bebf1dfdbeb22afd95650e4f019219fef3ab86d3fca8ebade52e4bc39389a"}, + {file = "grpcio-1.58.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:cde11577d5b6fd73a00e6bfa3cf5f428f3f33c2d2878982369b5372bbc4acc60"}, + {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a2d67ff99e70e86b2be46c1017ae40b4840d09467d5455b2708de6d4c127e143"}, + {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ed979b273a81de36fc9c6716d9fb09dd3443efa18dcc8652501df11da9583e9"}, + {file = "grpcio-1.58.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:458899d2ebd55d5ca2350fd3826dfd8fcb11fe0f79828ae75e2b1e6051d50a29"}, + {file = "grpcio-1.58.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc7ffef430b80345729ff0a6825e9d96ac87efe39216e87ac58c6c4ef400de93"}, + {file = "grpcio-1.58.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5b23d75e5173faa3d1296a7bedffb25afd2fddb607ef292dfc651490c7b53c3d"}, + {file = "grpcio-1.58.0-cp310-cp310-win32.whl", hash = "sha256:fad9295fe02455d4f158ad72c90ef8b4bcaadfdb5efb5795f7ab0786ad67dd58"}, + {file = "grpcio-1.58.0-cp310-cp310-win_amd64.whl", 
hash = "sha256:bc325fed4d074367bebd465a20763586e5e1ed5b943e9d8bc7c162b1f44fd602"}, + {file = "grpcio-1.58.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:652978551af02373a5a313e07bfef368f406b5929cf2d50fa7e4027f913dbdb4"}, + {file = "grpcio-1.58.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:9f13a171281ebb4d7b1ba9f06574bce2455dcd3f2f6d1fbe0fd0d84615c74045"}, + {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8774219e21b05f750eef8adc416e9431cf31b98f6ce9def288e4cea1548cbd22"}, + {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09206106848462763f7f273ca93d2d2d4d26cab475089e0de830bb76be04e9e8"}, + {file = "grpcio-1.58.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62831d5e251dd7561d9d9e83a0b8655084b2a1f8ea91e4bd6b3cedfefd32c9d2"}, + {file = "grpcio-1.58.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:212f38c6a156862098f6bdc9a79bf850760a751d259d8f8f249fc6d645105855"}, + {file = "grpcio-1.58.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4b12754af201bb993e6e2efd7812085ddaaef21d0a6f0ff128b97de1ef55aa4a"}, + {file = "grpcio-1.58.0-cp311-cp311-win32.whl", hash = "sha256:3886b4d56bd4afeac518dbc05933926198aa967a7d1d237a318e6fbc47141577"}, + {file = "grpcio-1.58.0-cp311-cp311-win_amd64.whl", hash = "sha256:002f228d197fea12797a14e152447044e14fb4fdb2eb5d6cfa496f29ddbf79ef"}, + {file = "grpcio-1.58.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:b5e8db0aff0a4819946215f156bd722b6f6c8320eb8419567ffc74850c9fd205"}, + {file = "grpcio-1.58.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:201e550b7e2ede113b63e718e7ece93cef5b0fbf3c45e8fe4541a5a4305acd15"}, + {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:d79b660681eb9bc66cc7cbf78d1b1b9e335ee56f6ea1755d34a31108b80bd3c8"}, + {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ef8d4a76d2c7d8065aba829f8d0bc0055495c998dce1964ca5b302d02514fb3"}, + {file = "grpcio-1.58.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cba491c638c76d3dc6c191d9c75041ca5b8f5c6de4b8327ecdcab527f130bb4"}, + {file = "grpcio-1.58.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6801ff6652ecd2aae08ef994a3e49ff53de29e69e9cd0fd604a79ae4e545a95c"}, + {file = "grpcio-1.58.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:24edec346e69e672daf12b2c88e95c6f737f3792d08866101d8c5f34370c54fd"}, + {file = "grpcio-1.58.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7e473a7abad9af48e3ab5f3b5d237d18208024d28ead65a459bd720401bd2f8f"}, + {file = "grpcio-1.58.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:4891bbb4bba58acd1d620759b3be11245bfe715eb67a4864c8937b855b7ed7fa"}, + {file = "grpcio-1.58.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:e9f995a8a421405958ff30599b4d0eec244f28edc760de82f0412c71c61763d2"}, + {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2f85f87e2f087d9f632c085b37440a3169fda9cdde80cb84057c2fc292f8cbdf"}, + {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb6b92036ff312d5b4182fa72e8735d17aceca74d0d908a7f08e375456f03e07"}, + {file = "grpcio-1.58.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d81c2b2b24c32139dd2536972f1060678c6b9fbd106842a9fcdecf07b233eccd"}, + {file = "grpcio-1.58.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fbcecb6aedd5c1891db1d70efbfbdc126c986645b5dd616a045c07d6bd2dfa86"}, + {file = 
"grpcio-1.58.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92ae871a902cf19833328bd6498ec007b265aabf2fda845ab5bd10abcaf4c8c6"}, + {file = "grpcio-1.58.0-cp38-cp38-win32.whl", hash = "sha256:dc72e04620d49d3007771c0e0348deb23ca341c0245d610605dddb4ac65a37cb"}, + {file = "grpcio-1.58.0-cp38-cp38-win_amd64.whl", hash = "sha256:1c1c5238c6072470c7f1614bf7c774ffde6b346a100521de9ce791d1e4453afe"}, + {file = "grpcio-1.58.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fe643af248442221db027da43ed43e53b73e11f40c9043738de9a2b4b6ca7697"}, + {file = "grpcio-1.58.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:128eb1f8e70676d05b1b0c8e6600320fc222b3f8c985a92224248b1367122188"}, + {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:039003a5e0ae7d41c86c768ef8b3ee2c558aa0a23cf04bf3c23567f37befa092"}, + {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f061722cad3f9aabb3fbb27f3484ec9d4667b7328d1a7800c3c691a98f16bb0"}, + {file = "grpcio-1.58.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0af11938acf8cd4cf815c46156bcde36fa5850518120920d52620cc3ec1830"}, + {file = "grpcio-1.58.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d4cef77ad2fed42b1ba9143465856d7e737279854e444925d5ba45fc1f3ba727"}, + {file = "grpcio-1.58.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24765a627eb4d9288ace32d5104161c3654128fe27f2808ecd6e9b0cfa7fc8b9"}, + {file = "grpcio-1.58.0-cp39-cp39-win32.whl", hash = "sha256:f0241f7eb0d2303a545136c59bc565a35c4fc3b924ccbd69cb482f4828d6f31c"}, + {file = "grpcio-1.58.0-cp39-cp39-win_amd64.whl", hash = "sha256:dcfba7befe3a55dab6fe1eb7fc9359dc0c7f7272b30a70ae0af5d5b063842f28"}, + {file = "grpcio-1.58.0.tar.gz", hash = "sha256:532410c51ccd851b706d1fbc00a87be0f5312bd6f8e5dbf89d4e99c7f79d7499"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.56.0)"] +protobuf = ["grpcio-tools (>=1.58.0)"] [[package]] name = "grpcio-status" -version = "1.56.0" +version = "1.58.0" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-status-1.56.0.tar.gz", hash = "sha256:9eca0b2dcda0782d3702df225918efd6d820f75f93cd5c51c7fb6a4ffbfea12c"}, - {file = "grpcio_status-1.56.0-py3-none-any.whl", hash = "sha256:e5f101c96686e9d4e94a114567960fdb00052aa3c818b029745e3db37dc9c613"}, + {file = "grpcio-status-1.58.0.tar.gz", hash = "sha256:0b42e70c0405a66a82d9e9867fa255fe59e618964a6099b20568c31dd9099766"}, + {file = "grpcio_status-1.58.0-py3-none-any.whl", hash = "sha256:36d46072b71a00147709ebce49344ac59b4b8960942acf0f813a8a7d6c1c28e0"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.56.0" +grpcio = ">=1.58.0" protobuf = ">=4.21.6" [[package]] name = "grpcio-tools" -version = "1.56.0" +version = "1.58.0" description = "Protobuf code generator for gRPC" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-tools-1.56.0.tar.gz", hash = "sha256:39f5877cea514b3da9f2683dfb3ffb45ef47b05f4ff39c287d7d61c5057f48b8"}, - {file = "grpcio_tools-1.56.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:cdbae7312e6d132d38ec2c1611b8cafb783e0416cc5c6deae04efde5f16fb190"}, - {file = "grpcio_tools-1.56.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:5f5c416b88d76fbdb548cfee0486928748816b700ece6e591006e5b1dc67598f"}, - {file = "grpcio_tools-1.56.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:23e2ef1dc6a9bf766f091e2c52a68e54d0aff3548f94562e61fb0ac3874d514a"}, - {file = 
"grpcio_tools-1.56.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8870ab60f8a76b4a7e43184ee03d28112b976d83c43d41cec821f47b3a297da2"}, - {file = "grpcio_tools-1.56.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e59ab6c0bf4a8bb975553ad578d4425bd192775ae384f9406d77d31ad00f6efe"}, - {file = "grpcio_tools-1.56.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b309659534b5d930f9ab6d521670c2dd86cb6ef7f47f37f73f96557e2ec13a49"}, - {file = "grpcio_tools-1.56.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8115b416ea2cad8a87dc3aadfaf26da684e003c3770b12e7219b462505bb5b85"}, - {file = "grpcio_tools-1.56.0-cp310-cp310-win32.whl", hash = "sha256:e4cb62a521efbca4cb1ad50233aa400574b3daaf6eb26707d661a0afe8191d92"}, - {file = "grpcio_tools-1.56.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d59009ed52220eb2d62f5cefa4e58dec930fb92fab27bb390c4cf1d360ac7e1"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:cd69107705794e815a8b262722c6fea995911cb1dfc1310abf63b476165335d6"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:2d1ee9e13ce135a6ed451b428ef14af131dc7df2551a5344ff4f8aee2d9fab99"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:142530b9fdfabe04f0c7e5dacd45b6c419d39704fa439cc0aabf73ea0d8f916d"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b7a4eb5003a29eecd71707589f93ae7e8fa2e681366a811b3f86695055d8666"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa6d9bdd75d3625dae38372b43696e159c10aa98719b4302b1e94f1ff7878d47"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c43b4fe8c8df4c52d3106bba2cf427f0e46bbebb80e127fbbc3134db0fead7be"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:168940a4a955b6c65da978dbf62e1c36e3a311bb27f649fd201a228e2583a6d4"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-win32.whl", hash = "sha256:3a4b06169493f9454a7f2516c5d41b566d9734e553bbc505f2a7837f7f4a2df1"}, - {file = "grpcio_tools-1.56.0-cp311-cp311-win_amd64.whl", hash = "sha256:1bd361fcc967c21672ba855fc77ea0e7afa51664033a746df96545f84edc4670"}, - {file = "grpcio_tools-1.56.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:7e6bcb194b81e372411494d8ed69fab89aa3452b7275fce4f7917fbe7b04fb72"}, - {file = "grpcio_tools-1.56.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:02b23a12b91287ebea14b3685735d1d675e77c3cd365ec1771c3e9afbeba1ec6"}, - {file = "grpcio_tools-1.56.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:80d75856f8ec949847386ad2f56a460f21c63bf82ce99ca5b6aa512c0b875fb1"}, - {file = "grpcio_tools-1.56.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cffff0b4af80285fa49637d69b69d640eb775dc74b23635e4de5faad9e7e744"}, - {file = "grpcio_tools-1.56.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3de6c08b545920a39b31ed13305f946c00b19ac1b13d26119f111b6360f22ccf"}, - {file = "grpcio_tools-1.56.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:128bb13fe9a2681eeb08175f5fbc8e2d8953d7d0dd240e96f9244b9d2547a1aa"}, - {file = "grpcio_tools-1.56.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b57f7f01eafbfe3a293f2efffb675774dbe4074c4627975ec4dc4aa5766801fb"}, - {file = "grpcio_tools-1.56.0-cp37-cp37m-win_amd64.whl", hash = "sha256:282176066fb082ad21c403b84f9d6b440a20482e6f52b83bb2adf54d6fdcae9f"}, - {file 
= "grpcio_tools-1.56.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:d9b8d1c42854d3433c058795f52b1418b53dd8c1e9811fecb1312202e803a2c5"}, - {file = "grpcio_tools-1.56.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:accf713f51da74b1a18aa4b31df0ab135510704661f735a938081777b79a4c25"}, - {file = "grpcio_tools-1.56.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:ac33fd2d02d24101ea389be8e05b928acb58be56403d4ebc3aecfab473fa4a25"}, - {file = "grpcio_tools-1.56.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4acdc7b957abfd76581717f0ac8e4408e0a85b7d0ac8d2cdf4d964f16926b897"}, - {file = "grpcio_tools-1.56.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79291bfb1fe5f21d99f4839f43d3c5d44c5402c830a24dbb2811d785dd21264b"}, - {file = "grpcio_tools-1.56.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0a8767e4de0f573c678313c5de075ac0e163a192bb135018e45015a22f234387"}, - {file = "grpcio_tools-1.56.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96fe2f7f5805d88cb7f2e3e3502550b2883dfab0f9efcf3cbd444942cf2ee1da"}, - {file = "grpcio_tools-1.56.0-cp38-cp38-win32.whl", hash = "sha256:21cf32ccffd4f1800b0dcdf58aa1fc7f626795c9da784c3d817c944edcf2d3ae"}, - {file = "grpcio_tools-1.56.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3ab1a9fad636302f7307d143f64a9fbd11bc041652bf53bb016006e9a5ca820"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:8989d363ac1996238fee61c8f5663f15a8fc362cb1e758c4a686b76cb457cd70"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:11cdd9cbf0c09c3a761c6f59dfd7128104be7cd393334efe386d4fc3f990ee1a"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5fd4c005a4afec16578849bc522ddf3298d6d499b3d37bf51314b086c714cdd5"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7302acaa07cf4966c926fcd6a60c8d30a697f730c38168bf83e1519b464115b"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c1c43d185ebf904c3deec23c36ca2ba4e95db999cf00fc8f85eda4551622a26"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b12bb8c1d408ae40e4c806a3a8ebda2d107310e46696e1da13d0dc3f91fbd19d"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:781cf09e4d5c9288708f6ec9c3eae64d9d5a0f4c46c7ebe70ebb7ab4f6384789"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-win32.whl", hash = "sha256:c62f07452dee3f1ed23aeaef821797c5e516f79535e97fe6a6b0a0ee8db1cc91"}, - {file = "grpcio_tools-1.56.0-cp39-cp39-win_amd64.whl", hash = "sha256:7f063443870650e55012fdb3a58ff4ce5f4042b81dad6b749333ee8146157511"}, + {file = "grpcio-tools-1.58.0.tar.gz", hash = "sha256:6f4d80ceb591e31ca4dceec747dbe56132e1392a0a9bb1c8fe001d1b5cac898a"}, + {file = "grpcio_tools-1.58.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:60c874908f3b40f32f1bb0221f7b3ab65ecb53a4d0a9f0a394f031f1b292c177"}, + {file = "grpcio_tools-1.58.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:1852e798f31e5437ca7b37abc910e028b34732fb19364862cedb87b1dab66fad"}, + {file = "grpcio_tools-1.58.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:149fb48f53cb691a6328f68bed8e4036c730f7106b7f98e92c2c0403f0b9e93c"}, + {file = "grpcio_tools-1.58.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba3d383e5ca93826038b70f326fce8e8d12dd9b2f64d363a3d612f7475f12dd2"}, + {file = 
"grpcio_tools-1.58.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6997511e9d2979f7a2389479682dbb06823f21a904e8fb0a5c6baaf1b4b4a863"}, + {file = "grpcio_tools-1.58.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8de0b701da479643f71fad71fe66885cddd89441ae16e2c724939b47742dc72e"}, + {file = "grpcio_tools-1.58.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43cc23908b63fcaefe690b10f68a2d8652c994b5b36ab77d2271d9608c895320"}, + {file = "grpcio_tools-1.58.0-cp310-cp310-win32.whl", hash = "sha256:2c2221123d010dc6231799e63a37f2f4786bf614ef65b23009c387cd20d8b193"}, + {file = "grpcio_tools-1.58.0-cp310-cp310-win_amd64.whl", hash = "sha256:df2788736bdf58abe7b0e4d6b1ff806f7686c98c5ad900da312252e3322d91c4"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:b6ea5578712cdb29b0ff60bfc6405bf0e8d681b9c71d106dd1cda54fe7fe4e55"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c29880f491581c83181c0a84a4d11402af2b13166a5266f64e246adf1da7aa66"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:32d51e933c3565414dd0835f930bb28a1cdeba435d9d2c87fa3cf8b1d284db3c"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ad9d77f25514584b1ddc981d70c9e50dfcfc388aa5ba943eee67520c5267ed9"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4882382631e6352819059278a5c878ce0b067008dd490911d16d5616e8a36d85"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d84091a189d848d94645b7c48b61734c12ec03b0d46e5fc0049343a26989ac5c"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:85ac28a9621e9b92a3fc416288c4ce45542db0b4c31b3e23031dd8e0a0ec5590"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-win32.whl", hash = "sha256:7371d8ea80234b29affec145e25569523f549520ed7e53b2aa92bed412cdecfd"}, + {file = "grpcio_tools-1.58.0-cp311-cp311-win_amd64.whl", hash = "sha256:6997df6e7c5cf4d3ddc764240c1ff6a04b45d70ec28913b38fbc6396ef743e12"}, + {file = "grpcio_tools-1.58.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:ac65b8d6e3acaf88b815edf9af88ff844b6600ff3d2591c05ba4f655b45d5fb4"}, + {file = "grpcio_tools-1.58.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:88e8191d0dd789bebf42533808728f5ce75d2c51e2a72bdf20abe5b5e3fbec42"}, + {file = "grpcio_tools-1.58.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a3dbece2a121761499a659b799979d4b738586d1065439053de553773eee11ca"}, + {file = "grpcio_tools-1.58.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1086fe240c4c879b9721952b47d46996deb283c2d9355a8dc24a804811aacf70"}, + {file = "grpcio_tools-1.58.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ae3dca059d5b358dd03fb63277428fa7d771605d4074a019138dd38d70719a"}, + {file = "grpcio_tools-1.58.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f8904ac7fc3da2e874f00b3a986e8b7e004f499344a8e7eb213c26dfb025041"}, + {file = "grpcio_tools-1.58.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aadbd8393ae332e49731adb31e741f2e689989150569b7acc939f5ea43124e2d"}, + {file = "grpcio_tools-1.58.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1cb6e24194786687d4f23c64de1f0ce553af51de22746911bc37340f85f9783e"}, + {file = "grpcio_tools-1.58.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:6ec43909095c630df3e479e77469bdad367067431f4af602f6ccb978a3b78afd"}, + {file = 
"grpcio_tools-1.58.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:4be49ed320b0ebcbc21d19ef555fbf229c1c452105522b728e1171ee2052078e"}, + {file = "grpcio_tools-1.58.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:28eefebddec3d3adf19baca78f8b82a2287d358e1b1575ae018cdca8eacc6269"}, + {file = "grpcio_tools-1.58.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ef8c696e9d78676cc3f583a92bbbf2c84e94e350f7ad22f150a52559f4599d1"}, + {file = "grpcio_tools-1.58.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aeb5949e46558d21c51fd3ec3eeecc59c94dbca76c67c0a80d3da6b7437930c"}, + {file = "grpcio_tools-1.58.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f7144aad9396d35fb1b80429600a970b559c2ad4d07020eeb180fe83cea2bee"}, + {file = "grpcio_tools-1.58.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ee26e9253a721fff355737649678535f76cf5d642aa3ac0cd937832559b90af"}, + {file = "grpcio_tools-1.58.0-cp38-cp38-win32.whl", hash = "sha256:343f572312039059a8797d6e29a7fc62196e73131ab01755660a9d48202267c1"}, + {file = "grpcio_tools-1.58.0-cp38-cp38-win_amd64.whl", hash = "sha256:cd7acfbb43b7338a78cf4a67528d05530d574d92b7c829d185b78dfc451d158f"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:46628247fbce86d18232eead24bd22ed0826c79f3fe2fc2fbdbde45971361049"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:51587842a54e025a3d0d37afcf4ef2b7ac1def9a5d17448665cb424b53d6c287"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:a062ae3072a2a39a3c057f4d68b57b021f1dd2956cd09aab39709f6af494e1de"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eec3c93a08df11c80ef1c29a616bcbb0d83dbc6ea41b48306fcacc720416dfa7"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b63f823ac991ff77104da614d2a2485a59d37d57830eb2e387a6e2a3edc7fa2b"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:579c11a9f198847ed48dbc4f211c67fe96a73320b87c81f01b044b72e24a7d77"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2fc1dd8049d417a5034d944c9df05cee76f855b3e431627ab4292e7c01c47"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-win32.whl", hash = "sha256:453023120114c35d3d9d6717ea0820e5d5c140f51f9d0b621de4397ff854471b"}, + {file = "grpcio_tools-1.58.0-cp39-cp39-win_amd64.whl", hash = "sha256:b6c896f1df99c35cf062d4803c15663ff00a33ff09add28baa6e475cf6b5e258"}, ] [package.dependencies] -grpcio = ">=1.56.0" +grpcio = ">=1.58.0" protobuf = ">=4.21.6,<5.0dev" setuptools = "*" @@ -1411,67 +1557,68 @@ files = [ [[package]] name = "httpcore" -version = "0.17.3" +version = "1.0.2" description = "A minimal low-level HTTP client." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, - {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, + {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, + {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, ] [package.dependencies] -anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = "==1.*" [package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httptools" -version = "0.6.0" +version = "0.6.1" description = "A collection of framework independent HTTP protocol utils." optional = false -python-versions = ">=3.5.0" -files = [ - {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:818325afee467d483bfab1647a72054246d29f9053fd17cc4b86cda09cc60339"}, - {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72205730bf1be875003692ca54a4a7c35fac77b4746008966061d9d41a61b0f5"}, - {file = "httptools-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33eb1d4e609c835966e969a31b1dedf5ba16b38cab356c2ce4f3e33ffa94cad3"}, - {file = "httptools-0.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdc6675ec6cb79d27e0575750ac6e2b47032742e24eed011b8db73f2da9ed40"}, - {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:463c3bc5ef64b9cf091be9ac0e0556199503f6e80456b790a917774a616aff6e"}, - {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82f228b88b0e8c6099a9c4757ce9fdbb8b45548074f8d0b1f0fc071e35655d1c"}, - {file = "httptools-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:0781fedc610293a2716bc7fa142d4c85e6776bc59d617a807ff91246a95dea35"}, - {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:721e503245d591527cddd0f6fd771d156c509e831caa7a57929b55ac91ee2b51"}, - {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:274bf20eeb41b0956e34f6a81f84d26ed57c84dd9253f13dcb7174b27ccd8aaf"}, - {file = "httptools-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:259920bbae18740a40236807915def554132ad70af5067e562f4660b62c59b90"}, - {file = "httptools-0.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfd2ae8a2d532952ac54445a2fb2504c804135ed28b53fefaf03d3a93eb1fd"}, - {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f959e4770b3fc8ee4dbc3578fd910fab9003e093f20ac8c621452c4d62e517cb"}, - {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e22896b42b95b3237eccc42278cd72c0df6f23247d886b7ded3163452481e38"}, - {file = "httptools-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:38f3cafedd6aa20ae05f81f2e616ea6f92116c8a0f8dcb79dc798df3356836e2"}, - {file = "httptools-0.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47043a6e0ea753f006a9d0dd076a8f8c99bc0ecae86a0888448eb3076c43d717"}, - {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:35a541579bed0270d1ac10245a3e71e5beeb1903b5fbbc8d8b4d4e728d48ff1d"}, - {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65d802e7b2538a9756df5acc062300c160907b02e15ed15ba035b02bce43e89c"}, - {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:26326e0a8fe56829f3af483200d914a7cd16d8d398d14e36888b56de30bec81a"}, - {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e41ccac9e77cd045f3e4ee0fc62cbf3d54d7d4b375431eb855561f26ee7a9ec4"}, - {file = "httptools-0.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e748fc0d5c4a629988ef50ac1aef99dfb5e8996583a73a717fc2cac4ab89932"}, - {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cf8169e839a0d740f3d3c9c4fa630ac1a5aaf81641a34575ca6773ed7ce041a1"}, - {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5dcc14c090ab57b35908d4a4585ec5c0715439df07be2913405991dbb37e049d"}, - {file = "httptools-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0b0571806a5168013b8c3d180d9f9d6997365a4212cb18ea20df18b938aa0b"}, - {file = "httptools-0.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb4a608c631f7dcbdf986f40af7a030521a10ba6bc3d36b28c1dc9e9035a3c0"}, - {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93f89975465133619aea8b1952bc6fa0e6bad22a447c6d982fc338fbb4c89649"}, - {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:73e9d66a5a28b2d5d9fbd9e197a31edd02be310186db423b28e6052472dc8201"}, - {file = "httptools-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:22c01fcd53648162730a71c42842f73b50f989daae36534c818b3f5050b54589"}, - {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f96d2a351b5625a9fd9133c95744e8ca06f7a4f8f0b8231e4bbaae2c485046a"}, - {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72ec7c70bd9f95ef1083d14a755f321d181f046ca685b6358676737a5fecd26a"}, - {file = "httptools-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b703d15dbe082cc23266bf5d9448e764c7cb3fcfe7cb358d79d3fd8248673ef9"}, - {file = "httptools-0.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82c723ed5982f8ead00f8e7605c53e55ffe47c47465d878305ebe0082b6a1755"}, - {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b0a816bb425c116a160fbc6f34cece097fd22ece15059d68932af686520966bd"}, - {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dea66d94e5a3f68c5e9d86e0894653b87d952e624845e0b0e3ad1c733c6cc75d"}, - {file = "httptools-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:23b09537086a5a611fad5696fc8963d67c7e7f98cb329d38ee114d588b0b74cd"}, - {file = "httptools-0.6.0.tar.gz", hash = "sha256:9fc6e409ad38cbd68b177cd5158fc4042c796b82ca88d99ec78f07bed6c6b796"}, +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, ] [package.extras] @@ -1479,19 +1626,20 @@ test = ["Cython (>=0.29.24,<0.30.0)"] [[package]] name = "httpx" -version = "0.24.1" +version = "0.25.2" description = "The next generation HTTP client." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, - {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, + {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, + {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, ] [package.dependencies] +anyio = "*" certifi = "*" h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} -httpcore = ">=0.15.0,<0.18.0" +httpcore = "==1.*" idna = "*" sniffio = "*" @@ -1503,18 +1651,18 @@ socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" -version = "0.16.4" +version = "0.19.4" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.16.4-py3-none-any.whl", hash = "sha256:0d3df29932f334fead024afc7cb4cc5149d955238b8b5e42dcf9740d6995a349"}, - {file = "huggingface_hub-0.16.4.tar.gz", hash = "sha256:608c7d4f3d368b326d1747f91523dbd1f692871e8e2e7a4750314a2dd8b63e14"}, + {file = "huggingface_hub-0.19.4-py3-none-any.whl", hash = "sha256:dba013f779da16f14b606492828f3760600a1e1801432d09fe1c33e50b825bb5"}, + {file = "huggingface_hub-0.19.4.tar.gz", hash = "sha256:176a4fc355a851c17550e7619488f383189727eab209534d7cef2114dae77b22"}, ] [package.dependencies] filelock = "*" -fsspec = "*" +fsspec = ">=2023.5.0" packaging = ">=20.9" pyyaml = ">=5.1" requests = "*" @@ -1522,16 +1670,17 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "black (>=23.1,<24.0)", "gradio", "jedi", "mypy (==0.982)", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "urllib3 (<2.0)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +docs = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", 
"hf-doc-builder", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)", "watchdog"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -inference = ["aiohttp", "pydantic"] -quality = ["black (>=23.1,<24.0)", "mypy (==0.982)", "ruff (>=0.0.241)"] +inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] torch = ["torch"] -typing = ["pydantic", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] [[package]] name = "humanfriendly" @@ -1560,13 +1709,13 @@ files = [ [[package]] name = "identify" -version = "2.5.27" +version = "2.5.33" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.27-py2.py3-none-any.whl", hash = "sha256:fdb527b2dfe24602809b2201e033c2a113d7bdf716db3ca8e3243f735dcecaba"}, - {file = "identify-2.5.27.tar.gz", hash = "sha256:287b75b04a0e22d727bc9a41f0d4f3c1bcada97490fa6eabb5b28f0e9097e733"}, + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, ] [package.extras] @@ -1574,51 +1723,51 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "6.11.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = 
"importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" -version = "5.13.0" +version = "6.1.1" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, - {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, + {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, + {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -1633,13 +1782,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.25.2" +version = "6.27.1" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.25.2-py3-none-any.whl", hash = "sha256:2e2ee359baba19f10251b99415bb39de1e97d04e1fab385646f24f0596510b77"}, - {file = "ipykernel-6.25.2.tar.gz", hash = "sha256:f468ddd1f17acb48c8ce67fcfa49ba6d46d4f9ac0438c1f441be7c3d1372230b"}, + {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, + {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, ] [package.dependencies] @@ -1666,13 +1815,13 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.12.2" +version = "8.12.3" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.8" files = [ - {file = "ipython-8.12.2-py3-none-any.whl", hash = "sha256:ea8801f15dfe4ffb76dea1b09b847430ffd70d827b41735c64a0638a04103bfc"}, - {file = "ipython-8.12.2.tar.gz", hash = "sha256:c7b80eb7f5a855a88efc971fda506ff7a91c280b42cdae26643e0f601ea281ea"}, + {file = "ipython-8.12.3-py3-none-any.whl", hash = 
"sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, + {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, ] [package.dependencies] @@ -1719,13 +1868,13 @@ six = "*" [[package]] name = "jedi" -version = "0.19.0" +version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.19.0-py2.py3-none-any.whl", hash = "sha256:cb8ce23fbccff0025e9386b5cf85e892f94c9b822378f8da49970471335ac64e"}, - {file = "jedi-0.19.0.tar.gz", hash = "sha256:bcf9894f1753969cbac8022a8c2eaee06bfa3724e4192470aaffe7eb6272b0c4"}, + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, ] [package.dependencies] @@ -1734,17 +1883,17 @@ parso = ">=0.8.3,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1766,13 +1915,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.19.0" +version = "4.20.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.0-py3-none-any.whl", hash = "sha256:043dc26a3845ff09d20e4420d6012a9c91c9aa8999fa184e7efcfeccb41e32cb"}, - {file = "jsonschema-4.19.0.tar.gz", hash = "sha256:6e1e7569ac13be8139b2dd2c21a55d350066ee3f80df06c608b398cdc6f30e8f"}, + {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, + {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, ] [package.dependencies] @@ -1787,6 +1936,23 @@ rpds-py = ">=0.7.1" format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +[[package]] +name = "jsonschema-path" +version = "0.3.2" +description = "JSONSchema Spec with object-oriented paths" +optional = false +python-versions = ">=3.8.0,<4.0.0" +files = [ + {file = "jsonschema_path-0.3.2-py3-none-any.whl", hash = "sha256:271aedfefcd161a0f467bdf23e1d9183691a61eaabf4b761046a914e369336c7"}, + {file = "jsonschema_path-0.3.2.tar.gz", hash = "sha256:4d0dababf341e36e9b91a5fb2a3e3fd300b0150e7fe88df4e55cc8253c5a3989"}, +] + +[package.dependencies] +pathable = ">=0.4.1,<0.5.0" +PyYAML = ">=5.1" +referencing = ">=0.28.0,<0.32.0" +requests = ">=2.31.0,<3.0.0" + [[package]] name = "jsonschema-spec" version = "0.2.4" @@ -1821,13 +1987,13 @@ referencing = ">=0.28.0" [[package]] name = "jupyter-client" -version = "8.3.0" +version = "8.6.0" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, - {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, + {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, + {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, ] [package.dependencies] @@ -1844,13 +2010,13 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.3.1" +version = "5.5.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, - {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, + {file = "jupyter_core-5.5.1-py3-none-any.whl", hash = "sha256:220dfb00c45f0d780ce132bb7976b58263f81a3ada6e90a9b6823785a424f739"}, + {file = "jupyter_core-5.5.1.tar.gz", hash = "sha256:1553311a97ccd12936037f36b9ab4d6ae8ceea6ad2d5c90d94a909e752178e40"}, ] [package.dependencies] @@ -1859,73 +2025,64 @@ pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_ traitlets = ">=5.3" [package.extras] -docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "lazy-object-proxy" -version = "1.9.0" +version = "1.10.0" description = "A fast and thorough lazy object proxy." optional = false -python-versions = ">=3.7" -files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - 
{file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, -] - -[[package]] -name = "lit" -version = "16.0.6" -description = "A Software Testing Tool" -optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "lit-16.0.6.tar.gz", hash = "sha256:84623c9c23b6b14763d637f4e63e6b721b3446ada40bf7001d8fee70b8e77a9a"}, + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = 
"lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, ] [[package]] name = "loguru" -version = "0.7.0" +version = "0.7.2" description = "Python logging made (stupidly) simple" optional = false python-versions = ">=3.5" files = [ - {file = "loguru-0.7.0-py3-none-any.whl", hash = "sha256:b93aa30099fa6860d4727f1b81f8718e965bb96253fa190fab2077aaad6d15d3"}, - {file = "loguru-0.7.0.tar.gz", hash = "sha256:1612053ced6ae84d7959dd7d5e431a0532642237ec21f7fd83ac73fe539e03e1"}, + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = 
"sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, ] [package.dependencies] @@ -1933,7 +2090,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==5.3.0)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v0.990)", "pre-commit (==3.2.1)", "pytest (==6.1.2)", "pytest (==7.2.1)", "pytest-cov (==2.12.1)", "pytest-cov (==4.0.0)", "pytest-mypy-plugins (==1.10.1)", "pytest-mypy-plugins (==1.9.3)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.2.0)", "tox (==3.27.1)", "tox (==4.4.6)"] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] [[package]] name = "markupsafe" @@ -2040,15 +2197,15 @@ traitlets = "*" [[package]] name = "milvus" -version = "2.2.13" +version = "2.2.16" description = "Embeded Milvus" optional = false python-versions = ">=3.6" files = [ - {file = "milvus-2.2.13-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:fcaff6cdc885ab46f432b79294a298e2cac542ffdbcb8c61be5f4c1f1c27dbeb"}, - {file = "milvus-2.2.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:58dfe6b2630a981475c56dc06fe414d2d821c94651b3341e62e7de8bc2850ec9"}, - {file = "milvus-2.2.13-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9f95afcdb7b8912ae7c8a02604ed3a057462df25e6fa22112db1ead828e371a1"}, - {file = "milvus-2.2.13-py3-none-win_amd64.whl", hash = "sha256:03c55043321a72d41d2dcb49745836f10156c61bfc9c48fa58345ae27dbbf164"}, + {file = "milvus-2.2.16-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:ada1fef174aa42d5117b4a784a48f9106141df8e799bccd7092ad88f4a34f67c"}, + {file = "milvus-2.2.16-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8df249e791758a94a3e3b6606e6efa6cda136eadf718dd37053b58729173f64e"}, + {file = "milvus-2.2.16-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea5bd32251b7bcad629d7e46c307414a586c96b4a50491d04a08db42fca8fba9"}, + {file = "milvus-2.2.16-py3-none-win_amd64.whl", hash = "sha256:f906990a6d7bbb5955bb03541992d0072ed213f8665c1101b003cea995cb97d9"}, ] [package.extras] @@ -2056,17 +2213,19 @@ client = ["pymilvus (>=2.2.0,!=2.2.14,<2.3.0)"] [[package]] name = "minio" -version = "7.1.17" +version = "7.2.0" description = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" optional = false python-versions = "*" files = [ - {file = "minio-7.1.17-py3-none-any.whl", hash = "sha256:0aa525d77a3bc61378444c2400b0ba2685ad4cd6ecb3fba4141a0d0765e25f40"}, - {file = "minio-7.1.17.tar.gz", hash = "sha256:b0b687c1ec9be422a1f8b04c65fb8e43a1c090f9508178db57c434a17341c404"}, + {file = "minio-7.2.0-py3-none-any.whl", hash = "sha256:10656272c16156fa08436ce2b27e25e4134ef5142a8c259513ee26fb514531a6"}, + {file = "minio-7.2.0.tar.gz", hash = "sha256:4b015b018d10c1505f7c3e724fa7c2267760ac7bee6463a624cbf22cd272877b"}, ] [package.dependencies] +argon2-cffi = "*" certifi = "*" +pycryptodome = "*" urllib3 = "*" [[package]] @@ -2093,13 +2252,13 @@ files = [ [[package]] name = "motor" -version = "3.3.1" +version = "3.3.2" 
description = "Non-blocking MongoDB driver for Tornado or asyncio" optional = false python-versions = ">=3.7" files = [ - {file = "motor-3.3.1-py3-none-any.whl", hash = "sha256:a0dee83ad0d47b353932ac37467ba397b1e649ce7e3eea7f5a90554883d7cdbe"}, - {file = "motor-3.3.1.tar.gz", hash = "sha256:c5eb400e27d722a3db03a9826656b6d13acf9b6c70c2fb4604f474eac9da5be4"}, + {file = "motor-3.3.2-py3-none-any.whl", hash = "sha256:6fe7e6f0c4f430b9e030b9d22549b732f7c2226af3ab71ecc309e4a1b7d19953"}, + {file = "motor-3.3.2.tar.gz", hash = "sha256:d2fc38de15f1c8058f389c1a44a4d4105c0405c48c061cd492a654496f7bc26a"}, ] [package.dependencies] @@ -2112,7 +2271,7 @@ gssapi = ["pymongo[gssapi] (>=4.5,<5)"] ocsp = ["pymongo[ocsp] (>=4.5,<5)"] snappy = ["pymongo[snappy] (>=4.5,<5)"] srv = ["pymongo[srv] (>=4.5,<5)"] -test = ["aiohttp", "mockupdb", "motor[encryption]", "pytest (>=7)", "tornado (>=5)"] +test = ["aiohttp (<3.8.6)", "mockupdb", "motor[encryption]", "pytest (>=7)", "tornado (>=5)"] zstd = ["pymongo[zstd] (>=4.5,<5)"] [[package]] @@ -2134,13 +2293,13 @@ tests = ["pytest (>=4.6)"] [[package]] name = "msal" -version = "1.23.0" +version = "1.26.0" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." optional = false -python-versions = "*" +python-versions = ">=2.7" files = [ - {file = "msal-1.23.0-py2.py3-none-any.whl", hash = "sha256:3342e0837a047007f9d479e814b559c3219767453d57920dc40a31986862048b"}, - {file = "msal-1.23.0.tar.gz", hash = "sha256:25c9a33acf84301f93d1fdbe9f1a9c60cd38af0d5fffdbfa378138fc7bc1e86b"}, + {file = "msal-1.26.0-py2.py3-none-any.whl", hash = "sha256:be77ba6a8f49c9ff598bbcdc5dfcf1c9842f3044300109af738e8c3e371065b5"}, + {file = "msal-1.26.0.tar.gz", hash = "sha256:224756079fe338be838737682b49f8ebc20a87c1c5eeaf590daae4532b83de15"}, ] [package.dependencies] @@ -2153,20 +2312,21 @@ broker = ["pymsalruntime (>=0.13.2,<0.14)"] [[package]] name = "msal-extensions" -version = "1.0.0" +version = "1.1.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"}, - {file = "msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"}, + {file = "msal-extensions-1.1.0.tar.gz", hash = "sha256:6ab357867062db7b253d0bd2df6d411c7891a0ee7308d54d1e4317c1d1c54252"}, + {file = "msal_extensions-1.1.0-py3-none-any.whl", hash = "sha256:01be9711b4c0b1a151450068eeb2c4f0997df3bba085ac299de3a66f585e382f"}, ] [package.dependencies] msal = ">=0.4.1,<2.0.0" +packaging = "*" portalocker = [ - {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, - {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, + {version = ">=1.0,<3", markers = "platform_system != \"Windows\""}, + {version = ">=1.6,<3", markers = "platform_system == \"Windows\""}, ] [[package]] @@ -2265,13 +2425,13 @@ files = [ [[package]] name = "nest-asyncio" -version = "1.5.7" +version = "1.5.8" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, - {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, ] [[package]] @@ -2369,194 +2529,177 @@ files = [ ] [[package]] -name = "nvidia-cublas-cu11" -version = "11.10.3.66" +name = "nvidia-cublas-cu12" +version = "12.1.3.1" description = "CUBLAS native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl", hash = "sha256:d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded"}, - {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-win_amd64.whl", hash = "sha256:8ac17ba6ade3ed56ab898a036f9ae0756f1e81052a317bf98f8c6d18dc3ae49e"}, + {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, + {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, ] -[package.dependencies] -setuptools = "*" -wheel = "*" - [[package]] -name = "nvidia-cuda-cupti-cu11" -version = "11.7.101" +name = "nvidia-cuda-cupti-cu12" +version = "12.1.105" description = "CUDA profiling tools runtime libs." 
optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_cupti_cu11-11.7.101-py3-none-manylinux1_x86_64.whl", hash = "sha256:e0cfd9854e1f2edaa36ca20d21cd0bdd5dcfca4e3b9e130a082e05b33b6c5895"}, - {file = "nvidia_cuda_cupti_cu11-11.7.101-py3-none-win_amd64.whl", hash = "sha256:7cc5b8f91ae5e1389c3c0ad8866b3b016a175e827ea8f162a672990a402ab2b0"}, + {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, + {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, ] -[package.dependencies] -setuptools = "*" -wheel = "*" - [[package]] -name = "nvidia-cuda-nvrtc-cu11" -version = "11.7.99" +name = "nvidia-cuda-nvrtc-cu12" +version = "12.1.105" description = "NVRTC native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03"}, - {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:f7d9610d9b7c331fa0da2d1b2858a4a8315e6d49765091d28711c8946e7425e7"}, - {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:f2effeb1309bdd1b3854fc9b17eaf997808f8b25968ce0c7070945c4265d64a3"}, + {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, + {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, ] -[package.dependencies] -setuptools = "*" -wheel = "*" - [[package]] -name = "nvidia-cuda-runtime-cu11" -version = "11.7.99" +name = "nvidia-cuda-runtime-cu12" +version = "12.1.105" description = "CUDA Runtime native Libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31"}, - {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:bc77fa59a7679310df9d5c70ab13c4e34c64ae2124dd1efd7e5474b71be125c7"}, + {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, + {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, ] -[package.dependencies] -setuptools = "*" -wheel = "*" - [[package]] -name = "nvidia-cudnn-cu11" -version = "8.5.0.96" +name = "nvidia-cudnn-cu12" +version = "8.9.2.26" description = "cuDNN runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7"}, - {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"}, + {file = "nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9"}, ] [package.dependencies] -setuptools = "*" -wheel = "*" +nvidia-cublas-cu12 = "*" [[package]] -name = "nvidia-cufft-cu11" -version = "10.9.0.58" +name = "nvidia-cufft-cu12" +version = 
"11.0.2.54" description = "CUFFT native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl", hash = "sha256:222f9da70c80384632fd6035e4c3f16762d64ea7a843829cb278f98b3cb7dd81"}, - {file = "nvidia_cufft_cu11-10.9.0.58-py3-none-win_amd64.whl", hash = "sha256:c4d316f17c745ec9c728e30409612eaf77a8404c3733cdf6c9c1569634d1ca03"}, + {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, + {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, ] [[package]] -name = "nvidia-curand-cu11" -version = "10.2.10.91" +name = "nvidia-curand-cu12" +version = "10.3.2.106" description = "CURAND native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_curand_cu11-10.2.10.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:eecb269c970fa599a2660c9232fa46aaccbf90d9170b96c462e13bcb4d129e2c"}, - {file = "nvidia_curand_cu11-10.2.10.91-py3-none-win_amd64.whl", hash = "sha256:f742052af0e1e75523bde18895a9ed016ecf1e5aa0ecddfcc3658fd11a1ff417"}, + {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, + {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, ] -[package.dependencies] -setuptools = "*" -wheel = "*" - [[package]] -name = "nvidia-cusolver-cu11" -version = "11.4.0.1" +name = "nvidia-cusolver-cu12" +version = "11.4.5.107" description = "CUDA solver native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cusolver_cu11-11.4.0.1-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:72fa7261d755ed55c0074960df5904b65e2326f7adce364cbe4945063c1be412"}, - {file = "nvidia_cusolver_cu11-11.4.0.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:700b781bfefd57d161443aff9ace1878584b93e0b2cfef3d6e9296d96febbf99"}, - {file = "nvidia_cusolver_cu11-11.4.0.1-py3-none-win_amd64.whl", hash = "sha256:00f70b256add65f8c1eb3b6a65308795a93e7740f6df9e273eccbba770d370c4"}, + {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, + {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, ] [package.dependencies] -setuptools = "*" -wheel = "*" +nvidia-cublas-cu12 = "*" +nvidia-cusparse-cu12 = "*" +nvidia-nvjitlink-cu12 = "*" [[package]] -name = "nvidia-cusparse-cu11" -version = "11.7.4.91" +name = "nvidia-cusparse-cu12" +version = "12.1.0.106" description = "CUSPARSE native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cusparse_cu11-11.7.4.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:a3389de714db63321aa11fbec3919271f415ef19fda58aed7f2ede488c32733d"}, - {file = "nvidia_cusparse_cu11-11.7.4.91-py3-none-win_amd64.whl", hash = "sha256:304a01599534f5186a8ed1c3756879282c72c118bc77dd890dc1ff868cad25b9"}, + {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, + {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = 
"sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, ] [package.dependencies] -setuptools = "*" -wheel = "*" +nvidia-nvjitlink-cu12 = "*" [[package]] -name = "nvidia-nccl-cu11" -version = "2.14.3" +name = "nvidia-nccl-cu12" +version = "2.19.3" description = "NVIDIA Collective Communication Library (NCCL) Runtime" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nccl_cu11-2.14.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:5e5534257d1284b8e825bc3a182c6f06acd6eb405e9f89d49340e98cd8f136eb"}, + {file = "nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:a9734707a2c96443331c1e48c717024aa6678a0e2a4cb66b2c364d18cee6b48d"}, ] [[package]] -name = "nvidia-nvtx-cu11" -version = "11.7.91" -description = "NVIDIA Tools Extension" +name = "nvidia-nvjitlink-cu12" +version = "12.3.101" +description = "Nvidia JIT LTO Library" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nvtx_cu11-11.7.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:b22c64eee426a62fc00952b507d6d29cf62b4c9df7a480fcc417e540e05fd5ac"}, - {file = "nvidia_nvtx_cu11-11.7.91-py3-none-win_amd64.whl", hash = "sha256:dfd7fcb2a91742513027d63a26b757f38dd8b07fecac282c4d132a9d373ff064"}, + {file = "nvidia_nvjitlink_cu12-12.3.101-py3-none-manylinux1_x86_64.whl", hash = "sha256:64335a8088e2b9d196ae8665430bc6a2b7e6ef2eb877a9c735c804bd4ff6467c"}, + {file = "nvidia_nvjitlink_cu12-12.3.101-py3-none-win_amd64.whl", hash = "sha256:1b2e317e437433753530792f13eece58f0aec21a2b05903be7bffe58a606cbd1"}, ] -[package.dependencies] -setuptools = "*" -wheel = "*" +[[package]] +name = "nvidia-nvtx-cu12" +version = "12.1.105" +description = "NVIDIA Tools Extension" +optional = false +python-versions = ">=3" +files = [ + {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, + {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, +] [[package]] name = "onnxruntime" -version = "1.15.1" +version = "1.16.3" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" optional = false python-versions = "*" files = [ - {file = "onnxruntime-1.15.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:baad59e6a763237fa39545325d29c16f98b8a45d2dfc524c67631e2e3ba44d16"}, - {file = "onnxruntime-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:568c2db848f619a0a93e843c028e9fb4879929d40b04bd60f9ba6eb8d2e93421"}, - {file = "onnxruntime-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69088d7784bb04dedfd9e883e2c96e4adf8ae0451acdd0abb78d68f59ecc6d9d"}, - {file = "onnxruntime-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cef43737b2cd886d5d718d100f56ec78c9c476c5db5f8f946e95024978fe754"}, - {file = "onnxruntime-1.15.1-cp310-cp310-win32.whl", hash = "sha256:79d7e65abb44a47c633ede8e53fe7b9756c272efaf169758c482c983cca98d7e"}, - {file = "onnxruntime-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:8bc4c47682933a7a2c79808688aad5f12581305e182be552de50783b5438e6bd"}, - {file = "onnxruntime-1.15.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:652b2cb777f76446e3cc41072dd3d1585a6388aeff92b9de656724bc22e241e4"}, - {file = "onnxruntime-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89b86dbed15740abc385055a29c9673a212600248d702737ce856515bdeddc88"}, - {file = 
"onnxruntime-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5cdd9ee748149a57f4cdfa67187a0d68f75240645a3c688299dcd08742cc98"}, - {file = "onnxruntime-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f748cce6a70ed38c19658615c55f4eedb9192765a4e9c4bd2682adfe980698d"}, - {file = "onnxruntime-1.15.1-cp311-cp311-win32.whl", hash = "sha256:e0312046e814c40066e7823da58075992d51364cbe739eeeb2345ec440c3ac59"}, - {file = "onnxruntime-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:f0980969689cb956c22bd1318b271e1be260060b37f3ddd82c7d63bd7f2d9a79"}, - {file = "onnxruntime-1.15.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:345986cfdbd6f4b20a89b6a6cd9abd3e2ced2926ae0b6e91fefa8149f95c0f09"}, - {file = "onnxruntime-1.15.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d7b3ad75e040f1e95757f69826a11051737b31584938a26d466a0234c6de98"}, - {file = "onnxruntime-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3603d07b829bcc1c14963a76103e257aade8861eb208173b300cc26e118ec2f8"}, - {file = "onnxruntime-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3df0625b9295daf1f7409ea55f72e1eeb38d54f5769add53372e79ddc3cf98d"}, - {file = "onnxruntime-1.15.1-cp38-cp38-win32.whl", hash = "sha256:f68b47fdf1a0406c0292f81ac993e2a2ae3e8b166b436d590eb221f64e8e187a"}, - {file = "onnxruntime-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:52d762d297cc3f731f54fa65a3e329b813164970671547bef6414d0ed52765c9"}, - {file = "onnxruntime-1.15.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:99228f9f03dc1fc8af89a28c9f942e8bd3e97e894e263abe1a32e4ddb1f6363b"}, - {file = "onnxruntime-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:45db7f96febb0cf23e3af147f35c4f8de1a37dd252d1cef853c242c2780250cd"}, - {file = "onnxruntime-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bafc112a36db25c821b90ab747644041cb4218f6575889775a2c12dd958b8c3"}, - {file = "onnxruntime-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985693d18f2d46aa34fd44d7f65ff620660b2c8fa4b8ec365c2ca353f0fbdb27"}, - {file = "onnxruntime-1.15.1-cp39-cp39-win32.whl", hash = "sha256:708eb31b0c04724bf0f01c1309a9e69bbc09b85beb750e5662c8aed29f1ff9fd"}, - {file = "onnxruntime-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:73d6de4c42dfde1e9dbea04773e6dc23346c8cda9c7e08c6554fafc97ac60138"}, + {file = "onnxruntime-1.16.3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:3bc41f323ac77acfed190be8ffdc47a6a75e4beeb3473fbf55eeb075ccca8df2"}, + {file = "onnxruntime-1.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:212741b519ee61a4822c79c47147d63a8b0ffde25cd33988d3d7be9fbd51005d"}, + {file = "onnxruntime-1.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f91f5497fe3df4ceee2f9e66c6148d9bfeb320cd6a71df361c66c5b8bac985a"}, + {file = "onnxruntime-1.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b1fc269cabd27f129fb9058917d6fdc89b188c49ed8700f300b945c81f889"}, + {file = "onnxruntime-1.16.3-cp310-cp310-win32.whl", hash = "sha256:f36b56a593b49a3c430be008c2aea6658d91a3030115729609ec1d5ffbaab1b6"}, + {file = "onnxruntime-1.16.3-cp310-cp310-win_amd64.whl", hash = "sha256:3c467eaa3d2429c026b10c3d17b78b7f311f718ef9d2a0d6938e5c3c2611b0cf"}, + {file = "onnxruntime-1.16.3-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a225bb683991001d111f75323d355b3590e75e16b5e0f07a0401e741a0143ea1"}, + {file = 
"onnxruntime-1.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9aded21fe3d898edd86be8aa2eb995aa375e800ad3dfe4be9f618a20b8ee3630"}, + {file = "onnxruntime-1.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00cccc37a5195c8fca5011b9690b349db435986bd508eb44c9fce432da9228a4"}, + {file = "onnxruntime-1.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e253e572021563226a86f1c024f8f70cdae28f2fb1cc8c3a9221e8b1ce37db5"}, + {file = "onnxruntime-1.16.3-cp311-cp311-win32.whl", hash = "sha256:a82a8f0b4c978d08f9f5c7a6019ae51151bced9fd91e5aaa0c20a9e4ac7a60b6"}, + {file = "onnxruntime-1.16.3-cp311-cp311-win_amd64.whl", hash = "sha256:78d81d9af457a1dc90db9a7da0d09f3ccb1288ea1236c6ab19f0ca61f3eee2d3"}, + {file = "onnxruntime-1.16.3-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:04ebcd29c20473596a1412e471524b2fb88d55e6301c40b98dd2407b5911595f"}, + {file = "onnxruntime-1.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9996bab0f202a6435ab867bc55598f15210d0b72794d5de83712b53d564084ae"}, + {file = "onnxruntime-1.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b8f5083f903408238883821dd8c775f8120cb4a604166dbdabe97f4715256d5"}, + {file = "onnxruntime-1.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c2dcf1b70f8434abb1116fe0975c00e740722aaf321997195ea3618cc00558e"}, + {file = "onnxruntime-1.16.3-cp38-cp38-win32.whl", hash = "sha256:d4a0151e1accd04da6711f6fd89024509602f82c65a754498e960b032359b02d"}, + {file = "onnxruntime-1.16.3-cp38-cp38-win_amd64.whl", hash = "sha256:e8aa5bba78afbd4d8a2654b14ec7462ff3ce4a6aad312a3c2d2c2b65009f2541"}, + {file = "onnxruntime-1.16.3-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:6829dc2a79d48c911fedaf4c0f01e03c86297d32718a3fdee7a282766dfd282a"}, + {file = "onnxruntime-1.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:76f876c53bfa912c6c242fc38213a6f13f47612d4360bc9d599bd23753e53161"}, + {file = "onnxruntime-1.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4137e5d443e2dccebe5e156a47f1d6d66f8077b03587c35f11ee0c7eda98b533"}, + {file = "onnxruntime-1.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56695c1a343c7c008b647fff3df44da63741fbe7b6003ef576758640719be7b"}, + {file = "onnxruntime-1.16.3-cp39-cp39-win32.whl", hash = "sha256:985a029798744ce4743fcf8442240fed35c8e4d4d30ec7d0c2cdf1388cd44408"}, + {file = "onnxruntime-1.16.3-cp39-cp39-win_amd64.whl", hash = "sha256:28ff758b17ce3ca6bcad3d936ec53bd7f5482e7630a13f6dcae518eba8f71d85"}, ] [package.dependencies] @@ -2569,35 +2712,36 @@ sympy = "*" [[package]] name = "openai" -version = "0.28.0" -description = "Python client library for the OpenAI API" +version = "1.5.0" +description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-0.28.0-py3-none-any.whl", hash = "sha256:d207ece78469be5648eb87b825753282225155a29d0eec6e02013ddbf8c31c0c"}, - {file = "openai-0.28.0.tar.gz", hash = "sha256:417b78c4c2864ba696aedaf1ccff77be1f04a581ab1739f0a56e0aae19e5a794"}, + {file = "openai-1.5.0-py3-none-any.whl", hash = "sha256:42d8c84b0714c990e18afe81d37f8a64423e8196bf7157b8ea665b8d8f393253"}, + {file = "openai-1.5.0.tar.gz", hash = "sha256:4cd91e97988ccd6c44f815107def9495cbc718aeb8b28be33a87b6fa2c432508"}, ] [package.dependencies] -aiohttp = "*" -requests = ">=2.20" -tqdm = "*" +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = 
">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.5,<5" [package.extras] -datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] -embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] -wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] [[package]] name = "openapi-core" -version = "0.18.1" +version = "0.18.2" description = "client-side and server-side support for the OpenAPI Specification v3" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ - {file = "openapi_core-0.18.1-py3-none-any.whl", hash = "sha256:603983137a2b954843ef4b85fa36e2d5cceaba50add44c1c2a5165ba5d2954b4"}, - {file = "openapi_core-0.18.1.tar.gz", hash = "sha256:63fa13d9af226ac00119b0531ac9929e3dbb4cbe00216770784473fa6a03bc27"}, + {file = "openapi_core-0.18.2-py3-none-any.whl", hash = "sha256:ec13d366766d564450de60374f59feb0b5ccb447aed642cdf0f1ecfcc6fbe80a"}, + {file = "openapi_core-0.18.2.tar.gz", hash = "sha256:d4cc50f3ee03ae46313c83e97c6fbfe7e7ae9686741135eb0e4ed49e9d8ff08a"}, ] [package.dependencies] @@ -2607,7 +2751,7 @@ jsonschema = ">=4.18.0,<5.0.0" jsonschema-spec = ">=0.2.3,<0.3.0" more-itertools = "*" openapi-schema-validator = ">=0.6.0,<0.7.0" -openapi-spec-validator = ">=0.6.0,<0.7.0" +openapi-spec-validator = ">=0.7.1,<0.8.0" parse = "*" werkzeug = "*" @@ -2621,35 +2765,35 @@ starlette = ["starlette (>=0.26.1,<0.32.0)"] [[package]] name = "openapi-schema-validator" -version = "0.6.0" +version = "0.6.2" description = "OpenAPI schema validation for Python" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ - {file = "openapi_schema_validator-0.6.0-py3-none-any.whl", hash = "sha256:9e95b95b621efec5936245025df0d6a7ffacd1551e91d09196b3053040c931d7"}, - {file = "openapi_schema_validator-0.6.0.tar.gz", hash = "sha256:921b7c1144b856ca3813e41ecff98a4050f7611824dfc5c6ead7072636af0520"}, + {file = "openapi_schema_validator-0.6.2-py3-none-any.whl", hash = "sha256:c4887c1347c669eb7cded9090f4438b710845cd0f90d1fb9e1b3303fb37339f8"}, + {file = "openapi_schema_validator-0.6.2.tar.gz", hash = "sha256:11a95c9c9017912964e3e5f2545a5b11c3814880681fcacfb73b1759bb4f2804"}, ] [package.dependencies] -jsonschema = ">=4.18.0,<5.0.0" +jsonschema = ">=4.19.1,<5.0.0" jsonschema-specifications = ">=2023.5.2,<2024.0.0" rfc3339-validator = "*" [[package]] name = "openapi-spec-validator" -version = "0.6.0" +version = "0.7.1" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ - {file = "openapi_spec_validator-0.6.0-py3-none-any.whl", hash = "sha256:675f1a3c0d0d8eff9116694acde88bcd4613a95bf5240270724d9d78c78f26d6"}, - {file = "openapi_spec_validator-0.6.0.tar.gz", hash = "sha256:68c4c212c88ef14c6b1a591b895bf742c455783c7ebba2507abd7dbc1365a616"}, + {file = "openapi_spec_validator-0.7.1-py3-none-any.whl", hash = "sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959"}, + {file = "openapi_spec_validator-0.7.1.tar.gz", hash = "sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7"}, ] [package.dependencies] -importlib-resources = {version = ">=5.8.0,<6.0.0", markers = "python_version < \"3.9\""} +importlib-resources = {version = 
">=5.8,<7.0", markers = "python_version < \"3.9\""} jsonschema = ">=4.18.0,<5.0.0" -jsonschema-spec = ">=0.2.3,<0.3.0" +jsonschema-path = ">=0.3.1,<0.4.0" lazy-object-proxy = ">=1.7.1,<2.0.0" openapi-schema-validator = ">=0.6.0,<0.7.0" @@ -2666,13 +2810,13 @@ files = [ [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -2744,13 +2888,13 @@ xml = ["lxml (>=4.6.3)"] [[package]] name = "parse" -version = "1.19.1" +version = "1.20.0" description = "parse() is the opposite of format()" optional = false python-versions = "*" files = [ - {file = "parse-1.19.1-py2.py3-none-any.whl", hash = "sha256:371ed3800dc63983832159cc9373156613947707bc448b5215473a219dbd4362"}, - {file = "parse-1.19.1.tar.gz", hash = "sha256:cc3a47236ff05da377617ddefa867b7ba983819c664e1afe46249e5b469be464"}, + {file = "parse-1.20.0-py2.py3-none-any.whl", hash = "sha256:5e171b001452fa9f004c5a58a93525175468daf69b493e9fa915347ed7ff6968"}, + {file = "parse-1.20.0.tar.gz", hash = "sha256:bd28bae37714b45d5894d77160a16e2be36b64a3b618c81168b3684676aa498b"}, ] [[package]] @@ -2781,24 +2925,24 @@ files = [ [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "pexpect" -version = "4.8.0" +version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" files = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, ] [package.dependencies] @@ -2817,70 +2961,88 @@ files = [ [[package]] name = "pillow" -version = "10.0.1" +version = "10.2.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.0.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:8f06be50669087250f319b706decf69ca71fdecd829091a37cc89398ca4dc17a"}, - {file = "Pillow-10.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50bd5f1ebafe9362ad622072a1d2f5850ecfa44303531ff14353a4059113b12d"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6a90167bcca1216606223a05e2cf991bb25b14695c518bc65639463d7db722d"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f11c9102c56ffb9ca87134bd025a43d2aba3f1155f508eff88f694b33a9c6d19"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:186f7e04248103482ea6354af6d5bcedb62941ee08f7f788a1c7707bc720c66f"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0462b1496505a3462d0f35dc1c4d7b54069747d65d00ef48e736acda2c8cbdff"}, - {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d889b53ae2f030f756e61a7bff13684dcd77e9af8b10c6048fb2c559d6ed6eaf"}, - {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:552912dbca585b74d75279a7570dd29fa43b6d93594abb494ebb31ac19ace6bd"}, - {file = "Pillow-10.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:787bb0169d2385a798888e1122c980c6eff26bf941a8ea79747d35d8f9210ca0"}, - {file = "Pillow-10.0.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fd2a5403a75b54661182b75ec6132437a181209b901446ee5724b589af8edef1"}, - {file = "Pillow-10.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d7e91b4379f7a76b31c2dda84ab9e20c6220488e50f7822e59dac36b0cd92b1"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e9adb3f22d4c416e7cd79b01375b17159d6990003633ff1d8377e21b7f1b21"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93139acd8109edcdeffd85e3af8ae7d88b258b3a1e13a038f542b79b6d255c54"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:92a23b0431941a33242b1f0ce6c88a952e09feeea9af4e8be48236a68ffe2205"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cbe68deb8580462ca0d9eb56a81912f59eb4542e1ef8f987405e35a0179f4ea2"}, - {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:522ff4ac3aaf839242c6f4e5b406634bfea002469656ae8358644fc6c4856a3b"}, - {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:84efb46e8d881bb06b35d1d541aa87f574b58e87f781cbba8d200daa835b42e1"}, - {file = "Pillow-10.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:898f1d306298ff40dc1b9ca24824f0488f6f039bc0e25cfb549d3195ffa17088"}, - {file = "Pillow-10.0.1-cp312-cp312-macosx_10_10_x86_64.whl", hash 
= "sha256:bcf1207e2f2385a576832af02702de104be71301c2696d0012b1b93fe34aaa5b"}, - {file = "Pillow-10.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d6c9049c6274c1bb565021367431ad04481ebb54872edecfcd6088d27edd6ed"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28444cb6ad49726127d6b340217f0627abc8732f1194fd5352dec5e6a0105635"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de596695a75496deb3b499c8c4f8e60376e0516e1a774e7bc046f0f48cd620ad"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:2872f2d7846cf39b3dbff64bc1104cc48c76145854256451d33c5faa55c04d1a"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4ce90f8a24e1c15465048959f1e94309dfef93af272633e8f37361b824532e91"}, - {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ee7810cf7c83fa227ba9125de6084e5e8b08c59038a7b2c9045ef4dde61663b4"}, - {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1be1c872b9b5fcc229adeadbeb51422a9633abd847c0ff87dc4ef9bb184ae08"}, - {file = "Pillow-10.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:98533fd7fa764e5f85eebe56c8e4094db912ccbe6fbf3a58778d543cadd0db08"}, - {file = "Pillow-10.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:764d2c0daf9c4d40ad12fbc0abd5da3af7f8aa11daf87e4fa1b834000f4b6b0a"}, - {file = "Pillow-10.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fcb59711009b0168d6ee0bd8fb5eb259c4ab1717b2f538bbf36bacf207ef7a68"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:697a06bdcedd473b35e50a7e7506b1d8ceb832dc238a336bd6f4f5aa91a4b500"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f665d1e6474af9f9da5e86c2a3a2d2d6204e04d5af9c06b9d42afa6ebde3f21"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:2fa6dd2661838c66f1a5473f3b49ab610c98a128fc08afbe81b91a1f0bf8c51d"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:3a04359f308ebee571a3127fdb1bd01f88ba6f6fb6d087f8dd2e0d9bff43f2a7"}, - {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:723bd25051454cea9990203405fa6b74e043ea76d4968166dfd2569b0210886a"}, - {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:71671503e3015da1b50bd18951e2f9daf5b6ffe36d16f1eb2c45711a301521a7"}, - {file = "Pillow-10.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:44e7e4587392953e5e251190a964675f61e4dae88d1e6edbe9f36d6243547ff3"}, - {file = "Pillow-10.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:3855447d98cced8670aaa63683808df905e956f00348732448b5a6df67ee5849"}, - {file = "Pillow-10.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed2d9c0704f2dc4fa980b99d565c0c9a543fe5101c25b3d60488b8ba80f0cce1"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5bb289bb835f9fe1a1e9300d011eef4d69661bb9b34d5e196e5e82c4cb09b37"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0d3e54ab1df9df51b914b2233cf779a5a10dfd1ce339d0421748232cea9876"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:2cc6b86ece42a11f16f55fe8903595eff2b25e0358dec635d0a701ac9586588f"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ca26ba5767888c84bf5a0c1a32f069e8204ce8c21d00a49c90dabeba00ce0145"}, - {file = 
"Pillow-10.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f0b4b06da13275bc02adfeb82643c4a6385bd08d26f03068c2796f60d125f6f2"}, - {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bc2e3069569ea9dbe88d6b8ea38f439a6aad8f6e7a6283a38edf61ddefb3a9bf"}, - {file = "Pillow-10.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8b451d6ead6e3500b6ce5c7916a43d8d8d25ad74b9102a629baccc0808c54971"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:32bec7423cdf25c9038fef614a853c9d25c07590e1a870ed471f47fb80b244db"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cf63d2c6928b51d35dfdbda6f2c1fddbe51a6bc4a9d4ee6ea0e11670dd981e"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f6d3d4c905e26354e8f9d82548475c46d8e0889538cb0657aa9c6f0872a37aa4"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:847e8d1017c741c735d3cd1883fa7b03ded4f825a6e5fcb9378fd813edee995f"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7f771e7219ff04b79e231d099c0a28ed83aa82af91fd5fa9fdb28f5b8d5addaf"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459307cacdd4138edee3875bbe22a2492519e060660eaf378ba3b405d1c66317"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b059ac2c4c7a97daafa7dc850b43b2d3667def858a4f112d1aa082e5c3d6cf7d"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6caf3cd38449ec3cd8a68b375e0c6fe4b6fd04edb6c9766b55ef84a6e8ddf2d"}, - {file = "Pillow-10.0.1.tar.gz", hash = "sha256:d72967b06be9300fed5cfbc8b5bafceec48bf7cdc7dab66b1d2549035287191d"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + 
{file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = 
"pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, ] [package.extras] docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] [[package]] name = "pinecone-client" @@ -2920,13 +3082,13 @@ files = [ [[package]] name = "platformdirs" -version = "3.10.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -2935,13 +3097,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -2950,13 +3112,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "portalocker" -version = "2.7.0" +version = "2.8.2" description = "Wraps the portalocker recipe for easy usage" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "portalocker-2.7.0-py2.py3-none-any.whl", hash = "sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983"}, - {file = "portalocker-2.7.0.tar.gz", hash = "sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51"}, + {file = "portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, + {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, ] [package.dependencies] @@ -2965,17 +3127,17 @@ pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} [package.extras] docs = ["sphinx (>=1.7.1)"] redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] [[package]] name = "posthog" -version = "3.0.2" +version = "3.1.0" description = "Integrate PostHog into any python application." 
optional = false python-versions = "*" files = [ - {file = "posthog-3.0.2-py2.py3-none-any.whl", hash = "sha256:a8c0af6f2401fbe50f90e68c4143d0824b54e872de036b1c2f23b5abb39d88ce"}, - {file = "posthog-3.0.2.tar.gz", hash = "sha256:701fba6e446a4de687c6e861b587e7b7741955ad624bf34fe013c06a0fec6fb3"}, + {file = "posthog-3.1.0-py2.py3-none-any.whl", hash = "sha256:acd033530bdfc275dce5587f205f62378991ecb9b7cd5479e79c7f4ac575d319"}, + {file = "posthog-3.1.0.tar.gz", hash = "sha256:db17a2c511e18757aec12b6632ddcc1fa318743dad88a4666010467a3d9468da"}, ] [package.dependencies] @@ -2988,7 +3150,7 @@ six = ">=1.5" [package.extras] dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"] sentry = ["django", "sentry-sdk"] -test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest"] +test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-timeout"] [[package]] name = "prance" @@ -3018,13 +3180,13 @@ ssv = ["swagger-spec-validator (>=2.4,<3.0)"] [[package]] name = "pre-commit" -version = "3.3.3" +version = "3.5.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, + {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, + {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, ] [package.dependencies] @@ -3036,13 +3198,13 @@ virtualenv = ">=20.10.0" [[package]] name = "prompt-toolkit" -version = "3.0.39" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, - {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ -3050,13 +3212,13 @@ wcwidth = "*" [[package]] name = "proto-plus" -version = "1.22.3" +version = "1.23.0" description = "Beautiful, Pythonic protocol buffers." 
optional = false python-versions = ">=3.6" files = [ - {file = "proto-plus-1.22.3.tar.gz", hash = "sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b"}, - {file = "proto_plus-1.22.3-py3-none-any.whl", hash = "sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df"}, + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, ] [package.dependencies] @@ -3067,47 +3229,47 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.24.1" +version = "4.25.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-4.24.1-cp310-abi3-win32.whl", hash = "sha256:d414199ca605eeb498adc4d2ba82aedc0379dca4a7c364ff9bc9a179aa28e71b"}, - {file = "protobuf-4.24.1-cp310-abi3-win_amd64.whl", hash = "sha256:5906c5e79ff50fe38b2d49d37db5874e3c8010826f2362f79996d83128a8ed9b"}, - {file = "protobuf-4.24.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:970c701ee16788d74f3de20938520d7a0aebc7e4fff37096a48804c80d2908cf"}, - {file = "protobuf-4.24.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fc361148e902949dcb953bbcb148c99fe8f8854291ad01107e4120361849fd0e"}, - {file = "protobuf-4.24.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:5d32363d14aca6e5c9e9d5918ad8fb65b091b6df66740ae9de50ac3916055e43"}, - {file = "protobuf-4.24.1-cp37-cp37m-win32.whl", hash = "sha256:df015c47d6855b8efa0b9be706c70bf7f050a4d5ac6d37fb043fbd95157a0e25"}, - {file = "protobuf-4.24.1-cp37-cp37m-win_amd64.whl", hash = "sha256:d4af4fd9e9418e819be30f8df2a16e72fbad546a7576ac7f3653be92a6966d30"}, - {file = "protobuf-4.24.1-cp38-cp38-win32.whl", hash = "sha256:302e8752c760549ed4c7a508abc86b25d46553c81989343782809e1a062a2ef9"}, - {file = "protobuf-4.24.1-cp38-cp38-win_amd64.whl", hash = "sha256:06437f0d4bb0d5f29e3d392aba69600188d4be5ad1e0a3370e581a9bf75a3081"}, - {file = "protobuf-4.24.1-cp39-cp39-win32.whl", hash = "sha256:0b2b224e9541fe9f046dd7317d05f08769c332b7e4c54d93c7f0f372dedb0b1a"}, - {file = "protobuf-4.24.1-cp39-cp39-win_amd64.whl", hash = "sha256:bd39b9094a4cc003a1f911b847ab379f89059f478c0b611ba1215053e295132e"}, - {file = "protobuf-4.24.1-py3-none-any.whl", hash = "sha256:55dd644adc27d2a624339332755fe077c7f26971045b469ebb9732a69ce1f2ca"}, - {file = "protobuf-4.24.1.tar.gz", hash = "sha256:44837a5ed9c9418ad5d502f89f28ba102e9cd172b6668bc813f21716f9273348"}, + {file = "protobuf-4.25.1-cp310-abi3-win32.whl", hash = "sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7"}, + {file = "protobuf-4.25.1-cp310-abi3-win_amd64.whl", hash = "sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b"}, + {file = "protobuf-4.25.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd"}, + {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb"}, + {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7"}, + {file = "protobuf-4.25.1-cp38-cp38-win32.whl", hash = "sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd"}, + {file = "protobuf-4.25.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0"}, + {file = "protobuf-4.25.1-cp39-cp39-win32.whl", hash = "sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510"}, + {file = "protobuf-4.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10"}, + {file = "protobuf-4.25.1-py3-none-any.whl", hash = "sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6"}, + {file = "protobuf-4.25.1.tar.gz", hash = "sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2"}, ] [[package]] name = "psutil" -version = "5.9.5" +version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = 
"psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, ] [package.extras] @@ -3115,13 +3277,13 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg" -version = "3.1.12" +version = "3.1.15" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg-3.1.12-py3-none-any.whl", hash = "sha256:8ec5230d6a7eb654b4fb3cf2d3eda8871d68f24807b934790504467f1deee9f8"}, - {file = "psycopg-3.1.12.tar.gz", hash = "sha256:cec7ad2bc6a8510e56c45746c631cf9394148bdc8a9a11fd8cf8554ce129ae78"}, + {file = "psycopg-3.1.15-py3-none-any.whl", hash = "sha256:a6c03e508be0e42facb1e8581156fdc2904322fe8077ba4f298f5f0a947cb8e0"}, + {file = "psycopg-3.1.15.tar.gz", hash = "sha256:1b8e3e8d1612ea289a2684a5bf0c1f9a209549b222b6958377ce970a6e10b80c"}, ] [package.dependencies] @@ -3130,8 +3292,8 @@ typing-extensions = ">=4.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.1.12)"] -c = ["psycopg-c (==3.1.12)"] +binary = ["psycopg-binary (==3.1.15)"] +c = ["psycopg-c (==3.1.15)"] dev = ["black (>=23.1.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] @@ -3139,76 +3301,87 @@ test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6 [[package]] name = "psycopg-binary" -version = "3.1.10" +version = "3.1.15" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg_binary-3.1.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a529c203f6e0f4c67ba27cf8f9739eb3bc880ad70d6ad6c0e56c2230a66b5a09"}, - {file = 
"psycopg_binary-3.1.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bd6e14d1aeb12754a43446c77a5ce819b68875cc25ae6538089ef90d7f6dd6f7"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1583ced5948cf88124212c4503dfe5b01ac3e2dd1a2833c083917f4c4aabe8b4"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2098721c486478987be700723b28ec7a48f134eba339de36af0e745f37dfe461"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e61f7b412fca7b15dd043a0b22fd528d2ed8276e76b3764c3889e29fa65082b"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0f33e33a072e3d5af51ee4d4a439e10dbe623fe87ef295d5d688180d529f13f"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f6f7738c59262d8d19154164d99c881ed58ed377fb6f1d685eb0dc43bbcd8022"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:511d38b1e1961d179d47d5103ba9634ecfc7ead431d19a9337ef82f3a2bca807"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:666e7acf2ffdb5e8a58e8b0c1759facdb9688c7e90ee8ca7aed675803b57404d"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:57b93c756fee5f7c7bd580c34cd5d244f7d5638f8b2cf25333f97b9b8b2ebfd1"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-win_amd64.whl", hash = "sha256:a1d61b7724c7215a8ea4495a5c6b704656f4b7bb6165f4cb9989b685886ebc48"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:36fff836a7823c9d71fa7faa333c74b2b081af216cebdbb0f481dce55ee2d974"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:32caf98cb00881bfcbbbae39a15f2a4e08b79ff983f1c0f13b60a888ef6e8431"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5565a6a86fee8d74f30de89e07f399567cdf59367aeb09624eb690d524339076"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fb0d64520b29bd80a6731476ad8e1c20348dfdee00ab098899d23247b641675"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfc05ed4e74fa8615d7cc2bd57f00f97662f4e865a731dbd43da9a527e289c8c"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b59c8cff887757ddf438ff9489d79c5e6b717112c96f5c68e16f367ff8724e"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbaf12361136afefc5faab21a174a437e71c803b083f410e5140c7605bc66b"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ff72576061c774bcce5f5440b93e63d4c430032dd056d30f6cb1988e549dd92c"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a4e91e1a8d61c60f592a1dfcebdf55e52a29fe4fdb650c5bd5414c848e77d029"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f7187269d825e84c945be7d93dd5088a4e0b6481a4bdaba3bf7069d4ac13703d"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-win_amd64.whl", hash = "sha256:ba7812a593c16d9d661844dc8dd4d81548fd1c2a0ee676f3e3d8638369f4c5e4"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88caa5859740507b3596c6c2e00ceaccee2c6ab5317bc535887801ad3cc7f3e1"}, - {file = 
"psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a3a7e99ba10c2e83a48d79431560e0d5ca7865f68f2bac3a462dc2b151e9926"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:848f4f4707dc73f4b4e844c92f3de795b2ddb728f75132602bda5e6ba55084fc"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:415961e839bb49cfd75cd961503fb8846c0768f247db1fa7171c1ac61d38711b"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0471869e658d0c6b8c3ed53153794739c18d7dad2dd5b8e6ff023a364c20f7df"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4290060ee0d856caa979ecf675c0e6959325f508272ccf27f64c3801c7bcbde7"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:abf04bc06c8f6a1ac3dc2106d3b79c8661352e9d8a57ca2934ffa6aae8fe600a"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:51fe70708243b83bf16710d8c11b61bd46562e6a24a6300d5434380b35911059"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b658f7f8b49fb60a1c52e3f6692f690a85bdf1ad30aafe0f3f1fd74f6958cf8"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-win_amd64.whl", hash = "sha256:ffc8c796194f23b9b07f6d25f927ec4df84a194bbc7a1f9e73316734eef512f9"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74ce92122be34cf0e5f06d79869e1001c8421a68fa7ddf6fe38a717155cf3a64"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:75608a900984061c8898be68fbddc6f3da5eefdffce6e0624f5371645740d172"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6670d160d054466e8fdedfbc749ef8bf7dfdf69296048954d24645dd4d3d3c01"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d32026cfab7ba7ac687a42c33345026a2fb6fc5608a6144077f767af4386be0b"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:908fa388a5b75dfd17a937acb24708bd272e21edefca9a495004c6f70ec2636a"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e46b97073bd4de114f475249d681eaf054e950699c5d7af554d3684db39b82d"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9cf56bb4b115def3a18157f3b3b7d8322ee94a8dea30028db602c8f9ae34ad1e"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3b6c6f90241c4c5a6ca3f0d8827e37ef90fdc4deb9d8cfa5678baa0ea374b391"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:747176a6aeb058079f56c5397bd90339581ab7b3cc0d62e7445654e6a484c7e1"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41a415e78c457b06497fa0084e4ea7245ca1a377b55756dd757034210b64da7e"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-win_amd64.whl", hash = "sha256:a7bbe9017edd898d7b3a8747700ed045dda96a907dff87f45e642e28d8584481"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0f062f20256708929a58c41d44f350efced4c00a603323d1413f6dc0b84d95a5"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dea30f2704337ca2d0322fccfe1fa30f61ce9185de3937eb986321063114a51f"}, - {file = 
"psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9d88ac72531034ebf7ec09114e732b066a9078f4ce213cf65cc5e42eb538d30"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2bea0940d69c3e24a72530730952687912893b34c53aa39e79045e7b446174d"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a691dc8e2436d9c1e5cf93902d63e9501688fccc957eb22f952d37886257470"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa92661f99351765673835a4d936d79bd24dfbb358b29b084d83be38229a90e4"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:30eb731ed5525d8df892db6532cc8ffd8a163b73bc355127dee9c49334e16eee"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50bf7a59d3a85a82d466fed341d352b44d09d6adc18656101d163a7cfc6509a0"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f48665947c55f8d6eb3f0be98de80411508e1ec329f354685329b57fced82c7f"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:caa771569da01fc0389ca34920c331a284425a68f92d1ba0a80cc08935f8356e"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-win_amd64.whl", hash = "sha256:b30887e631fd67affaed98f6cd2135b44f2d1a6d9bca353a69c3889c78bd7aa8"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12417e4aade7549c25d34d3f256c006713db81877b44934d50afa81e11f2fce5"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7e891f8d5e935c805ccf9acb5a012a3fd7032724d35022e4eba72babbd24003b"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:399a4a857a4aec8548fa8763d28b89c738408d0a66638019a74d92c19029421e"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0906a9297e057635d14687aa6cd8f783776918f1549d04d1c9bc84c0ad984d77"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90d8152933b84f43d48ab13c9fb6a7f59194f4879f2a0236824778165e14b97b"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d9ecf64337b6c5ba7e81ed1b46f05149d37a653e2dc9138ccd065db26252840"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dda4250b3aad981a37a504e555b784663d0b2dda2a385631662157ce808129f1"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1da944ad974c90c741e1f0139d71bab62e9603c32f4f3b3edcd766db5df88da"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:4a8c1c0d5ff08e8b089dbd4397b9b6cf9eec676685a53d6331f45fd6112bb138"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:18f5666d6f4064e454f279fbf59553f1665b81b4ababb384132682e244d85da7"}, + {file = "psycopg_binary-3.1.15-cp310-cp310-win_amd64.whl", hash = "sha256:cf231c921dc0dfb71cb2dab0647f9a620729276b19b724b50703663ab0ecc9a4"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1126bdfff795db17f09aa20e2ff3efeeced82b43ef0628ac92f5c1d9e4fa2482"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75b49634b0f4f63f6bfb62006f3b0736ef636a2d19475dcba1b3728d8d172721"}, + {file = 
"psycopg_binary-3.1.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b265ce896527a752eee111ba780eaed6ed8ad6c2c50be45ad98099c8f1c34865"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7836017a850c3e48ed09052c0e1348547656815dc653218645d74e5d6da0357b"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02de4d5240fe40c0c000b1fc072f403f2f88ff963e0fe09b4bda6caf3bdb2d32"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be50b8cee9d0910ee9c98127e70216ac2865e34715e57a5490583af90734259d"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:773ae209449352788432f916f6e518e28c23a139a29d352810c4b21382d0f61d"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:288214e81966872adbe46057a66eb76e9250f628aff2cc9e46a5fcf1da24123b"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4781bda7d34fe12c10f128255abfc1ead12f58a3a0aaa2bd91b4055548be519b"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:076a50bc71b3f597a6fd1ec7e662b6948cd532486d4be5d107ff74dc9647be1e"}, + {file = "psycopg_binary-3.1.15-cp311-cp311-win_amd64.whl", hash = "sha256:c75e12eeb7a48e19eb4599524e24d883150ce3ef2c6967f7aff2f8f3c73ddb7a"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:666722d41389de0ab6cec5aa780548e2c60f36bda74da929f5ede6ca932dc34c"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e72129c3dc41ad4aaecb49ec54ed5f9c2311c53fb9a8e3c0fc63ad0f1699295e"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2f7e0fbd66e69a1c5f164c5c6c8b0b98f6e851a41ffd23ef44a0dd9ef3a175e"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07c11f76a258060e047db460ac05f76ae5e09d94c10ea9f81f3f0f28b401ac0f"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fd9a19784237845bfdd93c25d59c475e1ce069717470b7d6a7d928113a3743d"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97ab8928fa7403a17b6df76bc3337527c454a9653bd966b1eccfd3176336f909"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:007ec68533f530b8fdaf77cb5c7961812772f31ecc90cc9c1000f3e321621e66"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d8b0e31a5243f648d17d78f2405ceb08c7819a4e97bbff778ffd10a7bf1a08a1"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fa6cedc562e26b55de17b7897e8bf21e01d7aea271493b8d6ef2c31f63ad7c55"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b02eb2bc11a41c67825d14b12e0d8f65bdaa0a9d1c792f22f6b9c97d0123a2e"}, + {file = "psycopg_binary-3.1.15-cp312-cp312-win_amd64.whl", hash = "sha256:8220860bb7553b37d3e50f877da7a96e487ad02e9cb996407db16b4e9b94c853"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21f54d3778d4fd50ce7501b55a6f84ce74eb7e0bbe2725dca049a0b478e9a431"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5bb9cbcc354b019ab59d92c739b55ecd654938b76b0e4174878bdaf9689060ed"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05407acf763766507e2d66473de904fc176ed3674bd0340246a80e4247ded39b"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:716bf9cd2b4e7ac09534b875a14ea9614776c8d9036e9c56d64c05b76e0aa6b3"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:410b4e97f617f9af58b0d41c5118d71276817ef2046d5f55c289a0d9d5696dc1"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c1148adc146c10ebd7432cf32324c1c977ebb4ab1a84c912dffec9b36523ab49"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ab7d92e7f1da609a145313ccb227f5f3d687d9aeaff4a46b5ada0395f270c09d"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2336d69180ce3d55f58fd899a47d334083e9c808e033bfe5ff43943064566e1d"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2be2a61a6cb7bccba7785dfb3268381d34b4a3868437ecf41aed0751b38255d9"}, + {file = "psycopg_binary-3.1.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d3a49a4ffa8cb7bf27b5e951ea856273cc2bbd60de78707e2701deef59ff3792"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e0e5d48c6541a186771f1c728a8b60e1494878def584f59c59a8a29a913776c"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bf1399084583875548e4df301e92bab00ce0ce03a2a72197c1b4f14f48d5135"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8bc1a830912d43f1904e6de51f1bf3495b438158ac77e4c6446b60139d8e6d0d"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:456707bd6a67bc2fe466c9f0b0ed7e1e4179d98c965e4065209a83fbabc71d38"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:110d3235b7147526d1d1e77ecc81c9078cc99271011078744da9184104573575"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb368f0c40fc2e0c667197cad3529bf0bc8a20c1536a177d18890e0e7a1946b9"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca0d47cd667b076f0cf7f6621100378bf8ec6717f669eb9232649dc3ce4bd6df"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c4a32dace67083d62adf58cb7214f0741a3bf8346e519340538035992dfd642e"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3b404b6e8b789039b627b3ed6a14989c70acfa044240bf94974fd3b3f9ce96c4"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:83aaaa7dd3df8cab472fdf8ffa616b2bf059ba146d15a1301ca988a29330ace2"}, + {file = "psycopg_binary-3.1.15-cp38-cp38-win_amd64.whl", hash = "sha256:cb9732b7b1bbd9f8f464f2478e184ccc34efca5126a2386c4e6fd752f56f1ac7"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d099a62090074f1df0aeed670a7b342ab1352b8fce21bbc5252e4e393fe840a2"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:624921c699b278a2ec64ccb4164be491fdf178bd50c898fef18c8c6fd8989d3e"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:522a62f38139c6fd4122648b4eb91636a9cd888567a7712318097150f52771a1"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe9fb4100cf8827e52d53471a8aadba284b5863842fcf7b3ae5394ab01ccb196"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23c406ad98a816a75ee29673b518ec6288de5784bf9768e74f5e8a3e8b20c33b"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba5a495696b64eb9a9c68ffd10c33816cf51d69410e1d91f999eb93e41dc371c"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9af56d13dc6071dd627d342a3fe7302b8a290056e66bc2c1bf9e4c5e38150d78"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad609cef3bbd501a369a5ba7c72bd34e30972417f7601fd4684ee5f8b0f5cdba"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:699a14733709f0c08b7d4fe32abd1d0fbb67ae58675ec7d0512048bd6eadeab4"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b51b3d8b955585c6fc3e3d90b09f4e481e47e28a6486c2fbad7866ddb6a5868"}, + {file = "psycopg_binary-3.1.15-cp39-cp39-win_amd64.whl", hash = "sha256:361b0a0697b582ff019a15590063c1065f109ae207c0374f297926d5b359012b"}, ] [[package]] name = "psycopg-pool" -version = "3.1.7" +version = "3.2.0" description = "Connection Pool for Psycopg" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg-pool-3.1.7.tar.gz", hash = "sha256:d02741dc48303495f4021900630442af87d6b1c3bfd1a3ece54cc11aa43d7dde"}, - {file = "psycopg_pool-3.1.7-py3-none-any.whl", hash = "sha256:ca1f2c366b5910acd400e16e812912827c57836af638c1717ba495111d22073b"}, + {file = "psycopg-pool-3.2.0.tar.gz", hash = "sha256:2e857bb6c120d012dba240e30e5dff839d2d69daf3e962127ce6b8e40594170e"}, + {file = "psycopg_pool-3.2.0-py3-none-any.whl", hash = "sha256:73371d4e795d9363c7b496cbb2dfce94ee8fbf2dcdc384d0a937d1d9d8bdd08d"}, ] [package.dependencies] @@ -3227,41 +3400,41 @@ files = [ [[package]] name = "pulsar-client" -version = "3.2.0" +version = "3.3.0" description = "Apache Pulsar Python client library" optional = false python-versions = "*" files = [ - {file = "pulsar_client-3.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:da53bbe1903026ca1253d36a67bde0ae88513497091658aee8c5514c3e567483"}, - {file = "pulsar_client-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec595a71b7a25f1a72a1350efd6680a511b53253c3cac1911ba3d6c4d71fa64c"}, - {file = "pulsar_client-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3557c65463d74ec8d2864752389beb06761ab591dd134a164e0b1303c66719b"}, - {file = "pulsar_client-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d51dc76fec48217489bde95754ad58288c9389361de42f5a27d64e19840d27fb"}, - {file = "pulsar_client-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ef2baf85311e0fe1b98342fdafbb93a1818a08ef999eaa524234fedf6f3b941"}, - {file = "pulsar_client-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:0928b02beda0c98e77178f4e30e962ddb8ee8c3320e4c7304a78b0796e976523"}, - {file = "pulsar_client-3.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:584f44b03474a69906be711a597a4d516263a55be31e49fc07be503dc8406821"}, - {file = "pulsar_client-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a637b9a3b30860c61e68a7b8ea650e0987d89e82f73b6a3df1ab662a6438fdda"}, - {file = "pulsar_client-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4a187fdc5febcf16f725179dcf2c476f31eeebd8353794d91754a3202dd5072"}, - {file = "pulsar_client-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5ff879f868cf1fd29db99f39fdb22b3ec3e749c648aca28526689756d922d1c5"}, - {file = "pulsar_client-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a5f85d0cc414f739a5b51d843f213b54b2cd768c3a34f7c27cca410712b1f81"}, - {file = "pulsar_client-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:4fe748283848d829a80c0323558faeebea4c240d69fa58314ac90344f6999d17"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-macosx_10_15_universal2.whl", hash = "sha256:06b91c26def86dbbc35be15257999fd8a2afbadf32983916ea3eef44f4d4cab4"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ec897bc8d232e6b118793378fc662a844334b829a28a1b4ad1c5fe8d019135"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa37c96c25c1b5aff3bad0fd0194b385ec190b2c67a2f439ac91577f81ae18d3"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d49cdd4d1b7fc2e80d100acf14e6fd3898f6e099e403fc56ed22a690245b2fec"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0058ca3191fd24528ccf94dba6f12e4093831454a2597166f96900d0717271bf"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cb69b0411008e0b56df51de0aab20aa1c1a12aef3019b9ceba89afbae1f07fe2"}, - {file = "pulsar_client-3.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:f7d33e99602352df7a30707eab4e5781654602212fb618928bffb5523f2bcf35"}, - {file = "pulsar_client-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad1ac15a175ca90555c681a4d0134568771c6346b97a172f3ef14006556a50ae"}, - {file = "pulsar_client-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369e08ef1d5cb196dd9271039928800f90b4701a9c9df90bc068b44260d2fb11"}, - {file = "pulsar_client-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a52ba2b6736a2ebeed31b590e75d417dda149e333461655860efa84d898a3eb4"}, - {file = "pulsar_client-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c801334b3b569b23976481a2922bcea0c6dd990fc26544658dd9e9c8f78ca36"}, - {file = "pulsar_client-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:cd01fd419280e9013d1655bc53662248be2656b623b1506480e1a985aa7dadd2"}, - {file = "pulsar_client-3.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:0abe54d84db76435a6cd88ce27610352cabc7efae9fa3e7f874e032ec2ca0b3f"}, - {file = "pulsar_client-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9a1b6a806eb4819d8cbab1c4ae44ebf2110a94204a46c365f5757e1455252f2"}, - {file = "pulsar_client-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34ea2a6b75ae0e303d522e5b57c75a4ff03dc18b9bfc14151fb14dfaf5866f17"}, - {file = "pulsar_client-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:be6d3a9b2e1db3b6d1a7db5e13f7b4ed420674cf072cdb520fb004c4cd54c0af"}, - {file = "pulsar_client-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6b733e6239ffb505f7084df0175baf9d0215f14d0a02e9bbd1fdf71a2d6ea17"}, - {file = "pulsar_client-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:edc2135d02b4793efb086edca0ffaa6e8ac9133961c2cdc17ae487e0a53da481"}, + {file = 
"pulsar_client-3.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c31afd3e67a044ff93177df89e08febf214cc965e95ede097d9fe8755af00e01"}, + {file = "pulsar_client-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f66982284571674b215324cc26b5c2f7c56c7043113c47a7084cb70d67a8afb"}, + {file = "pulsar_client-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fe50a06f81c48a75a9b95c27a6446260039adca71d9face273740de96b2efca"}, + {file = "pulsar_client-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d4c46a4b96a6e9919cfe220156d69a2ede8053d9ea1add4ada108abcf2ba9775"}, + {file = "pulsar_client-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1e4b5d44b992c9b036286b483f3588c10b89c6047fb59d80c7474445997f4e10"}, + {file = "pulsar_client-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:497a59ac6b650835a3b2c502f53477e5c98e5226998ca3f17c0b0a3eb4d67d08"}, + {file = "pulsar_client-3.3.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:386e78ff52058d881780bae1f6e84ac9434ae0b01a8581755ca8cc0dc844a332"}, + {file = "pulsar_client-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e4ecb780df58bcfd3918590bd3ff31ed79bccfbef3a1a60370642eb1e14a9d2"}, + {file = "pulsar_client-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ce1e215c252f22a6f26ca5e9076826041a04d88dc213b92c86b524be2774a64"}, + {file = "pulsar_client-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b0fd5be73a4103986b9dbe3a66468cf8829371e34af87ff8f216e3980f4cbe"}, + {file = "pulsar_client-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33656450536d83eed1563ff09692c2c415fb199d88e9ed97d701ca446a119e1b"}, + {file = "pulsar_client-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:ce33de700b06583df8777e139d68cb4b4b3d0a2eac168d74278d8935f357fb10"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-macosx_10_15_universal2.whl", hash = "sha256:7b5dd25cf778d6c980d36c53081e843ea272afe7af4f0ad6394ae9513f94641b"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c4e6865fda62a2e460f823dce4d49ac2973a4459b8ff99eda5fdd6aaaebf46"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1810ddc623c8de2675d17405ce47057a9a2b92298e708ce4d9564847f5ad904"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8259c3b856eb6deaa1f93dce893ab18d99d36d102da5612c8e97a4fb41b70ab1"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e7a48b2e505cde758fd51a601b5da0671fa98c9baee38362aaaa3ab2b930c28"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ede264385d47257b2f2b08ecde9181ec5338bea5639cc543d1856f01736778d2"}, + {file = "pulsar_client-3.3.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:0f64c62746ccd5b65a0c505f5f40b9af1f147eb1fa2d8f9c90cd5c8b92dd8597"}, + {file = "pulsar_client-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b84a20c9012e3c4ef1b7085acd7467197118c090b378dec27d773fb79d91556"}, + {file = "pulsar_client-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4e15fa696e275ccb66d0791fdc19c4dea0420d81349c8055e485b134125e14f"}, + {file = "pulsar_client-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:72cbb1bdcba2dd1265296b5ba65331622ee89c16db75edaad46dd7b90c6dd447"}, + {file = 
"pulsar_client-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d54dd12955bf587dd46d9184444af5e853d9da2a14bbfb739ed2c7c3b78ce280"}, + {file = "pulsar_client-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:43f98afdf0334b2b957a4d96f97a1fe8a7f7fd1e2631d40c3f00b4162f396485"}, + {file = "pulsar_client-3.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:efe7c1e6a96daccc522c3567b6847ffa54c13e0f510d9a427b4aeff9fbebe54b"}, + {file = "pulsar_client-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f28e94420090fceeb38e23fc744f3edf8710e48314ef5927d2b674a1d1e43ee0"}, + {file = "pulsar_client-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c8f3eaa98e2351805ecb6efb6d5fedf47a314a3ce6af0e05ea1449ea7244ed"}, + {file = "pulsar_client-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e69750f8ae57e55fddf97b459ce0d8b38b2bb85f464a71e871ee6a86d893be7"}, + {file = "pulsar_client-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7e147e5ba460c1818bc05254279a885b4e552bcafb8961d40e31f98d5ff46628"}, + {file = "pulsar_client-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:694530af1d6c75fb81456fb509778c1868adee31e997ddece6e21678200182ea"}, ] [package.dependencies] @@ -3288,40 +3461,47 @@ tests = ["pytest"] [[package]] name = "pyarrow" -version = "13.0.0" +version = "14.0.1" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-13.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:1afcc2c33f31f6fb25c92d50a86b7a9f076d38acbcb6f9e74349636109550148"}, - {file = "pyarrow-13.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:70fa38cdc66b2fc1349a082987f2b499d51d072faaa6b600f71931150de2e0e3"}, - {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd57b13a6466822498238877892a9b287b0a58c2e81e4bdb0b596dbb151cbb73"}, - {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ce69f7bf01de2e2764e14df45b8404fc6f1a5ed9871e8e08a12169f87b7a26"}, - {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:588f0d2da6cf1b1680974d63be09a6530fd1bd825dc87f76e162404779a157dc"}, - {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6241afd72b628787b4abea39e238e3ff9f34165273fad306c7acf780dd850956"}, - {file = "pyarrow-13.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:fda7857e35993673fcda603c07d43889fca60a5b254052a462653f8656c64f44"}, - {file = "pyarrow-13.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:aac0ae0146a9bfa5e12d87dda89d9ef7c57a96210b899459fc2f785303dcbb67"}, - {file = "pyarrow-13.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7759994217c86c161c6a8060509cfdf782b952163569606bb373828afdd82e8"}, - {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868a073fd0ff6468ae7d869b5fc1f54de5c4255b37f44fb890385eb68b68f95d"}, - {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51be67e29f3cfcde263a113c28e96aa04362ed8229cb7c6e5f5c719003659d33"}, - {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d1b4e7176443d12610874bb84d0060bf080f000ea9ed7c84b2801df851320295"}, - {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:69b6f9a089d116a82c3ed819eea8fe67dae6105f0d81eaf0fdd5e60d0c6e0944"}, - {file = "pyarrow-13.0.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:ab1268db81aeb241200e321e220e7cd769762f386f92f61b898352dd27e402ce"}, - {file = "pyarrow-13.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ee7490f0f3f16a6c38f8c680949551053c8194e68de5046e6c288e396dccee80"}, - {file = "pyarrow-13.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3ad79455c197a36eefbd90ad4aa832bece7f830a64396c15c61a0985e337287"}, - {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68fcd2dc1b7d9310b29a15949cdd0cb9bc34b6de767aff979ebf546020bf0ba0"}, - {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc6fd330fd574c51d10638e63c0d00ab456498fc804c9d01f2a61b9264f2c5b2"}, - {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e66442e084979a97bb66939e18f7b8709e4ac5f887e636aba29486ffbf373763"}, - {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:0f6eff839a9e40e9c5610d3ff8c5bdd2f10303408312caf4c8003285d0b49565"}, - {file = "pyarrow-13.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b30a27f1cddf5c6efcb67e598d7823a1e253d743d92ac32ec1eb4b6a1417867"}, - {file = "pyarrow-13.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:09552dad5cf3de2dc0aba1c7c4b470754c69bd821f5faafc3d774bedc3b04bb7"}, - {file = "pyarrow-13.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3896ae6c205d73ad192d2fc1489cd0edfab9f12867c85b4c277af4d37383c18c"}, - {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6647444b21cb5e68b593b970b2a9a07748dd74ea457c7dadaa15fd469c48ada1"}, - {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47663efc9c395e31d09c6aacfa860f4473815ad6804311c5433f7085415d62a7"}, - {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b9ba6b6d34bd2563345488cf444510588ea42ad5613df3b3509f48eb80250afd"}, - {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d00d374a5625beeb448a7fa23060df79adb596074beb3ddc1838adb647b6ef09"}, - {file = "pyarrow-13.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c51afd87c35c8331b56f796eff954b9c7f8d4b7fef5903daf4e05fcf017d23a8"}, - {file = "pyarrow-13.0.0.tar.gz", hash = "sha256:83333726e83ed44b0ac94d8d7a21bbdee4a05029c3b1e8db58a863eec8fd8a33"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, + {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, + {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, + {file = 
"pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, + {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, + {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"}, + {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"}, + {file = 
"pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"}, + {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, + {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, ] [package.dependencies] @@ -3329,13 +3509,13 @@ numpy = ">=1.16.6" [[package]] name = "pyasn1" -version = "0.5.0" +version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, - {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, ] [[package]] @@ -3363,71 +3543,197 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pycryptodome" +version = "3.19.1" +description = "Cryptographic library for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycryptodome-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:694020d2ff985cd714381b9da949a21028c24b86f562526186f6af7c7547e986"}, + {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4464b0e8fd5508bff9baf18e6fd4c6548b1ac2ce9862d6965ff6a84ec9cb302a"}, + {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:420972f9c62978e852c74055d81c354079ce3c3a2213a92c9d7e37bbc63a26e2"}, + {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1bc0c49d986a1491d66d2a56570f12e960b12508b7e71f2423f532e28857f36"}, + {file = "pycryptodome-3.19.1-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e038ab77fec0956d7aa989a3c647652937fc142ef41c9382c2ebd13c127d5b4a"}, + {file = "pycryptodome-3.19.1-cp27-cp27m-win32.whl", hash = "sha256:a991f8ffe8dfe708f86690948ae46442eebdd0fff07dc1b605987939a34ec979"}, + {file = "pycryptodome-3.19.1-cp27-cp27m-win_amd64.whl", hash = "sha256:2c16426ef49d9cba018be2340ea986837e1dfa25c2ea181787971654dd49aadd"}, + {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6d0d2b97758ebf2f36c39060520447c26455acb3bcff309c28b1c816173a6ff5"}, + {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:b8b80ff92049fd042177282917d994d344365ab7e8ec2bc03e853d93d2401786"}, + {file = "pycryptodome-3.19.1-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd4e7e8bf0fc1ada854688b9b309ee607e2aa85a8b44180f91021a4dd330a928"}, + {file = "pycryptodome-3.19.1-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = 
"sha256:8cf5d3d6cf921fa81acd1f632f6cedcc03f5f68fc50c364cd39490ba01d17c49"}, + {file = "pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297"}, + {file = "pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180"}, + {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766"}, + {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065"}, + {file = "pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700"}, + {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96"}, + {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17"}, + {file = "pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2"}, + {file = "pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03"}, + {file = "pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7"}, + {file = "pycryptodome-3.19.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:ed932eb6c2b1c4391e166e1a562c9d2f020bfff44a0e1b108f67af38b390ea89"}, + {file = "pycryptodome-3.19.1-pp27-pypy_73-win32.whl", hash = "sha256:81e9d23c0316fc1b45d984a44881b220062336bbdc340aa9218e8d0656587934"}, + {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37e531bf896b70fe302f003d3be5a0a8697737a8d177967da7e23eff60d6483c"}, + {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd4e95b0eb4b28251c825fe7aa941fe077f993e5ca9b855665935b86fbb1cc08"}, + {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22c80246c3c880c6950d2a8addf156cee74ec0dc5757d01e8e7067a3c7da015"}, + {file = "pycryptodome-3.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e70f5c839c7798743a948efa2a65d1fe96bb397fe6d7f2bde93d869fe4f0ad69"}, + {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6c3df3613592ea6afaec900fd7189d23c8c28b75b550254f4bd33fe94acb84b9"}, + {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08b445799d571041765e7d5c9ca09c5d3866c2f22eeb0dd4394a4169285184f4"}, + {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:954d156cd50130afd53f8d77f830fe6d5801bd23e97a69d358fed068f433fbfe"}, + {file = "pycryptodome-3.19.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b7efd46b0b4ac869046e814d83244aeab14ef787f4850644119b1c8b0ec2d637"}, + {file = "pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776"}, +] + [[package]] name = "pydantic" -version = "1.10.12" -description = "Data 
validation and settings management using python type hints" +version = "2.5.2" +description = "Data validation using Python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = 
"pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.4.0" +pydantic-core = "2.14.5" +typing-extensions = ">=4.6.1" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.5" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = 
"sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = 
"pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = 
"pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" -version = "2.16.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" @@ -3451,13 +3757,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymilvus" -version = "2.3.1" +version = "2.2.16" description = "Python Sdk for Milvus" optional = false python-versions = ">=3.7" files = [ - {file = "pymilvus-2.3.1-py3-none-any.whl", hash = "sha256:ce65e1de8700f33bd9aade20f013291629702e25b05726773208f1f0b22548ff"}, - {file = "pymilvus-2.3.1.tar.gz", hash = "sha256:d460f6204d7deb2cff93716bd65670c1b440694b77701fb0ab0ead791aa582c6"}, + {file = "pymilvus-2.2.16-py3-none-any.whl", hash = "sha256:874c68a361a71badaa880fa32000983d2d716dd8ee5daf2c53e9d928dbe932e3"}, + {file = "pymilvus-2.2.16.tar.gz", hash = "sha256:2921454762c79713e7cc48490c88847734c8d84cf0801dfc57cc224ee84b17ab"}, ] [package.dependencies] @@ -3472,92 +3778,93 @@ ujson = ">=2.0.0" [[package]] name = "pymongo" -version = "4.5.0" +version = "4.6.1" description = "Python driver for MongoDB " optional = false python-versions = ">=3.7" files = [ - {file = "pymongo-4.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d4fa1b01fa7e5b7bb8d312e3542e211b320eb7a4e3d8dc884327039d93cb9e0"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:dfcd2b9f510411de615ccedd47462dae80e82fdc09fe9ab0f0f32f11cf57eeb5"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:3e33064f1984db412b34d51496f4ea785a9cff621c67de58e09fb28da6468a52"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:33faa786cc907de63f745f587e9879429b46033d7d97a7b84b37f4f8f47b9b32"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:76a262c41c1a7cbb84a3b11976578a7eb8e788c4b7bfbd15c005fb6ca88e6e50"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:0f4b125b46fe377984fbaecf2af40ed48b05a4b7676a2ff98999f2016d66b3ec"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:40d5f6e853ece9bfc01e9129b228df446f49316a4252bb1fbfae5c3c9dedebad"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:152259f0f1a60f560323aacf463a3642a65a25557683f49cfa08c8f1ecb2395a"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d64878d1659d2a5bdfd0f0a4d79bafe68653c573681495e424ab40d7b6d6d41"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1bb3a62395ffe835dbef3a1cbff48fbcce709c78bd1f52e896aee990928432b"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe48f50fb6348511a3268a893bfd4ab5f263f5ac220782449d03cd05964d1ae7"}, - {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7591a3beea6a9a4fa3080d27d193b41f631130e3ffa76b88c9ccea123f26dc59"}, - {file 
= "pymongo-4.5.0-cp310-cp310-win32.whl", hash = "sha256:3a7166d57dc74d679caa7743b8ecf7dc3a1235a9fd178654dddb2b2a627ae229"}, - {file = "pymongo-4.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:21b953da14549ff62ea4ae20889c71564328958cbdf880c64a92a48dda4c9c53"}, - {file = "pymongo-4.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ead4f19d0257a756b21ac2e0e85a37a7245ddec36d3b6008d5bfe416525967dc"}, - {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aff6279e405dc953eeb540ab061e72c03cf38119613fce183a8e94f31be608f"}, - {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4c8d6aa91d3e35016847cbe8d73106e3d1c9a4e6578d38e2c346bfe8edb3ca"}, - {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08819da7864f9b8d4a95729b2bea5fffed08b63d3b9c15b4fea47de655766cf5"}, - {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a253b765b7cbc4209f1d8ee16c7287c4268d3243070bf72d7eec5aa9dfe2a2c2"}, - {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8027c9063579083746147cf401a7072a9fb6829678076cd3deff28bb0e0f50c8"}, - {file = "pymongo-4.5.0-cp311-cp311-win32.whl", hash = "sha256:9d2346b00af524757576cc2406414562cced1d4349c92166a0ee377a2a483a80"}, - {file = "pymongo-4.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:c3c3525ea8658ee1192cdddf5faf99b07ebe1eeaa61bf32821126df6d1b8072b"}, - {file = "pymongo-4.5.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e5a27f348909235a106a3903fc8e70f573d89b41d723a500869c6569a391cff7"}, - {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9a9a39b7cac81dca79fca8c2a6479ef4c7b1aab95fad7544cc0e8fd943595a2"}, - {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:496c9cbcb4951183d4503a9d7d2c1e3694aab1304262f831d5e1917e60386036"}, - {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23cc6d7eb009c688d70da186b8f362d61d5dd1a2c14a45b890bd1e91e9c451f2"}, - {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fff7d17d30b2cd45afd654b3fc117755c5d84506ed25fda386494e4e0a3416e1"}, - {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6422b6763b016f2ef2beedded0e546d6aa6ba87910f9244d86e0ac7690f75c96"}, - {file = "pymongo-4.5.0-cp312-cp312-win32.whl", hash = "sha256:77cfff95c1fafd09e940b3fdcb7b65f11442662fad611d0e69b4dd5d17a81c60"}, - {file = "pymongo-4.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:e57d859b972c75ee44ea2ef4758f12821243e99de814030f69a3decb2aa86807"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2b0176f9233a5927084c79ff80b51bd70bfd57e4f3d564f50f80238e797f0c8a"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:89b3f2da57a27913d15d2a07d58482f33d0a5b28abd20b8e643ab4d625e36257"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:5caee7bd08c3d36ec54617832b44985bd70c4cbd77c5b313de6f7fce0bb34f93"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:1d40ad09d9f5e719bc6f729cc6b17f31c0b055029719406bd31dde2f72fca7e7"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = 
"sha256:076afa0a4a96ca9f77fec0e4a0d241200b3b3a1766f8d7be9a905ecf59a7416b"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:3fa3648e4f1e63ddfe53563ee111079ea3ab35c3b09cd25bc22dadc8269a495f"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:44ee985194c426ddf781fa784f31ffa29cb59657b2dba09250a4245431847d73"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b33c17d9e694b66d7e96977e9e56df19d662031483efe121a24772a44ccbbc7e"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d79ae3bb1ff041c0db56f138c88ce1dfb0209f3546d8d6e7c3f74944ecd2439"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d67225f05f6ea27c8dc57f3fa6397c96d09c42af69d46629f71e82e66d33fa4f"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41771b22dd2822540f79a877c391283d4e6368125999a5ec8beee1ce566f3f82"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a1f26bc1f5ce774d99725773901820dfdfd24e875028da4a0252a5b48dcab5c"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3236cf89d69679eaeb9119c840f5c7eb388a2110b57af6bb6baf01a1da387c18"}, - {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e1f61355c821e870fb4c17cdb318669cfbcf245a291ce5053b41140870c3e5cc"}, - {file = "pymongo-4.5.0-cp37-cp37m-win32.whl", hash = "sha256:49dce6957598975d8b8d506329d2a3a6c4aee911fa4bbcf5e52ffc6897122950"}, - {file = "pymongo-4.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2227a08b091bd41df5aadee0a5037673f691e2aa000e1968b1ea2342afc6880"}, - {file = "pymongo-4.5.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:435228d3c16a375274ac8ab9c4f9aef40c5e57ddb8296e20ecec9e2461da1017"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8e559116e4128630ad3b7e788e2e5da81cbc2344dee246af44471fa650486a70"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:840eaf30ccac122df260b6005f9dfae4ac287c498ee91e3e90c56781614ca238"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b4fe46b58010115514b842c669a0ed9b6a342017b15905653a5b1724ab80917f"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:a8127437ebc196a6f5e8fddd746bd0903a400dc6b5ae35df672dd1ccc7170a2a"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:2988ef5e6b360b3ff1c6d55c53515499de5f48df31afd9f785d788cdacfbe2d3"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:e249190b018d63c901678053b4a43e797ca78b93fb6d17633e3567d4b3ec6107"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:1240edc1a448d4ada4bf1a0e55550b6292420915292408e59159fd8bbdaf8f63"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6d2a56fc2354bb6378f3634402eec788a8f3facf0b3e7d468db5f2b5a78d763"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a0aade2b11dc0c326ccd429ee4134d2d47459ff68d449c6d7e01e74651bd255"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74c0da07c04d0781490b2915e7514b1adb265ef22af039a947988c331ee7455b"}, - {file = 
"pymongo-4.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3754acbd7efc7f1b529039fcffc092a15e1cf045e31f22f6c9c5950c613ec4d"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:631492573a1bef2f74f9ac0f9d84e0ce422c251644cd81207530af4aa2ee1980"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2654d1278384cff75952682d17c718ecc1ad1d6227bb0068fd826ba47d426a5"}, - {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:168172ef7856e20ec024fe2a746bfa895c88b32720138e6438fd765ebd2b62dd"}, - {file = "pymongo-4.5.0-cp38-cp38-win32.whl", hash = "sha256:b25f7bea162b3dbec6d33c522097ef81df7c19a9300722fa6853f5b495aecb77"}, - {file = "pymongo-4.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:b520aafc6cb148bac09ccf532f52cbd31d83acf4d3e5070d84efe3c019a1adbf"}, - {file = "pymongo-4.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8543253adfaa0b802bfa88386db1009c6ebb7d5684d093ee4edc725007553d21"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:bc5d8c3647b8ae28e4312f1492b8f29deebd31479cd3abaa989090fb1d66db83"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:505f8519c4c782a61d94a17b0da50be639ec462128fbd10ab0a34889218fdee3"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:53f2dda54d76a98b43a410498bd12f6034b2a14b6844ca08513733b2b20b7ad8"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:9c04b9560872fa9a91251030c488e0a73bce9321a70f991f830c72b3f8115d0d"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:58a63a26a1e3dc481dd3a18d6d9f8bd1d576cd1ffe0d479ba7dd38b0aeb20066"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:f076b779aa3dc179aa3ed861be063a313ed4e48ae9f6a8370a9b1295d4502111"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1b1d7d9aabd8629a31d63cd106d56cca0e6420f38e50563278b520f385c0d86e"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37df8f6006286a5896d1cbc3efb8471ced42e3568d38e6cb00857277047b0d63"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56320c401f544d762fc35766936178fbceb1d9261cd7b24fbfbc8fb6f67aa8a5"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbd705d5f3c3d1ff2d169e418bb789ff07ab3c70d567cc6ba6b72b04b9143481"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a167081c75cf66b32f30e2f1eaee9365af935a86dbd76788169911bed9b5d5"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c42748ccc451dfcd9cef6c5447a7ab727351fd9747ad431db5ebb18a9b78a4d"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf62da7a4cdec9a4b2981fcbd5e08053edffccf20e845c0b6ec1e77eb7fab61d"}, - {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b5bbb87fa0511bd313d9a2c90294c88db837667c2bda2ea3fa7a35b59fd93b1f"}, - {file = "pymongo-4.5.0-cp39-cp39-win32.whl", hash = "sha256:465fd5b040206f8bce7016b01d7e7f79d2fcd7c2b8e41791be9632a9df1b4999"}, - {file = "pymongo-4.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:63d8019eee119df308a075b8a7bdb06d4720bf791e2b73d5ab0e7473c115d79c"}, - 
{file = "pymongo-4.5.0.tar.gz", hash = "sha256:681f252e43b3ef054ca9161635f81b730f4d8cadd28b3f2b2004f5a72f853982"}, + {file = "pymongo-4.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4344c30025210b9fa80ec257b0e0aab5aa1d5cca91daa70d82ab97b482cc038e"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux1_i686.whl", hash = "sha256:1c5654bb8bb2bdb10e7a0bc3c193dd8b49a960b9eebc4381ff5a2043f4c3c441"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:eaf2f65190c506def2581219572b9c70b8250615dc918b3b7c218361a51ec42e"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:262356ea5fcb13d35fb2ab6009d3927bafb9504ef02339338634fffd8a9f1ae4"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:2dd2f6960ee3c9360bed7fb3c678be0ca2d00f877068556785ec2eb6b73d2414"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:ff925f1cca42e933376d09ddc254598f8c5fcd36efc5cac0118bb36c36217c41"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:3cadf7f4c8e94d8a77874b54a63c80af01f4d48c4b669c8b6867f86a07ba994f"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55dac73316e7e8c2616ba2e6f62b750918e9e0ae0b2053699d66ca27a7790105"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:154b361dcb358ad377d5d40df41ee35f1cc14c8691b50511547c12404f89b5cb"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2940aa20e9cc328e8ddeacea8b9a6f5ddafe0b087fedad928912e787c65b4909"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:010bc9aa90fd06e5cc52c8fac2c2fd4ef1b5f990d9638548dde178005770a5e8"}, + {file = "pymongo-4.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e470fa4bace5f50076c32f4b3cc182b31303b4fefb9b87f990144515d572820b"}, + {file = "pymongo-4.6.1-cp310-cp310-win32.whl", hash = "sha256:da08ea09eefa6b960c2dd9a68ec47949235485c623621eb1d6c02b46765322ac"}, + {file = "pymongo-4.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:13d613c866f9f07d51180f9a7da54ef491d130f169e999c27e7633abe8619ec9"}, + {file = "pymongo-4.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6a0ae7a48a6ef82ceb98a366948874834b86c84e288dbd55600c1abfc3ac1d88"}, + {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bd94c503271e79917b27c6e77f7c5474da6930b3fb9e70a12e68c2dff386b9a"}, + {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d4ccac3053b84a09251da8f5350bb684cbbf8c8c01eda6b5418417d0a8ab198"}, + {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:349093675a2d3759e4fb42b596afffa2b2518c890492563d7905fac503b20daa"}, + {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88beb444fb438385e53dc9110852910ec2a22f0eab7dd489e827038fdc19ed8d"}, + {file = "pymongo-4.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8e62d06e90f60ea2a3d463ae51401475568b995bafaffd81767d208d84d7bb1"}, + {file = "pymongo-4.6.1-cp311-cp311-win32.whl", hash = "sha256:5556e306713e2522e460287615d26c0af0fe5ed9d4f431dad35c6624c5d277e9"}, + {file = "pymongo-4.6.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:b10d8cda9fc2fcdcfa4a000aa10413a2bf8b575852cd07cb8a595ed09689ca98"}, + {file = "pymongo-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b435b13bb8e36be11b75f7384a34eefe487fe87a6267172964628e2b14ecf0a7"}, + {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e438417ce1dc5b758742e12661d800482200b042d03512a8f31f6aaa9137ad40"}, + {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b47ebd89e69fbf33d1c2df79759d7162fc80c7652dacfec136dae1c9b3afac7"}, + {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbed8cccebe1169d45cedf00461b2842652d476d2897fd1c42cf41b635d88746"}, + {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30a9e06041fbd7a7590693ec5e407aa8737ad91912a1e70176aff92e5c99d20"}, + {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8729dbf25eb32ad0dc0b9bd5e6a0d0b7e5c2dc8ec06ad171088e1896b522a74"}, + {file = "pymongo-4.6.1-cp312-cp312-win32.whl", hash = "sha256:3177f783ae7e08aaf7b2802e0df4e4b13903520e8380915e6337cdc7a6ff01d8"}, + {file = "pymongo-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:00c199e1c593e2c8b033136d7a08f0c376452bac8a896c923fcd6f419e07bdd2"}, + {file = "pymongo-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6dcc95f4bb9ed793714b43f4f23a7b0c57e4ef47414162297d6f650213512c19"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:13552ca505366df74e3e2f0a4f27c363928f3dff0eef9f281eb81af7f29bc3c5"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:77e0df59b1a4994ad30c6d746992ae887f9756a43fc25dec2db515d94cf0222d"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3a7f02a58a0c2912734105e05dedbee4f7507e6f1bd132ebad520be0b11d46fd"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:026a24a36394dc8930cbcb1d19d5eb35205ef3c838a7e619e04bd170713972e7"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:3b287e814a01deddb59b88549c1e0c87cefacd798d4afc0c8bd6042d1c3d48aa"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:9a710c184ba845afb05a6f876edac8f27783ba70e52d5eaf939f121fc13b2f59"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:30b2c9caf3e55c2e323565d1f3b7e7881ab87db16997dc0cbca7c52885ed2347"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff62ba8ff70f01ab4fe0ae36b2cb0b5d1f42e73dfc81ddf0758cd9f77331ad25"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:547dc5d7f834b1deefda51aedb11a7af9c51c45e689e44e14aa85d44147c7657"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1de3c6faf948f3edd4e738abdb4b76572b4f4fdfc1fed4dad02427e70c5a6219"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2831e05ce0a4df10c4ac5399ef50b9a621f90894c2a4d2945dc5658765514ed"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:144a31391a39a390efce0c5ebcaf4bf112114af4384c90163f402cec5ede476b"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:33bb16a07d3cc4e0aea37b242097cd5f7a156312012455c2fa8ca396953b11c4"}, + {file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b7b1a83ce514700276a46af3d9e481ec381f05b64939effc9065afe18456a6b9"}, + {file = "pymongo-4.6.1-cp37-cp37m-win32.whl", hash = "sha256:3071ec998cc3d7b4944377e5f1217c2c44b811fae16f9a495c7a1ce9b42fb038"}, + {file = "pymongo-4.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2346450a075625c4d6166b40a013b605a38b6b6168ce2232b192a37fb200d588"}, + {file = "pymongo-4.6.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:061598cbc6abe2f382ab64c9caa83faa2f4c51256f732cdd890bcc6e63bfb67e"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d483793a384c550c2d12cb794ede294d303b42beff75f3b3081f57196660edaf"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f9756f1d25454ba6a3c2f1ef8b7ddec23e5cdeae3dc3c3377243ae37a383db00"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:1ed23b0e2dac6f84f44c8494fbceefe6eb5c35db5c1099f56ab78fc0d94ab3af"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:3d18a9b9b858ee140c15c5bfcb3e66e47e2a70a03272c2e72adda2482f76a6ad"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:c258dbacfff1224f13576147df16ce3c02024a0d792fd0323ac01bed5d3c545d"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:f7acc03a4f1154ba2643edeb13658d08598fe6e490c3dd96a241b94f09801626"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:76013fef1c9cd1cd00d55efde516c154aa169f2bf059b197c263a255ba8a9ddf"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0e6a6c807fa887a0c51cc24fe7ea51bb9e496fe88f00d7930063372c3664c3"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd1fa413f8b9ba30140de198e4f408ffbba6396864c7554e0867aa7363eb58b2"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d219b4508f71d762368caec1fc180960569766049bbc4d38174f05e8ef2fe5b"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27b81ecf18031998ad7db53b960d1347f8f29e8b7cb5ea7b4394726468e4295e"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56816e43c92c2fa8c11dc2a686f0ca248bea7902f4a067fa6cbc77853b0f041e"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef801027629c5b511cf2ba13b9be29bfee36ae834b2d95d9877818479cdc99ea"}, + {file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d4c2be9760b112b1caf649b4977b81b69893d75aa86caf4f0f398447be871f3c"}, + {file = "pymongo-4.6.1-cp38-cp38-win32.whl", hash = "sha256:39d77d8bbb392fa443831e6d4ae534237b1f4eee6aa186f0cdb4e334ba89536e"}, + {file = "pymongo-4.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:4497d49d785482cc1a44a0ddf8830b036a468c088e72a05217f5b60a9e025012"}, + {file = "pymongo-4.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:69247f7a2835fc0984bbf0892e6022e9a36aec70e187fcfe6cae6a373eb8c4de"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7bb0e9049e81def6829d09558ad12d16d0454c26cabe6efc3658e544460688d9"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6a1810c2cbde714decf40f811d1edc0dae45506eb37298fd9d4247b8801509fe"}, + {file = 
"pymongo-4.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2aced6fb2f5261b47d267cb40060b73b6527e64afe54f6497844c9affed5fd0"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d0355cff58a4ed6d5e5f6b9c3693f52de0784aa0c17119394e2a8e376ce489d4"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:3c74f4725485f0a7a3862cfd374cc1b740cebe4c133e0c1425984bcdcce0f4bb"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:9c79d597fb3a7c93d7c26924db7497eba06d58f88f58e586aa69b2ad89fee0f8"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8ec75f35f62571a43e31e7bd11749d974c1b5cd5ea4a8388725d579263c0fdf6"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e641f931c5cd95b376fd3c59db52770e17bec2bf86ef16cc83b3906c054845"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9aafd036f6f2e5ad109aec92f8dbfcbe76cff16bad683eb6dd18013739c0b3ae"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f2b856518bfcfa316c8dae3d7b412aecacf2e8ba30b149f5eb3b63128d703b9"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec31adc2e988fd7db3ab509954791bbc5a452a03c85e45b804b4bfc31fa221d"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9167e735379ec43d8eafa3fd675bfbb12e2c0464f98960586e9447d2cf2c7a83"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1461199b07903fc1424709efafe379205bf5f738144b1a50a08b0396357b5abf"}, + {file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3094c7d2f820eecabadae76bfec02669567bbdd1730eabce10a5764778564f7b"}, + {file = "pymongo-4.6.1-cp39-cp39-win32.whl", hash = "sha256:c91ea3915425bd4111cb1b74511cdc56d1d16a683a48bf2a5a96b6a6c0f297f7"}, + {file = "pymongo-4.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:ef102a67ede70e1721fe27f75073b5314911dbb9bc27cde0a1c402a11531e7bd"}, + {file = "pymongo-4.6.1.tar.gz", hash = "sha256:31dab1f3e1d0cdd57e8df01b645f52d43cc1b653ed3afd535d2891f4fc4f9712"}, ] [package.dependencies] @@ -3569,6 +3876,7 @@ encryption = ["certifi", "pymongo[aws]", "pymongocrypt (>=1.6.0,<2.0.0)"] gssapi = ["pykerberos", "winkerberos (>=0.5.0)"] ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] snappy = ["python-snappy"] +test = ["pytest (>=7)"] zstd = ["zstandard"] [[package]] @@ -3594,13 +3902,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.2" +version = "7.4.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, - {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -3616,13 +3924,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-asyncio" -version = "0.21.1" +version = 
"0.23.2" description = "Pytest support for asyncio" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, - {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, + {file = "pytest-asyncio-0.23.2.tar.gz", hash = "sha256:c16052382554c7b22d48782ab3438d5b10f8cf7a4bdcae7f0f67f097d95beecc"}, + {file = "pytest_asyncio-0.23.2-py3-none-any.whl", hash = "sha256:ea9021364e32d58f0be43b91c6233fb8d2224ccef2398d6837559e587682808f"}, ] [package.dependencies] @@ -3630,7 +3938,25 @@ pytest = ">=7.0.0" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "python-dateutil" @@ -3662,13 +3988,13 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -3719,6 +4045,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3755,104 +4082,104 @@ files = [ [[package]] name = "pyzmq" -version = "25.1.1" +version = "25.1.2" description = "Python 
bindings for 0MQ" optional = false python-versions = ">=3.6" files = [ - {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, - {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, - {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, - {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, - {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, - {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, - {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, - {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, - {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, - {file = 
"pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, - {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, - {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, - {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, - {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, - {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, - {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, - {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, - {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, - {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, - {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, - {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, - {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, - {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, - {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, - {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, - {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, - {file = "pyzmq-25.1.1.tar.gz", hash = 
"sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = 
"pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = 
"sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, ] [package.dependencies] @@ -3860,26 +4187,26 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qdrant-client" -version = "1.5.4" +version = "1.7.0" description = "Client library for the Qdrant vector search engine" optional = false -python-versions = ">=3.8,<3.12" +python-versions = ">=3.8,<3.13" files = [ - {file = "qdrant_client-1.5.4-py3-none-any.whl", hash = "sha256:b0247886c51d755f70dc1a62545f38ba5c7d72971ab533f1264fb695c21a6d8f"}, - {file = "qdrant_client-1.5.4.tar.gz", hash = "sha256:6ffbca94f7cab23230001710b7dc04684dbc18dadf66982179a37531b4c4b178"}, + {file = "qdrant_client-1.7.0-py3-none-any.whl", hash = "sha256:ab5779cf3f008da2a801c943413423f1ff434128dfaeda031f037453e1fa8306"}, + {file = "qdrant_client-1.7.0.tar.gz", hash = "sha256:bbe0656020c2f11061d7836b87e99ba6b50a028f5318459cc1fddf4ef73d9a8b"}, ] [package.dependencies] grpcio = ">=1.41.0" grpcio-tools = ">=1.41.0" httpx = {version = ">=0.14.0", extras = ["http2"]} -numpy = {version = ">=1.21", markers = "python_version >= \"3.8\""} +numpy = {version = ">=1.21", markers = "python_version >= \"3.8\" and python_version < \"3.12\""} portalocker = ">=2.7.0,<3.0.0" pydantic = ">=1.10.8" urllib3 = ">=1.26.14,<2.0.0" [package.extras] -fastembed = ["fastembed (==0.0.4)"] +fastembed = ["fastembed (==0.1.1)"] [[package]] name = "redis" @@ -4048,108 +4375,110 @@ six = "*" [[package]] name = "rpds-py" -version = "0.9.2" +version = "0.15.2" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.9.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ab6919a09c055c9b092798ce18c6c4adf49d24d4d9e43a92b257e3f2548231e7"}, - {file = "rpds_py-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d55777a80f78dd09410bd84ff8c95ee05519f41113b2df90a69622f5540c4f8b"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a216b26e5af0a8e265d4efd65d3bcec5fba6b26909014effe20cd302fd1138fa"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29cd8bfb2d716366a035913ced99188a79b623a3512292963d84d3e06e63b496"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44659b1f326214950a8204a248ca6199535e73a694be8d3e0e869f820767f12f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:745f5a43fdd7d6d25a53ab1a99979e7f8ea419dfefebcab0a5a1e9095490ee5e"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a987578ac5214f18b99d1f2a3851cba5b09f4a689818a106c23dbad0dfeb760f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf4151acb541b6e895354f6ff9ac06995ad9e4175cbc6d30aaed08856558201f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03421628f0dc10a4119d714a17f646e2837126a25ac7a256bdf7c3943400f67f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13b602dc3e8dff3063734f02dcf05111e887f301fdda74151a93dbbc249930fe"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fae5cb554b604b3f9e2c608241b5d8d303e410d7dfb6d397c335f983495ce7f6"}, - {file = "rpds_py-0.9.2-cp310-none-win32.whl", hash = "sha256:47c5f58a8e0c2c920cc7783113df2fc4ff12bf3a411d985012f145e9242a2764"}, - {file = 
"rpds_py-0.9.2-cp310-none-win_amd64.whl", hash = "sha256:4ea6b73c22d8182dff91155af018b11aac9ff7eca085750455c5990cb1cfae6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e564d2238512c5ef5e9d79338ab77f1cbbda6c2d541ad41b2af445fb200385e3"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f411330a6376fb50e5b7a3e66894e4a39e60ca2e17dce258d53768fea06a37bd"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e7521f5af0233e89939ad626b15278c71b69dc1dfccaa7b97bd4cdf96536bb7"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3335c03100a073883857e91db9f2e0ef8a1cf42dc0369cbb9151c149dbbc1b"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d25b1c1096ef0447355f7293fbe9ad740f7c47ae032c2884113f8e87660d8f6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a5d3fbd02efd9cf6a8ffc2f17b53a33542f6b154e88dd7b42ef4a4c0700fdad"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5934e2833afeaf36bd1eadb57256239785f5af0220ed8d21c2896ec4d3a765f"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:095b460e117685867d45548fbd8598a8d9999227e9061ee7f012d9d264e6048d"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91378d9f4151adc223d584489591dbb79f78814c0734a7c3bfa9c9e09978121c"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:24a81c177379300220e907e9b864107614b144f6c2a15ed5c3450e19cf536fae"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:de0b6eceb46141984671802d412568d22c6bacc9b230174f9e55fc72ef4f57de"}, - {file = "rpds_py-0.9.2-cp311-none-win32.whl", hash = "sha256:700375326ed641f3d9d32060a91513ad668bcb7e2cffb18415c399acb25de2ab"}, - {file = "rpds_py-0.9.2-cp311-none-win_amd64.whl", hash = "sha256:0766babfcf941db8607bdaf82569ec38107dbb03c7f0b72604a0b346b6eb3298"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1440c291db3f98a914e1afd9d6541e8fc60b4c3aab1a9008d03da4651e67386"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0f2996fbac8e0b77fd67102becb9229986396e051f33dbceada3debaacc7033f"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f30d205755566a25f2ae0382944fcae2f350500ae4df4e795efa9e850821d82"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:159fba751a1e6b1c69244e23ba6c28f879a8758a3e992ed056d86d74a194a0f3"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1f044792e1adcea82468a72310c66a7f08728d72a244730d14880cd1dabe36b"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9251eb8aa82e6cf88510530b29eef4fac825a2b709baf5b94a6094894f252387"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01899794b654e616c8625b194ddd1e5b51ef5b60ed61baa7a2d9c2ad7b2a4238"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0c43f8ae8f6be1d605b0465671124aa8d6a0e40f1fb81dcea28b7e3d87ca1e1"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:207f57c402d1f8712618f737356e4b6f35253b6d20a324d9a47cb9f38ee43a6b"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b52e7c5ae35b00566d244ffefba0f46bb6bec749a50412acf42b1c3f402e2c90"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:978fa96dbb005d599ec4fd9ed301b1cc45f1a8f7982d4793faf20b404b56677d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6aa8326a4a608e1c28da191edd7c924dff445251b94653988efb059b16577a4d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aad51239bee6bff6823bbbdc8ad85136c6125542bbc609e035ab98ca1e32a192"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd4dc3602370679c2dfb818d9c97b1137d4dd412230cfecd3c66a1bf388a196"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd9da77c6ec1f258387957b754f0df60766ac23ed698b61941ba9acccd3284d1"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:190ca6f55042ea4649ed19c9093a9be9d63cd8a97880106747d7147f88a49d18"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:876bf9ed62323bc7dcfc261dbc5572c996ef26fe6406b0ff985cbcf460fc8a4c"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa2818759aba55df50592ecbc95ebcdc99917fa7b55cc6796235b04193eb3c55"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ea4d00850ef1e917815e59b078ecb338f6a8efda23369677c54a5825dbebb55"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5855c85eb8b8a968a74dc7fb014c9166a05e7e7a8377fb91d78512900aadd13d"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:14c408e9d1a80dcb45c05a5149e5961aadb912fff42ca1dd9b68c0044904eb32"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:65a0583c43d9f22cb2130c7b110e695fff834fd5e832a776a107197e59a1898e"}, - {file = "rpds_py-0.9.2-cp38-none-win32.whl", hash = "sha256:71f2f7715935a61fa3e4ae91d91b67e571aeb5cb5d10331ab681256bda2ad920"}, - {file = "rpds_py-0.9.2-cp38-none-win_amd64.whl", hash = "sha256:674c704605092e3ebbbd13687b09c9f78c362a4bc710343efe37a91457123044"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:07e2c54bef6838fa44c48dfbc8234e8e2466d851124b551fc4e07a1cfeb37260"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fdf55283ad38c33e35e2855565361f4bf0abd02470b8ab28d499c663bc5d7c"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:890ba852c16ace6ed9f90e8670f2c1c178d96510a21b06d2fa12d8783a905193"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50025635ba8b629a86d9d5474e650da304cb46bbb4d18690532dd79341467846"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517cbf6e67ae3623c5127206489d69eb2bdb27239a3c3cc559350ef52a3bbf0b"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0836d71ca19071090d524739420a61580f3f894618d10b666cf3d9a1688355b1"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c439fd54b2b9053717cca3de9583be6584b384d88d045f97d409f0ca867d80f"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:f68996a3b3dc9335037f82754f9cdbe3a95db42bde571d8c3be26cc6245f2324"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7d68dc8acded354c972116f59b5eb2e5864432948e098c19fe6994926d8e15c3"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f963c6b1218b96db85fc37a9f0851eaf8b9040aa46dec112611697a7023da535"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a46859d7f947061b4010e554ccd1791467d1b1759f2dc2ec9055fa239f1bc26"}, - {file = "rpds_py-0.9.2-cp39-none-win32.whl", hash = "sha256:e07e5dbf8a83c66783a9fe2d4566968ea8c161199680e8ad38d53e075df5f0d0"}, - {file = "rpds_py-0.9.2-cp39-none-win_amd64.whl", hash = "sha256:682726178138ea45a0766907957b60f3a1bf3acdf212436be9733f28b6c5af3c"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:196cb208825a8b9c8fc360dc0f87993b8b260038615230242bf18ec84447c08d"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c7671d45530fcb6d5e22fd40c97e1e1e01965fc298cbda523bb640f3d923b387"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b32f0940adec65099f3b1c215ef7f1d025d13ff947975a055989cb7fd019a4"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f67da97f5b9eac838b6980fc6da268622e91f8960e083a34533ca710bec8611"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03975db5f103997904c37e804e5f340c8fdabbb5883f26ee50a255d664eed58c"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:987b06d1cdb28f88a42e4fb8a87f094e43f3c435ed8e486533aea0bf2e53d931"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c861a7e4aef15ff91233751619ce3a3d2b9e5877e0fcd76f9ea4f6847183aa16"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02938432352359805b6da099c9c95c8a0547fe4b274ce8f1a91677401bb9a45f"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ef1f08f2a924837e112cba2953e15aacfccbbfcd773b4b9b4723f8f2ddded08e"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:35da5cc5cb37c04c4ee03128ad59b8c3941a1e5cd398d78c37f716f32a9b7f67"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:141acb9d4ccc04e704e5992d35472f78c35af047fa0cfae2923835d153f091be"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79f594919d2c1a0cc17d1988a6adaf9a2f000d2e1048f71f298b056b1018e872"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a06418fe1155e72e16dddc68bb3780ae44cebb2912fbd8bb6ff9161de56e1798"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2eb034c94b0b96d5eddb290b7b5198460e2d5d0c421751713953a9c4e47d10"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b08605d248b974eb02f40bdcd1a35d3924c83a2a5e8f5d0fa5af852c4d960af"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0805911caedfe2736935250be5008b261f10a729a303f676d3d5fea6900c96a"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ab2299e3f92aa5417d5e16bb45bb4586171c1327568f638e8453c9f8d9e0f020"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c8d7594e38cf98d8a7df25b440f684b510cf4627fe038c297a87496d10a174f"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b9ec12ad5f0a4625db34db7e0005be2632c1013b253a4a60e8302ad4d462afd"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1fcdee18fea97238ed17ab6478c66b2095e4ae7177e35fb71fbe561a27adf620"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:933a7d5cd4b84f959aedeb84f2030f0a01d63ae6cf256629af3081cf3e3426e8"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:686ba516e02db6d6f8c279d1641f7067ebb5dc58b1d0536c4aaebb7bf01cdc5d"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0173c0444bec0a3d7d848eaeca2d8bd32a1b43f3d3fde6617aac3731fa4be05f"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d576c3ef8c7b2d560e301eb33891d1944d965a4d7a2eacb6332eee8a71827db6"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed89861ee8c8c47d6beb742a602f912b1bb64f598b1e2f3d758948721d44d468"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1054a08e818f8e18910f1bee731583fe8f899b0a0a5044c6e680ceea34f93876"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99e7c4bb27ff1aab90dcc3e9d37ee5af0231ed98d99cb6f5250de28889a3d502"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c545d9d14d47be716495076b659db179206e3fd997769bc01e2d550eeb685596"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9039a11bca3c41be5a58282ed81ae422fa680409022b996032a43badef2a3752"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fb39aca7a64ad0c9490adfa719dbeeb87d13be137ca189d2564e596f8ba32c07"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2d8b3b3a2ce0eaa00c5bbbb60b6713e94e7e0becab7b3db6c5c77f979e8ed1f1"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:99b1c16f732b3a9971406fbfe18468592c5a3529585a45a35adbc1389a529a03"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c27ee01a6c3223025f4badd533bea5e87c988cb0ba2811b690395dfe16088cfe"}, - {file = "rpds_py-0.9.2.tar.gz", hash = "sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"}, + {file = "rpds_py-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:337a8653fb11d2fbe7157c961cc78cb3c161d98cf44410ace9a3dc2db4fad882"}, + {file = "rpds_py-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:813a65f95bfcb7c8f2a70dd6add9b51e9accc3bdb3e03d0ff7a9e6a2d3e174bf"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:082e0e55d73690ffb4da4352d1b5bbe1b5c6034eb9dc8c91aa2a3ee15f70d3e2"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5595c80dd03d7e6c6afb73f3594bf3379a7d79fa57164b591d012d4b71d6ac4c"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:fb10bb720348fe1647a94eb605accb9ef6a9b1875d8845f9e763d9d71a706387"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53304cc14b1d94487d70086e1cb0cb4c29ec6da994d58ae84a4d7e78c6a6d04d"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d64a657de7aae8db2da60dc0c9e4638a0c3893b4d60101fd564a3362b2bfeb34"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ee40206d1d6e95eaa2b7b919195e3689a5cf6ded730632de7f187f35a1b6052c"}, + {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1607cda6129f815493a3c184492acb5ae4aa6ed61d3a1b3663aa9824ed26f7ac"}, + {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3e6e2e502c4043c52a99316d89dc49f416acda5b0c6886e0dd8ea7bb35859e8"}, + {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:044f6f46d62444800402851afa3c3ae50141f12013060c1a3a0677e013310d6d"}, + {file = "rpds_py-0.15.2-cp310-none-win32.whl", hash = "sha256:c827a931c6b57f50f1bb5de400dcfb00bad8117e3753e80b96adb72d9d811514"}, + {file = "rpds_py-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3bbc89ce2a219662ea142f0abcf8d43f04a41d5b1880be17a794c39f0d609cb0"}, + {file = "rpds_py-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:1fd0f0b1ccd7d537b858a56355a250108df692102e08aa2036e1a094fd78b2dc"}, + {file = "rpds_py-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b414ef79f1f06fb90b5165db8aef77512c1a5e3ed1b4807da8476b7e2c853283"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c31272c674f725dfe0f343d73b0abe8c878c646967ec1c6106122faae1efc15b"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6945c2d61c42bb7e818677f43638675b8c1c43e858b67a96df3eb2426a86c9d"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02744236ac1895d7be837878e707a5c35fb8edc5137602f253b63623d7ad5c8c"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2181e86d4e1cdf49a7320cb72a36c45efcb7670d0a88f09fd2d3a7967c0540fd"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a8ff8e809da81363bffca2b965cb6e4bf6056b495fc3f078467d1f8266fe27f"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97532802f14d383f37d603a56e226909f825a83ff298dc1b6697de00d2243999"}, + {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:13716e53627ad97babf72ac9e01cf9a7d4af2f75dd5ed7b323a7a9520e948282"}, + {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2f1f295a5c28cfa74a7d48c95acc1c8a7acd49d7d9072040d4b694fe11cd7166"}, + {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8ec464f20fe803ae00419bd1610934e3bda963aeba1e6181dfc9033dc7e8940c"}, + {file = "rpds_py-0.15.2-cp311-none-win32.whl", hash = "sha256:b61d5096e75fd71018b25da50b82dd70ec39b5e15bb2134daf7eb7bbbc103644"}, + {file = "rpds_py-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:9d41ebb471a6f064c0d1c873c4f7dded733d16ca5db7d551fb04ff3805d87802"}, + {file = "rpds_py-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:13ff62d3561a23c17341b4afc78e8fcfd799ab67c0b1ca32091d71383a98ba4b"}, + {file = "rpds_py-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:b70b45a40ad0798b69748b34d508259ef2bdc84fb2aad4048bc7c9cafb68ddb3"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ecbba7efd82bd2a4bb88aab7f984eb5470991c1347bdd1f35fb34ea28dba6e"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9d38494a8d21c246c535b41ecdb2d562c4b933cf3d68de03e8bc43a0d41be652"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13152dfe7d7c27c40df8b99ac6aab12b978b546716e99f67e8a67a1d441acbc3"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:164fcee32f15d04d61568c9cb0d919e37ff3195919cd604039ff3053ada0461b"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a5122b17a4faf5d7a6d91fa67b479736c0cacc7afe791ddebb7163a8550b799"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:46b4f3d47d1033db569173be62365fbf7808c2bd3fb742314d251f130d90d44c"}, + {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c61e42b4ceb9759727045765e87d51c1bb9f89987aca1fcc8a040232138cad1c"}, + {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d2aa3ca9552f83b0b4fa6ca8c6ce08da6580f37e3e0ab7afac73a1cfdc230c0e"}, + {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec19e823b4ccd87bd69e990879acbce9e961fc7aebe150156b8f4418d4b27b7f"}, + {file = "rpds_py-0.15.2-cp312-none-win32.whl", hash = "sha256:afeabb382c1256a7477b739820bce7fe782bb807d82927102cee73e79b41b38b"}, + {file = "rpds_py-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:422b0901878a31ef167435c5ad46560362891816a76cc0d150683f3868a6f0d1"}, + {file = "rpds_py-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:baf744e5f9d5ee6531deea443be78b36ed1cd36c65a0b95ea4e8d69fa0102268"}, + {file = "rpds_py-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7e072f5da38d6428ba1fc1115d3cc0dae895df671cb04c70c019985e8c7606be"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f138f550b83554f5b344d6be35d3ed59348510edc3cb96f75309db6e9bfe8210"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2a4cd924d0e2f4b1a68034abe4cadc73d69ad5f4cf02db6481c0d4d749f548f"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5eb05b654a41e0f81ab27a7c3e88b6590425eb3e934e1d533ecec5dc88a6ffff"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ee066a64f0d2ba45391cac15b3a70dcb549e968a117bd0500634754cfe0e5fc"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51a899792ee2c696072791e56b2020caff58b275abecbc9ae0cb71af0645c95"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac2ac84a4950d627d84b61f082eba61314373cfab4b3c264b62efab02ababe83"}, + {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:62b292fff4739c6be89e6a0240c02bda5a9066a339d90ab191cf66e9fdbdc193"}, + {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:98ee201a52a7f65608e5494518932e1473fd43535f12cade0a1b4ab32737fe28"}, + {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3d40fb3ca22e3d40f494d577441b263026a3bd8c97ae6ce89b2d3c4b39ac9581"}, + {file = 
"rpds_py-0.15.2-cp38-none-win32.whl", hash = "sha256:30479a9f1fce47df56b07460b520f49fa2115ec2926d3b1303c85c81f8401ed1"}, + {file = "rpds_py-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:2df3d07a16a3bef0917b28cd564778fbb31f3ffa5b5e33584470e2d1b0f248f0"}, + {file = "rpds_py-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:56b51ba29a18e5f5810224bcf00747ad931c0716e3c09a76b4a1edd3d4aba71f"}, + {file = "rpds_py-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c11bc5814554b018f6c5d6ae0969e43766f81e995000b53a5d8c8057055e886"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2faa97212b0dc465afeedf49045cdd077f97be1188285e646a9f689cb5dfff9e"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:86c01299942b0f4b5b5f28c8701689181ad2eab852e65417172dbdd6c5b3ccc8"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd7d3608589072f63078b4063a6c536af832e76b0b3885f1bfe9e892abe6c207"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:938518a11780b39998179d07f31a4a468888123f9b00463842cd40f98191f4d3"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dccc623725d0b298f557d869a68496a2fd2a9e9c41107f234fa5f7a37d278ac"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d46ee458452727a147d7897bb33886981ae1235775e05decae5d5d07f537695a"}, + {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d9d7ebcd11ea76ba0feaae98485cd8e31467c3d7985210fab46983278214736b"}, + {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8a5f574b92b3ee7d254e56d56e37ec0e1416acb1ae357c4956d76a1788dc58fb"}, + {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3db0c998c92b909d7c90b66c965590d4f3cd86157176a6cf14aa1f867b77b889"}, + {file = "rpds_py-0.15.2-cp39-none-win32.whl", hash = "sha256:bbc7421cbd28b4316d1d017db338039a7943f945c6f2bb15e1439b14b5682d28"}, + {file = "rpds_py-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:1c24e30d720c0009b6fb2e1905b025da56103c70a8b31b99138e4ed1c2a6c5b0"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e6fcd0a0f62f2997107f758bb372397b8d5fd5f39cc6dcb86f7cb98a2172d6c"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d800a8e2ac62db1b9ea5d6d1724f1a93c53907ca061de4d05ed94e8dfa79050c"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e09d017e3f4d9bd7d17a30d3f59e4d6d9ba2d2ced280eec2425e84112cf623f"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b88c3ab98556bc351b36d6208a6089de8c8db14a7f6e1f57f82a334bd2c18f0b"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f333bfe782a2d05a67cfaa0cc9cd68b36b39ee6acfe099f980541ed973a7093"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b629db53fe17e6ce478a969d30bd1d0e8b53238c46e3a9c9db39e8b65a9ef973"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485fbdd23becb822804ed05622907ee5c8e8a5f43f6f43894a45f463b2217045"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:893e38d0f4319dfa70c0f36381a37cc418985c87b11d9784365b1fff4fa6973b"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8ffdeb7dbd0160d4e391e1f857477e4762d00aa2199c294eb95dfb9451aa1d9f"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:fc33267d58dfbb2361baed52668c5d8c15d24bc0372cecbb79fed77339b55e0d"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2e7e5633577b3bd56bf3af2ef6ae3778bbafb83743989d57f0e7edbf6c0980e4"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8b9650f92251fdef843e74fc252cdfd6e3c700157ad686eeb0c6d7fdb2d11652"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:07a2e1d78d382f7181789713cdf0c16edbad4fe14fe1d115526cb6f0eef0daa3"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f9c5875515820633bd7709a25c3e60c1ea9ad1c5d4030ce8a8c203309c36fd"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:580182fa5b269c2981e9ce9764367cb4edc81982ce289208d4607c203f44ffde"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa1e626c524d2c7972c0f3a8a575d654a3a9c008370dc2a97e46abd0eaa749b9"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae9d83a81b09ce3a817e2cbb23aabc07f86a3abc664c613cd283ce7a03541e95"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9235be95662559141934fced8197de6fee8c58870f36756b0584424b6d708393"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a72e00826a2b032dda3eb25aa3e3579c6d6773d22d8446089a57a123481cc46c"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ab095edf1d840a6a6a4307e1a5b907a299a94e7b90e75436ee770b8c35d22a25"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b79c63d29101cbaa53a517683557bb550462394fb91044cc5998dd2acff7340"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:911e600e798374c0d86235e7ef19109cf865d1336942d398ff313375a25a93ba"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3cd61e759c4075510052d1eca5cddbd297fe1164efec14ef1fce3f09b974dfe4"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9d2ae79f31da5143e020a8d4fc74e1f0cbcb8011bdf97453c140aa616db51406"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e99d6510c8557510c220b865d966b105464740dcbebf9b79ecd4fbab30a13d9"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c43e1b89099279cc03eb1c725c5de12af6edcd2f78e2f8a022569efa639ada3"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7187bee72384b9cfedf09a29a3b2b6e8815cc64c095cdc8b5e6aec81e9fd5f"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3423007fc0661827e06f8a185a3792c73dda41f30f3421562f210cf0c9e49569"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2974e6dff38afafd5ccf8f41cb8fc94600b3f4fd9b0a98f6ece6e2219e3158d5"}, + {file = 
"rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:93c18a1696a8e0388ed84b024fe1a188a26ba999b61d1d9a371318cb89885a8c"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd0841a586b7105513a7c8c3d5c276f3adc762a072d81ef7fae80632afad1e"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:709dc11af2f74ba89c68b1592368c6edcbccdb0a06ba77eb28c8fe08bb6997da"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:fc066395e6332da1e7525d605b4c96055669f8336600bef8ac569d5226a7c76f"}, + {file = "rpds_py-0.15.2.tar.gz", hash = "sha256:373b76eeb79e8c14f6d82cb1d4d5293f9e4059baec6c1b16dca7ad13b6131b39"}, ] [[package]] @@ -4168,206 +4497,264 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruamel-yaml" -version = "0.17.32" +version = "0.18.5" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = false -python-versions = ">=3" +python-versions = ">=3.7" files = [ - {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, - {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, + {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, + {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, ] [package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} [package.extras] -docs = ["ryd"] +docs = ["mercurial (>5.7)", "ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel-yaml-clib" -version = "0.2.7" +version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, - {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, - {file = 
"ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, - {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, - {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, - {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, - {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, - {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, ] [[package]] name = "ruff" -version = "0.0.289" -description = "An extremely fast Python linter, written in Rust." +version = "0.1.8" +description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.289-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:c9a89d748e90c840bac9c37afe90cf13a5bfd460ca02ea93dad9d7bee3af03b4"}, - {file = "ruff-0.0.289-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7f7396c6ea01ba332a6ad9d47642bac25d16bd2076aaa595b001f58b2f32ff05"}, - {file = "ruff-0.0.289-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7180de86c8ecd39624dec1699136f941c07e723201b4ce979bec9e7c67b40ad2"}, - {file = "ruff-0.0.289-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73f37c65508203dd01a539926375a10243769c20d4fcab3fa6359cd3fbfc54b7"}, - {file = "ruff-0.0.289-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c14abcd7563b5c80be2dd809eeab20e4aa716bf849860b60a22d87ddf19eb88"}, - {file = "ruff-0.0.289-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:91b6d63b6b46d4707916472c91baa87aa0592e73f62a80ff55efdf6c0668cfd6"}, - {file = "ruff-0.0.289-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6479b8c4be3c36046c6c92054762b276fa0fddb03f6b9a310fbbf4c4951267fd"}, - {file = "ruff-0.0.289-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5424318c254bcb091cb67e140ec9b9f7122074e100b06236f252923fb41e767"}, - {file = "ruff-0.0.289-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4daa90865796aedcedf0d8897fdd4cd09bf0ddd3504529a4ccf211edcaff3c7d"}, - {file = "ruff-0.0.289-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8057e8ab0016c13b9419bad119e854f881e687bd96bc5e2d52c8baac0f278a44"}, - {file = "ruff-0.0.289-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7eebfab2e6a6991908ff1bf82f2dc1e5095fc7e316848e62124526837b445f4d"}, - {file = "ruff-0.0.289-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ebc7af550018001a7fb39ca22cdce20e1a0de4388ea4a007eb5c822f6188c297"}, - {file = "ruff-0.0.289-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6e4e6eccb753efe760ba354fc8e9f783f6bba71aa9f592756f5bd0d78db898ed"}, - {file = "ruff-0.0.289-py3-none-win32.whl", hash = "sha256:bbb3044f931c09cf17dbe5b339896eece0d6ac10c9a86e172540fcdb1974f2b7"}, - {file = "ruff-0.0.289-py3-none-win_amd64.whl", hash = "sha256:6d043c5456b792be2615a52f16056c3cf6c40506ce1f2d6f9d3083cfcb9eeab6"}, - {file = "ruff-0.0.289-py3-none-win_arm64.whl", hash = "sha256:04a720bcca5e987426bb14ad8b9c6f55e259ea774da1cbeafe71569744cfd20a"}, - {file = "ruff-0.0.289.tar.gz", hash = "sha256:2513f853b0fc42f0339b7ab0d2751b63ce7a50a0032d2689b54b2931b3b866d7"}, + {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7de792582f6e490ae6aef36a58d85df9f7a0cfd1b0d4fe6b4fb51803a3ac96fa"}, + {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8e3255afd186c142eef4ec400d7826134f028a85da2146102a1172ecc7c3696"}, + {file = "ruff-0.1.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff78a7583020da124dd0deb835ece1d87bb91762d40c514ee9b67a087940528b"}, + {file = "ruff-0.1.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd8ee69b02e7bdefe1e5da2d5b6eaaddcf4f90859f00281b2333c0e3a0cc9cd6"}, + {file = "ruff-0.1.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a05b0ddd7ea25495e4115a43125e8a7ebed0aa043c3d432de7e7d6e8e8cd6448"}, + {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:e6f08ca730f4dc1b76b473bdf30b1b37d42da379202a059eae54ec7fc1fbcfed"}, + {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f35960b02df6b827c1b903091bb14f4b003f6cf102705efc4ce78132a0aa5af3"}, + {file = "ruff-0.1.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d076717c67b34c162da7c1a5bda16ffc205e0e0072c03745275e7eab888719f"}, + {file = "ruff-0.1.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a21ab023124eafb7cef6d038f835cb1155cd5ea798edd8d9eb2f8b84be07d9"}, + {file = "ruff-0.1.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ce697c463458555027dfb194cb96d26608abab920fa85213deb5edf26e026664"}, + {file = "ruff-0.1.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:db6cedd9ffed55548ab313ad718bc34582d394e27a7875b4b952c2d29c001b26"}, + {file = "ruff-0.1.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:05ffe9dbd278965271252704eddb97b4384bf58b971054d517decfbf8c523f05"}, + {file = "ruff-0.1.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5daaeaf00ae3c1efec9742ff294b06c3a2a9db8d3db51ee4851c12ad385cda30"}, + {file = "ruff-0.1.8-py3-none-win32.whl", hash = "sha256:e49fbdfe257fa41e5c9e13c79b9e79a23a79bd0e40b9314bc53840f520c2c0b3"}, + {file = "ruff-0.1.8-py3-none-win_amd64.whl", hash = "sha256:f41f692f1691ad87f51708b823af4bb2c5c87c9248ddd3191c8f088e66ce590a"}, + {file = "ruff-0.1.8-py3-none-win_arm64.whl", hash = "sha256:aa8ee4f8440023b0a6c3707f76cadce8657553655dcbb5fc9b2f9bb9bee389f6"}, + {file = "ruff-0.1.8.tar.gz", hash = "sha256:f7ee467677467526cfe135eab86a40a0e8db43117936ac4f9b469ce9cdb3fb62"}, ] [[package]] name = "safetensors" -version = "0.3.3" -description = "Fast and Safe Tensor serialization" +version = "0.4.1" +description = "" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "safetensors-0.3.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:92e4d0c8b2836120fddd134474c5bda8963f322333941f8b9f643e5b24f041eb"}, - {file = "safetensors-0.3.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3dcadb6153c42addc9c625a622ebde9293fabe1973f9ef31ba10fb42c16e8536"}, - {file = "safetensors-0.3.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:08f26b61e1b0a14dc959aa9d568776bd038805f611caef1de04a80c468d4a7a4"}, - {file = "safetensors-0.3.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:17f41344d9a075f2f21b289a49a62e98baff54b5754240ba896063bce31626bf"}, - {file = "safetensors-0.3.3-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:f1045f798e1a16a6ced98d6a42ec72936d367a2eec81dc5fade6ed54638cd7d2"}, - {file = "safetensors-0.3.3-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:eaf0e4bc91da13f21ac846a39429eb3f3b7ed06295a32321fa3eb1a59b5c70f3"}, - {file = "safetensors-0.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25149180d4dc8ca48bac2ac3852a9424b466e36336a39659b35b21b2116f96fc"}, - {file = "safetensors-0.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9e943bf78c39de8865398a71818315e7d5d1af93c7b30d4da3fc852e62ad9bc"}, - {file = "safetensors-0.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cccfcac04a010354e87c7a2fe16a1ff004fc4f6e7ef8efc966ed30122ce00bc7"}, - {file = "safetensors-0.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a07121f427e646a50d18c1be0fa1a2cbf6398624c31149cd7e6b35486d72189e"}, - {file = "safetensors-0.3.3-cp310-cp310-win32.whl", hash = 
"sha256:a85e29cbfddfea86453cc0f4889b4bcc6b9c155be9a60e27be479a34e199e7ef"}, - {file = "safetensors-0.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:e13adad4a3e591378f71068d14e92343e626cf698ff805f61cdb946e684a218e"}, - {file = "safetensors-0.3.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:cbc3312f134baf07334dd517341a4b470b2931f090bd9284888acb7dfaf4606f"}, - {file = "safetensors-0.3.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d15030af39d5d30c22bcbc6d180c65405b7ea4c05b7bab14a570eac7d7d43722"}, - {file = "safetensors-0.3.3-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:f84a74cbe9859b28e3d6d7715ac1dd3097bebf8d772694098f6d42435245860c"}, - {file = "safetensors-0.3.3-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:10d637423d98ab2e6a4ad96abf4534eb26fcaf8ca3115623e64c00759374e90d"}, - {file = "safetensors-0.3.3-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:3b46f5de8b44084aff2e480874c550c399c730c84b2e8ad1bddb062c94aa14e9"}, - {file = "safetensors-0.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76da691a82dfaf752854fa6d17c8eba0c8466370c5ad8cf1bfdf832d3c7ee17"}, - {file = "safetensors-0.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4e342fd54e66aa9512dd13e410f791e47aa4feeb5f4c9a20882c72f3d272f29"}, - {file = "safetensors-0.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:178fd30b5dc73bce14a39187d948cedd0e5698e2f055b7ea16b5a96c9b17438e"}, - {file = "safetensors-0.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e8fdf7407dba44587ed5e79d5de3533d242648e1f2041760b21474bd5ea5c8c"}, - {file = "safetensors-0.3.3-cp311-cp311-win32.whl", hash = "sha256:7d3b744cee8d7a46ffa68db1a2ff1a1a432488e3f7a5a97856fe69e22139d50c"}, - {file = "safetensors-0.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f579877d30feec9b6ba409d05fa174633a4fc095675a4a82971d831a8bb60b97"}, - {file = "safetensors-0.3.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:2fff5b19a1b462c17322998b2f4b8bce43c16fe208968174d2f3a1446284ceed"}, - {file = "safetensors-0.3.3-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:41adb1d39e8aad04b16879e3e0cbcb849315999fad73bc992091a01e379cb058"}, - {file = "safetensors-0.3.3-cp37-cp37m-macosx_12_0_x86_64.whl", hash = "sha256:0f2b404250b3b877b11d34afcc30d80e7035714a1116a3df56acaca6b6c00096"}, - {file = "safetensors-0.3.3-cp37-cp37m-macosx_13_0_x86_64.whl", hash = "sha256:b43956ef20e9f4f2e648818a9e7b3499edd6b753a0f5526d4f6a6826fbee8446"}, - {file = "safetensors-0.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d61a99b34169981f088ccfbb2c91170843efc869a0a0532f422db7211bf4f474"}, - {file = "safetensors-0.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0008aab36cd20e9a051a68563c6f80d40f238c2611811d7faa5a18bf3fd3984"}, - {file = "safetensors-0.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93d54166072b143084fdcd214a080a088050c1bb1651016b55942701b31334e4"}, - {file = "safetensors-0.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c32ee08f61cea56a5d62bbf94af95df6040c8ab574afffaeb7b44ae5da1e9e3"}, - {file = "safetensors-0.3.3-cp37-cp37m-win32.whl", hash = "sha256:351600f367badd59f7bfe86d317bb768dd8c59c1561c6fac43cafbd9c1af7827"}, - {file = "safetensors-0.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:034717e297849dae1af0a7027a14b8647bd2e272c24106dced64d83e10d468d1"}, - {file = 
"safetensors-0.3.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8530399666748634bc0b301a6a5523756931b0c2680d188e743d16304afe917a"}, - {file = "safetensors-0.3.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:9d741c1f1621e489ba10aa3d135b54202684f6e205df52e219d5eecd673a80c9"}, - {file = "safetensors-0.3.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:0c345fd85b4d2093a5109596ff4cd9dfc2e84992e881b4857fbc4a93a3b89ddb"}, - {file = "safetensors-0.3.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69ccee8d05f55cdf76f7e6c87d2bdfb648c16778ef8acfd2ecc495e273e9233e"}, - {file = "safetensors-0.3.3-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:c08a9a4b7a4ca389232fa8d097aebc20bbd4f61e477abc7065b5c18b8202dede"}, - {file = "safetensors-0.3.3-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:a002868d2e3f49bbe81bee2655a411c24fa1f8e68b703dec6629cb989d6ae42e"}, - {file = "safetensors-0.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bd2704cb41faa44d3ec23e8b97330346da0395aec87f8eaf9c9e2c086cdbf13"}, - {file = "safetensors-0.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2951bf3f0ad63df5e6a95263652bd6c194a6eb36fd4f2d29421cd63424c883"}, - {file = "safetensors-0.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07114cec116253ca2e7230fdea30acf76828f21614afd596d7b5438a2f719bd8"}, - {file = "safetensors-0.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab43aeeb9eadbb6b460df3568a662e6f1911ecc39387f8752afcb6a7d96c087"}, - {file = "safetensors-0.3.3-cp38-cp38-win32.whl", hash = "sha256:f2f59fce31dd3429daca7269a6b06f65e6547a0c248f5116976c3f1e9b73f251"}, - {file = "safetensors-0.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:c31ca0d8610f57799925bf08616856b39518ab772c65093ef1516762e796fde4"}, - {file = "safetensors-0.3.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:59a596b3225c96d59af412385981f17dd95314e3fffdf359c7e3f5bb97730a19"}, - {file = "safetensors-0.3.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:82a16e92210a6221edd75ab17acdd468dd958ef5023d9c6c1289606cc30d1479"}, - {file = "safetensors-0.3.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:98a929e763a581f516373ef31983ed1257d2d0da912a8e05d5cd12e9e441c93a"}, - {file = "safetensors-0.3.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:12b83f1986cd16ea0454c636c37b11e819d60dd952c26978310a0835133480b7"}, - {file = "safetensors-0.3.3-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:f439175c827c2f1bbd54df42789c5204a10983a30bc4242bc7deaf854a24f3f0"}, - {file = "safetensors-0.3.3-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:0085be33b8cbcb13079b3a8e131656e05b0bc5e6970530d4c24150f7afd76d70"}, - {file = "safetensors-0.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3ec70c87b1e910769034206ad5efc051069b105aac1687f6edcd02526767f4"}, - {file = "safetensors-0.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f490132383e5e490e710608f4acffcb98ed37f91b885c7217d3f9f10aaff9048"}, - {file = "safetensors-0.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79d1b6c7ed5596baf79c80fbce5198c3cdcc521ae6a157699f427aba1a90082d"}, - {file = "safetensors-0.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad3cc8006e7a86ee7c88bd2813ec59cd7cc75b03e6fa4af89b9c7b235b438d68"}, - {file = "safetensors-0.3.3-cp39-cp39-win32.whl", hash = "sha256:ab29f54c6b8c301ca05fa014728996bd83aac6e21528f893aaf8945c71f42b6d"}, - {file = 
"safetensors-0.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:0fa82004eae1a71e2aa29843ef99de9350e459a0fc2f65fc6ee0da9690933d2d"}, - {file = "safetensors-0.3.3.tar.gz", hash = "sha256:edb7072d788c4f929d0f5735d3a2fb51e5a27f833587828583b7f5747af1a2b8"}, + {file = "safetensors-0.4.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:cba01c6b76e01ec453933b3b3c0157c59b52881c83eaa0f7666244e71aa75fd1"}, + {file = "safetensors-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a8f6f679d97ea0135c7935c202feefbd042c149aa70ee759855e890c01c7814"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc2ce1f5ae5143a7fb72b71fa71db6a42b4f6cf912aa3acdc6b914084778e68"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d87d993eaefe6611a9c241a8bd364a5f1ffed5771c74840363a6c4ed8d868f6"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:097e9af2efa8778cd2f0cba451784253e62fa7cc9fc73c0744d27212f7294e25"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d10a9f7bae608ccfdc009351f01dc3d8535ff57f9488a58a4c38e45bf954fe93"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:270b99885ec14abfd56c1d7f28ada81740a9220b4bae960c3de1c6fe84af9e4d"}, + {file = "safetensors-0.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:285b52a481e7ba93e29ad4ec5841ef2c4479ef0a6c633c4e2629e0508453577b"}, + {file = "safetensors-0.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c3c9f0ca510e0de95abd6424789dcbc879942a3a4e29b0dfa99d9427bf1da75c"}, + {file = "safetensors-0.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:88b4653059c903015284a9722f9a46838c654257173b279c8f6f46dbe80b612d"}, + {file = "safetensors-0.4.1-cp310-none-win32.whl", hash = "sha256:2fe6926110e3d425c4b684a4379b7796fdc26ad7d16922ea1696c8e6ea7e920f"}, + {file = "safetensors-0.4.1-cp310-none-win_amd64.whl", hash = "sha256:a79e16222106b2f5edbca1b8185661477d8971b659a3c814cc6f15181a9b34c8"}, + {file = "safetensors-0.4.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:d93321eea0dd7e81b283e47a1d20dee6069165cc158286316d0d06d340de8fe8"}, + {file = "safetensors-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ff8e41c8037db17de0ea2a23bc684f43eaf623be7d34906fe1ac10985b8365e"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39d36f1d88468a87c437a1bc27c502e71b6ca44c385a9117a9f9ba03a75cc9c6"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ef010e9afcb4057fb6be3d0a0cfa07aac04fe97ef73fe4a23138d8522ba7c17"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b287304f2b2220d51ccb51fd857761e78bcffbeabe7b0238f8dc36f2edfd9542"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e09000b2599e1836314430f81a3884c66a5cbabdff5d9f175b5d560d4de38d78"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c80ce0001efa16066358d2dd77993adc25f5a6c61850e4ad096a2232930bce"}, + {file = "safetensors-0.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:413e1f6ac248f7d1b755199a06635e70c3515493d3b41ba46063dec33aa2ebb7"}, + {file = "safetensors-0.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:d3ac139377cfe71ba04573f1cda66e663b7c3e95be850e9e6c2dd4b5984bd513"}, + {file = "safetensors-0.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:04157d008385bea66d12fe90844a80d4a76dc25ec5230b5bd9a630496d1b7c03"}, + {file = "safetensors-0.4.1-cp311-none-win32.whl", hash = "sha256:5f25297148ec665f0deb8bd67e9564634d8d6841041ab5393ccfe203379ea88b"}, + {file = "safetensors-0.4.1-cp311-none-win_amd64.whl", hash = "sha256:b2f8877990a72ff595507b80f4b69036a9a1986a641f8681adf3425d97d3d2a5"}, + {file = "safetensors-0.4.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:eb2c1da1cc39509d1a55620a5f4d14f8911c47a89c926a96e6f4876e864375a3"}, + {file = "safetensors-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:303d2c0415cf15a28f8d7f17379ea3c34c2b466119118a34edd9965983a1a8a6"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb4cb3e37a9b961ddd68e873b29fe9ab4a081e3703412e34aedd2b7a8e9cafd9"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae5497adc68669db2fed7cb2dad81e6a6106e79c9a132da3efdb6af1db1014fa"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b30abd0cddfe959d1daedf92edcd1b445521ebf7ddefc20860ed01486b33c90"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d784a98c492c751f228a4a894c3b8a092ff08b24e73b5568938c28b8c0e8f8df"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57a5ab08b0ec7a7caf30d2ac79bb30c89168431aca4f8854464bb9461686925"}, + {file = "safetensors-0.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:edcf3121890b5f0616aa5a54683b1a5d2332037b970e507d6bb7841a3a596556"}, + {file = "safetensors-0.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fdb58dee173ef33634c3016c459d671ca12d11e6acf9db008261cbe58107e579"}, + {file = "safetensors-0.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:780dc21eb3fd32ddd0e8c904bdb0290f2454f4ac21ae71e94f9ce72db1900a5a"}, + {file = "safetensors-0.4.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:48901bd540f8a3c1791314bc5c8a170927bf7f6acddb75bf0a263d081a3637d4"}, + {file = "safetensors-0.4.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3b0b7b2d5976fbed8a05e2bbdce5816a59e6902e9e7c7e07dc723637ed539787"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f69903ff49cb30b9227fb5d029bea276ea20d04b06803877a420c5b1b74c689"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0ddd050e01f3e843aa8c1c27bf68675b8a08e385d0045487af4d70418c3cb356"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a82bc2bd7a9a0e08239bdd6d7774d64121f136add93dfa344a2f1a6d7ef35fa"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ace9e66a40f98a216ad661245782483cf79cf56eb2b112650bb904b0baa9db5"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82cbb8f4d022f2e94498cbefca900698b8ded3d4f85212f47da614001ff06652"}, + {file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:791edc10a3c359a2f5f52d5cddab0df8a45107d91027d86c3d44e57162e5d934"}, + {file = "safetensors-0.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:83c2cfbe8c6304f0891e7bb378d56f66d2148972eeb5f747cd8a2246886f0d8c"}, + {file = "safetensors-0.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:04dd14f53f5500eb4c4149674216ba1000670efbcf4b1b5c2643eb244e7882ea"}, + {file = "safetensors-0.4.1-cp37-none-win32.whl", hash = "sha256:d5b3defa74f3723a388bfde2f5d488742bc4879682bd93267c09a3bcdf8f869b"}, + {file = "safetensors-0.4.1-cp37-none-win_amd64.whl", hash = "sha256:25a043cbb59d4f75e9dd87fdf5c009dd8830105a2c57ace49b72167dd9808111"}, + {file = "safetensors-0.4.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:3f6a520af7f2717c5ecba112041f2c8af1ca6480b97bf957aba81ed9642e654c"}, + {file = "safetensors-0.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3807ac3b16288dffebb3474b555b56fe466baa677dfc16290dcd02dca1ab228"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b58ba13a9e82b4bc3fc221914f6ef237fe6c2adb13cede3ace64d1aacf49610"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dac4bb42f8679aadc59bd91a4c5a1784a758ad49d0912995945cd674089f628e"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911b48dc09e321a194def3a7431662ff4f03646832f3a8915bbf0f449b8a5fcb"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82571d20288c975c1b30b08deb9b1c3550f36b31191e1e81fae87669a92217d0"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da52ee0dc8ba03348ffceab767bd8230842fdf78f8a996e2a16445747143a778"}, + {file = "safetensors-0.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2536b11ce665834201072e9397404170f93f3be10cca9995b909f023a04501ee"}, + {file = "safetensors-0.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:998fbac99ca956c3a09fe07cc0b35fac26a521fa8865a690686d889f0ff4e4a6"}, + {file = "safetensors-0.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:845be0aafabf2a60c2d482d4e93023fecffe5e5443d801d7a7741bae9de41233"}, + {file = "safetensors-0.4.1-cp38-none-win32.whl", hash = "sha256:ce7a28bc8af685a69d7e869d09d3e180a275e3281e29cf5f1c7319e231932cc7"}, + {file = "safetensors-0.4.1-cp38-none-win_amd64.whl", hash = "sha256:e056fb9e22d118cc546107f97dc28b449d88274207dd28872bd668c86216e4f6"}, + {file = "safetensors-0.4.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:bdc0d039e44a727824639824090bd8869535f729878fa248addd3dc01db30eae"}, + {file = "safetensors-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c1b1d510c7aba71504ece87bf393ea82638df56303e371e5e2cf09d18977dd7"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd0afd95c1e497f520e680ea01e0397c0868a3a3030e128438cf6e9e3fcd671"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f603bdd8deac6726d39f41688ed353c532dd53935234405d79e9eb53f152fbfb"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8a85e3e47e0d4eebfaf9a58b40aa94f977a56050cb5598ad5396a9ee7c087c6"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0ccb5aa0f3be2727117e5631200fbb3a5b3a2b3757545a92647d6dd8be6658f"}, + {file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d784938534e255473155e4d9f276ee69eb85455b6af1292172c731409bf9adee"}, + {file = 
"safetensors-0.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a257de175c254d39ccd6a21341cd62eb7373b05c1e618a78096a56a857e0c316"}, + {file = "safetensors-0.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6fd80f7794554091836d4d613d33a7d006e2b8d6ba014d06f97cebdfda744f64"}, + {file = "safetensors-0.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:35803201d980efcf964b75a0a2aee97fe5e9ecc5f3ad676b38fafdfe98e0620d"}, + {file = "safetensors-0.4.1-cp39-none-win32.whl", hash = "sha256:7ff8a36e0396776d3ed9a106fc9a9d7c55d4439ca9a056a24bf66d343041d3e6"}, + {file = "safetensors-0.4.1-cp39-none-win_amd64.whl", hash = "sha256:bfa2e20342b81921b98edba52f8deb68843fa9c95250739a56b52ceda5ea5c61"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ae2d5a31cfb8a973a318f7c4d2cffe0bd1fe753cdf7bb41a1939d45a0a06f964"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a45dbf03e8334d3a5dc93687d98b6dc422f5d04c7d519dac09b84a3c87dd7c6"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297b359d91126c0f9d4fd17bae3cfa2fe3a048a6971b8db07db746ad92f850c"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda3d98e2bcece388232cfc551ebf063b55bdb98f65ab54df397da30efc7dcc5"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8934bdfd202ebd0697040a3dff40dd77bc4c5bbf3527ede0532f5e7fb4d970f"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:42c3710cec7e5c764c7999697516370bee39067de0aa089b7e2cfb97ac8c6b20"}, + {file = "safetensors-0.4.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53134226053e56bd56e73f7db42596e7908ed79f3c9a1016e4c1dade593ac8e5"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:257d59e40a1b367cb544122e7451243d65b33c3f34d822a347f4eea6fdf97fdf"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d54c2f1826e790d1eb2d2512bfd0ee443f0206b423d6f27095057c7f18a0687"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645b3f1138fce6e818e79d4128afa28f0657430764cc045419c1d069ff93f732"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9a7ffb1e551c6df51d267f5a751f042b183df22690f6feceac8d27364fd51d7"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:44e230fbbe120de564b64f63ef3a8e6ff02840fa02849d9c443d56252a1646d4"}, + {file = "safetensors-0.4.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:9d16b3b2fcc6fca012c74bd01b5619c655194d3e3c13e4d4d0e446eefa39a463"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5d95ea4d8b32233910734a904123bdd3979c137c461b905a5ed32511defc075f"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:dab431699b5d45e0ca043bc580651ce9583dda594e62e245b7497adb32e99809"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d8bbb7344e39cb9d4762e85c21df94ebeb03edac923dd94bb9ed8c10eac070"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1faf5111c66a6ba91f85dff2e36edaaf36e6966172703159daeef330de4ddc7b"}, + {file 
= "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:660ca1d8bff6c7bc7c6b30b9b32df74ef3ab668f5df42cefd7588f0d40feadcb"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ae2f67f04ed0bb2e56fd380a8bd3eef03f609df53f88b6f5c7e89c08e52aae00"}, + {file = "safetensors-0.4.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8ed5d2c04cdc1afc6b3c28d59580448ac07732c50d94c15e14670f9c473a2ce"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2b6a2814278b6660261aa9a9aae524616de9f1ec364e3716d219b6ed8f91801f"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3cfd1ca35eacc635f0eaa894e5c5ed83ffebd0f95cac298fd430014fa7323631"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4177b456c6b0c722d82429127b5beebdaf07149d265748e97e0a34ff0b3694c8"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:313e8472197bde54e3ec54a62df184c414582979da8f3916981b6a7954910a1b"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fdb4adb76e21bad318210310590de61c9f4adcef77ee49b4a234f9dc48867869"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1d568628e9c43ca15eb96c217da73737c9ccb07520fafd8a1eba3f2750614105"}, + {file = "safetensors-0.4.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:573b6023a55a2f28085fc0a84e196c779b6cbef4d9e73acea14c8094fee7686f"}, + {file = "safetensors-0.4.1.tar.gz", hash = "sha256:2304658e6ada81a5223225b4efe84748e760c46079bffedf7e321763cafb36c9"}, ] [package.extras] -all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"] -dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"] -jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)"] +all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] +dev = ["safetensors[all]"] +jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] numpy = ["numpy (>=1.21.6)"] -paddlepaddle = ["numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)"] -pinned-tf = ["tensorflow (==2.11.0)"] +paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] +pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] -tensorflow = ["numpy (>=1.21.6)", "tensorflow (>=2.11.0)"] -testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "numpy (>=1.21.6)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)"] -torch = ["numpy (>=1.21.6)", "torch (>=1.10)"] +tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] +testing = ["h5py (>=3.7.0)", 
"huggingface_hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools_rust (>=1.5.2)"] +torch = ["safetensors[numpy]", "torch (>=1.10)"] [[package]] name = "scikit-learn" -version = "1.3.0" +version = "1.3.2" description = "A set of python modules for machine learning and data mining" optional = false python-versions = ">=3.8" files = [ - {file = "scikit-learn-1.3.0.tar.gz", hash = "sha256:8be549886f5eda46436b6e555b0e4873b4f10aa21c07df45c4bc1735afbccd7a"}, - {file = "scikit_learn-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:981287869e576d42c682cf7ca96af0c6ac544ed9316328fd0d9292795c742cf5"}, - {file = "scikit_learn-1.3.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:436aaaae2c916ad16631142488e4c82f4296af2404f480e031d866863425d2a2"}, - {file = "scikit_learn-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7e28d8fa47a0b30ae1bd7a079519dd852764e31708a7804da6cb6f8b36e3630"}, - {file = "scikit_learn-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae80c08834a473d08a204d966982a62e11c976228d306a2648c575e3ead12111"}, - {file = "scikit_learn-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:552fd1b6ee22900cf1780d7386a554bb96949e9a359999177cf30211e6b20df6"}, - {file = "scikit_learn-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79970a6d759eb00a62266a31e2637d07d2d28446fca8079cf9afa7c07b0427f8"}, - {file = "scikit_learn-1.3.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:850a00b559e636b23901aabbe79b73dc604b4e4248ba9e2d6e72f95063765603"}, - {file = "scikit_learn-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee04835fb016e8062ee9fe9074aef9b82e430504e420bff51e3e5fffe72750ca"}, - {file = "scikit_learn-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d953531f5d9f00c90c34fa3b7d7cfb43ecff4c605dac9e4255a20b114a27369"}, - {file = "scikit_learn-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:151ac2bf65ccf363664a689b8beafc9e6aae36263db114b4ca06fbbbf827444a"}, - {file = "scikit_learn-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a885a9edc9c0a341cab27ec4f8a6c58b35f3d449c9d2503a6fd23e06bbd4f6a"}, - {file = "scikit_learn-1.3.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:9877af9c6d1b15486e18a94101b742e9d0d2f343d35a634e337411ddb57783f3"}, - {file = "scikit_learn-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c470f53cea065ff3d588050955c492793bb50c19a92923490d18fcb637f6383a"}, - {file = "scikit_learn-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd6e2d7389542eae01077a1ee0318c4fec20c66c957f45c7aac0c6eb0fe3c612"}, - {file = "scikit_learn-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:3a11936adbc379a6061ea32fa03338d4ca7248d86dd507c81e13af428a5bc1db"}, - {file = "scikit_learn-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:998d38fcec96584deee1e79cd127469b3ad6fefd1ea6c2dfc54e8db367eb396b"}, - {file = "scikit_learn-1.3.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ded35e810438a527e17623ac6deae3b360134345b7c598175ab7741720d7ffa7"}, - {file = "scikit_learn-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e8102d5036e28d08ab47166b48c8d5e5810704daecf3a476a4282d562be9a28"}, - {file = "scikit_learn-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7617164951c422747e7c32be4afa15d75ad8044f42e7d70d3e2e0429a50e6718"}, - {file = 
"scikit_learn-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:1d54fb9e6038284548072df22fd34777e434153f7ffac72c8596f2d6987110dd"}, + {file = "scikit-learn-1.3.2.tar.gz", hash = "sha256:a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e326c0eb5cf4d6ba40f93776a20e9a7a69524c4db0757e7ce24ba222471ee8a1"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:535805c2a01ccb40ca4ab7d081d771aea67e535153e35a1fd99418fcedd1648a"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1215e5e58e9880b554b01187b8c9390bf4dc4692eedeaf542d3273f4785e342c"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ee107923a623b9f517754ea2f69ea3b62fc898a3641766cb7deb2f2ce450161"}, + {file = "scikit_learn-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:35a22e8015048c628ad099da9df5ab3004cdbf81edc75b396fd0cff8699ac58c"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6fb6bc98f234fda43163ddbe36df8bcde1d13ee176c6dc9b92bb7d3fc842eb66"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:18424efee518a1cde7b0b53a422cde2f6625197de6af36da0b57ec502f126157"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3271552a5eb16f208a6f7f617b8cc6d1f137b52c8a1ef8edf547db0259b2c9fb"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4144a5004a676d5022b798d9e573b05139e77f271253a4703eed295bde0433"}, + {file = "scikit_learn-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:67f37d708f042a9b8d59551cf94d30431e01374e00dc2645fa186059c6c5d78b"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8db94cd8a2e038b37a80a04df8783e09caac77cbe052146432e67800e430c028"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:61a6efd384258789aa89415a410dcdb39a50e19d3d8410bd29be365bcdd512d5"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb06f8dce3f5ddc5dee1715a9b9f19f20d295bed8e3cd4fa51e1d050347de525"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b2de18d86f630d68fe1f87af690d451388bb186480afc719e5f770590c2ef6c"}, + {file = "scikit_learn-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:0402638c9a7c219ee52c94cbebc8fcb5eb9fe9c773717965c1f4185588ad3107"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a19f90f95ba93c1a7f7924906d0576a84da7f3b2282ac3bfb7a08a32801add93"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b8692e395a03a60cd927125eef3a8e3424d86dde9b2370d544f0ea35f78a8073"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e1e94cc23d04d39da797ee34236ce2375ddea158b10bee3c343647d615581d"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:785a2213086b7b1abf037aeadbbd6d67159feb3e30263434139c98425e3dcfcf"}, + {file = "scikit_learn-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:64381066f8aa63c2710e6b56edc9f0894cc7bf59bd71b8ce5613a4559b6145e0"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c43290337f7a4b969d207e620658372ba3c1ffb611f8bc2b6f031dc5c6d1d03"}, + 
{file = "scikit_learn-1.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:dc9002fc200bed597d5d34e90c752b74df516d592db162f756cc52836b38fe0e"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d08ada33e955c54355d909b9c06a4789a729977f165b8bae6f225ff0a60ec4a"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f0ae4b79b0ff9cca0bf3716bcc9915bdacff3cebea15ec79652d1cc4fa5c9"}, + {file = "scikit_learn-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ed932ea780517b00dae7431e031faae6b49b20eb6950918eb83bd043237950e0"}, ] [package.dependencies] joblib = ">=1.1.1" -numpy = ">=1.17.3" +numpy = ">=1.17.3,<2.0" scipy = ">=1.5.0" threadpoolctl = ">=2.0.0" @@ -4493,19 +4880,19 @@ files = [ [[package]] name = "setuptools" -version = "68.1.2" +version = "69.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, - {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -4552,13 +4939,13 @@ tests = ["Django", "birdseye", "littleutils", "numpy (>=1.16.5)", "pandas (>=0.2 [[package]] name = "stack-data" -version = "0.6.2" +version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = false python-versions = "*" files = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = 
"sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, ] [package.dependencies] @@ -4571,13 +4958,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "starlette" -version = "0.27.0" +version = "0.35.1" description = "The little ASGI library that shines." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, - {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, + {file = "starlette-0.35.1-py3-none-any.whl", hash = "sha256:50bbbda9baa098e361f398fda0928062abbaf1f54f4fadcbe17c092a01eb9a25"}, + {file = "starlette-0.35.1.tar.gz", hash = "sha256:3e2639dac3520e4f58734ed22553f950d3f3cb1001cd2eaac4d57e8cdc5f66bc"}, ] [package.dependencies] @@ -4614,113 +5001,113 @@ files = [ [[package]] name = "tokenizers" -version = "0.14.0" +version = "0.15.0" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.14.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1a90e1030d9c61de64045206c62721a36f892dcfc5bbbc119dfcd417c1ca60ca"}, - {file = "tokenizers-0.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7cacc5a33767bb2a03b6090eac556c301a1d961ac2949be13977bc3f20cc4e3c"}, - {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:81994795e1b4f868a6e73107af8cdf088d31357bae6f7abf26c42874eab16f43"}, - {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ec53f832bfa91abafecbf92b4259b466fb31438ab31e8291ade0fcf07de8fc2"}, - {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:854aa813a55d6031a6399b1bca09e4e7a79a80ec05faeea77fc6809d59deb3d5"}, - {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c34d2f02e25e0fa96e574cadb43a6f14bdefc77f84950991da6e3732489e164"}, - {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f17d5ad725c827d3dc7db2bbe58093a33db2de49bbb639556a6d88d82f0ca19"}, - {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:337a7b7d6b32c6f904faee4304987cb018d1488c88b91aa635760999f5631013"}, - {file = "tokenizers-0.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:98a7ceb767e1079ef2c99f52a4e7b816f2e682b2b6fef02c8eff5000536e54e1"}, - {file = "tokenizers-0.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25ad4a0f883a311a5b021ed979e21559cb4184242c7446cd36e07d046d1ed4be"}, - {file = "tokenizers-0.14.0-cp310-none-win32.whl", hash = "sha256:360706b0c2c6ba10e5e26b7eeb7aef106dbfc0a81ad5ad599a892449b4973b10"}, - {file = "tokenizers-0.14.0-cp310-none-win_amd64.whl", hash = "sha256:1c2ce437982717a5e221efa3c546e636f12f325cc3d9d407c91d2905c56593d0"}, - {file = "tokenizers-0.14.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:612d0ba4f40f4d41163af9613dac59c902d017dc4166ea4537a476af807d41c3"}, - {file = "tokenizers-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:3013ad0cff561d9be9ce2cc92b76aa746b4e974f20e5b4158c03860a4c8ffe0f"}, - {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c89a0d6d2ec393a6261df71063b1e22bdd7c6ef3d77b8826541b596132bcf524"}, - {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5514417f37fc2ca8159b27853cd992a9a4982e6c51f04bd3ac3f65f68a8fa781"}, - {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e761fd1af8409c607b11f084dc7cc50f80f08bd426d4f01d1c353b097d2640f"}, - {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c16fbcd5ef10df9e51cc84238cdb05ee37e4228aaff39c01aa12b0a0409e29b8"}, - {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3439d9f858dd9033b69769be5a56eb4fb79fde13fad14fab01edbf2b98033ad9"}, - {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c19f8cdc3e84090464a6e28757f60461388cc8cd41c02c109e180a6b7c571f6"}, - {file = "tokenizers-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:df763ce657a297eb73008d5907243a7558a45ae0930b38ebcb575a24f8296520"}, - {file = "tokenizers-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:095b0b6683a9b76002aa94659f75c09e4359cb291b318d6e77a60965d7a7f138"}, - {file = "tokenizers-0.14.0-cp311-none-win32.whl", hash = "sha256:712ec0e68a399ded8e115e7e25e7017802fa25ee6c36b4eaad88481e50d0c638"}, - {file = "tokenizers-0.14.0-cp311-none-win_amd64.whl", hash = "sha256:917aa6d6615b33d9aa811dcdfb3109e28ff242fbe2cb89ea0b7d3613e444a672"}, - {file = "tokenizers-0.14.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8464ee7d43ecd9dd1723f51652f49b979052ea3bcd25329e3df44e950c8444d1"}, - {file = "tokenizers-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:84c2b96469b34825557c6fe0bc3154c98d15be58c416a9036ca90afdc9979229"}, - {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:24b3ccec65ee6f876cd67251c1dcfa1c318c9beec5a438b134f7e33b667a8b36"}, - {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde333fc56dd5fbbdf2de3067d6c0c129867d33eac81d0ba9b65752ad6ef4208"}, - {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddcc2f251bd8a2b2f9a7763ad4468a34cfc4ee3b0fba3cfb34d12c964950cac"}, - {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10a34eb1416dcec3c6f9afea459acd18fcc93234687de605a768a987eda589ab"}, - {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:56bc7252530a6a20c6eed19b029914bb9cc781efbe943ca9530856051de99d0f"}, - {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07f5c2324326a00c85111081d5eae4da9d64d56abb5883389b3c98bee0b50a7c"}, - {file = "tokenizers-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5efd92e44e43f36332b5f3653743dca5a0b72cdabb012f20023e220f01f675cb"}, - {file = "tokenizers-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9223bcb77a826dbc9fd0efa6bce679a96b1a01005142778bb42ce967581c5951"}, - {file = "tokenizers-0.14.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:e2c1b4707344d3fbfce35d76802c2429ca54e30a5ecb05b3502c1e546039a3bb"}, - {file = "tokenizers-0.14.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = 
"sha256:5892ba10fe0a477bde80b9f06bce05cb9d83c15a4676dcae5cbe6510f4524bfc"}, - {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0e1818f33ac901d5d63830cb6a69a707819f4d958ae5ecb955d8a5ad823a2e44"}, - {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d06a6fe406df1e616f9e649522683411c6c345ddaaaad7e50bbb60a2cb27e04d"}, - {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6e2d4bc223dc6a99efbe9266242f1ac03eb0bef0104e6cef9f9512dd5c816b"}, - {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08ea1f612796e438c9a7e2ad86ab3c1c05c8fe0fad32fcab152c69a3a1a90a86"}, - {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ab1a58c05a3bd8ece95eb5d1bc909b3fb11acbd3ff514e3cbd1669e3ed28f5b"}, - {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:495dc7d3b78815de79dafe7abce048a76154dadb0ffc7f09b7247738557e5cef"}, - {file = "tokenizers-0.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aaa0401a245d891b3b2ba9cf027dc65ca07627e11fe3ce597644add7d07064f8"}, - {file = "tokenizers-0.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae4fa13a786fd0d6549da241c6a1077f9b6320a7120d922ccc201ad1d4feea8f"}, - {file = "tokenizers-0.14.0-cp37-none-win32.whl", hash = "sha256:ae0d5b5ab6032c24a2e74cc15f65b6510070926671129e922aa3826c834558d7"}, - {file = "tokenizers-0.14.0-cp37-none-win_amd64.whl", hash = "sha256:2839369a9eb948905612f5d8e70453267d9c7bf17573e5ab49c2f28368fd635d"}, - {file = "tokenizers-0.14.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:f483af09a07fcb8b8b4cd07ac1be9f58bb739704ef9156e955531299ab17ec75"}, - {file = "tokenizers-0.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9c2ec661d0d63e618cb145ad15ddb6a81e16d9deb7a203f385d78141da028984"}, - {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:97e87eb7cbeff63c3b1aa770fdcf18ea4f1c852bfb75d0c913e71b8924a99d61"}, - {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98c4bd09b47f77f41785488971543de63db82608f0dc0bc6646c876b5ca44d1f"}, - {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0cbeb5406be31f7605d032bb261f2e728da8ac1f4f196c003bc640279ceb0f52"}, - {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe799fa48fd7dd549a68abb7bee32dd3721f50210ad2e3e55058080158c72c25"}, - {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:66daf7c6375a95970e86cb3febc48becfeec4e38b2e0195218d348d3bb86593b"}, - {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b177422af79a77c46bb8f56d73827e688fdc092878cff54e24f5c07a908db"}, - {file = "tokenizers-0.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a9aef7a5622648b70f979e96cbc2f795eba5b28987dd62f4dbf8f1eac6d64a1a"}, - {file = "tokenizers-0.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:397a24feff284d39b40fdd61c1c828bb6648dfe97b6766c84fbaf7256e272d09"}, - {file = "tokenizers-0.14.0-cp38-none-win32.whl", hash = "sha256:93cc2ec19b6ff6149b2e5127ceda3117cc187dd38556a1ed93baba13dffda069"}, - {file = "tokenizers-0.14.0-cp38-none-win_amd64.whl", hash = 
"sha256:bf7f540ab8a6fc53fb762963edb7539b11f00af8f70b206f0a6d1a25109ad307"}, - {file = "tokenizers-0.14.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a58d0b34586f4c5229de5aa124cf76b9455f2e01dc5bd6ed018f6e3bb12572d3"}, - {file = "tokenizers-0.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:90ceca6a06bb4b0048d0a51d0d47ef250d3cb37cc36b6b43334be8c02ac18b0f"}, - {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5f6c9554bda64799b1d65052d834553bff9a6ef4a6c2114668e2ed8f1871a2a3"}, - {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ee14b41024bc05ea172fc2c87f66b60d7c5c636c3a52a09a25ec18e752e6dc7"}, - {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:879201b1c76b24dc70ce02fc42c3eeb7ff20c353ce0ee638be6449f7c80e73ba"}, - {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca79ea6ddde5bb32f7ad1c51de1032829c531e76bbcae58fb3ed105a31faf021"}, - {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd5934048e60aedddf6c5b076d44ccb388702e1650e2eb7b325a1682d883fbf9"}, - {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1566cabd4bf8f09d6c1fa7a3380a181801a495e7218289dbbd0929de471711"}, - {file = "tokenizers-0.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a8fc72a7adc6fa12db38100c403d659bc01fbf6e57f2cc9219e75c4eb0ea313c"}, - {file = "tokenizers-0.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7fd08ed6c14aa285482d9e5f48c04de52bdbcecaca0d30465d7a36bbea6b14df"}, - {file = "tokenizers-0.14.0-cp39-none-win32.whl", hash = "sha256:3279c0c1d5fdea7d3499c582fed392fb0463d1046544ca010f53aeee5d2ce12c"}, - {file = "tokenizers-0.14.0-cp39-none-win_amd64.whl", hash = "sha256:203ca081d25eb6e4bc72ea04d552e457079c5c6a3713715ece246f6ca02ca8d0"}, - {file = "tokenizers-0.14.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:b45704d5175499387e33a1dd5c8d49ab4d7ef3c36a9ba8a410bb3e68d10f80a0"}, - {file = "tokenizers-0.14.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6d17d5eb38ccc2f615a7a3692dfa285abe22a1e6d73bbfd753599e34ceee511c"}, - {file = "tokenizers-0.14.0-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a7e6e7989ba77a20c33f7a8a45e0f5b3e7530b2deddad2c3b2a58b323156134"}, - {file = "tokenizers-0.14.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81876cefea043963abf6c92e0cf73ce6ee10bdc43245b6565ce82c0305c2e613"}, - {file = "tokenizers-0.14.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d8cd05f73d1ce875a23bfdb3a572417c0f46927c6070ca43a7f6f044c3d6605"}, - {file = "tokenizers-0.14.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:419a38b89be0081d872eac09449c03cd6589c2ee47461184592ee4b1ad93af1d"}, - {file = "tokenizers-0.14.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4caf274a9ba944eb83bc695beef95abe24ce112907fb06217875894d8a4f62b8"}, - {file = "tokenizers-0.14.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:6ecb3a7741d7ebf65db93d246b102efca112860707e07233f1b88703cb01dbc5"}, - {file = "tokenizers-0.14.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cb7fe9a383cb2932848e459d0277a681d58ad31aa6ccda204468a8d130a9105c"}, - {file = "tokenizers-0.14.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:b4731e0577780d85788ab4f00d54e16e76fe305739396e6fb4c54b89e6fa12de"}, - {file = "tokenizers-0.14.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9900291ccd19417128e328a26672390365dab1d230cd00ee7a5e2a0319e2716"}, - {file = "tokenizers-0.14.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:493e6932fbca6875fd2e51958f1108ce4c5ae41aa6f2b8017c5f07beaff0a1ac"}, - {file = "tokenizers-0.14.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1792e6b46b89aba0d501c0497f38c96e5b54735379fd8a07a28f45736ba51bb1"}, - {file = "tokenizers-0.14.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0af26d37c7080688ef606679f3a3d44b63b881de9fa00cc45adc240ba443fd85"}, - {file = "tokenizers-0.14.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:99379ec4d7023c07baed85c68983bfad35fd210dfbc256eaafeb842df7f888e3"}, - {file = "tokenizers-0.14.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:84118aa60dcbb2686730342a0cb37e54e02fde001f936557223d46b6cd8112cd"}, - {file = "tokenizers-0.14.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d616e1859ffcc8fcda60f556c34338b96fb72ca642f6dafc3b1d2aa1812fb4dd"}, - {file = "tokenizers-0.14.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7826b79bbbffc2150bf8d621297cc600d8a1ea53992547c4fd39630de10466b4"}, - {file = "tokenizers-0.14.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eb3931d734f1e66b77c2a8e22ebe0c196f127c7a0f48bf9601720a6f85917926"}, - {file = "tokenizers-0.14.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:6a475b5cafc7a740bf33d00334b1f2b434b6124198384d8b511931a891be39ff"}, - {file = "tokenizers-0.14.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3d3c9e286ae00b0308903d2ef7b31efc84358109aa41abaa27bd715401c3fef4"}, - {file = "tokenizers-0.14.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:27244e96810434cf705f317e9b74a1163cd2be20bdbd3ed6b96dae1914a6778c"}, - {file = "tokenizers-0.14.0-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ca9b0536fd5f03f62427230e85d9d57f9eed644ab74c319ae4877c9144356aed"}, - {file = "tokenizers-0.14.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f64cdff8c0454295b739d77e25cff7264fa9822296395e60cbfecc7f66d88fb"}, - {file = "tokenizers-0.14.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00cdfb40544656b7a3b176049d63227d5e53cf2574912514ebb4b9da976aaa1"}, - {file = "tokenizers-0.14.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b611d96b96957cb2f39560c77cc35d2fcb28c13d5b7d741412e0edfdb6f670a8"}, - {file = "tokenizers-0.14.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:27ad1c02fdd74dcf3502fafb87393412e65f698f2e3aba4ad568a1f3b43d5c9f"}, - {file = "tokenizers-0.14.0.tar.gz", hash = "sha256:a06efa1f19dcc0e9bd0f4ffbf963cb0217af92a9694f68fe7eee5e1c6ddc4bde"}, + {file = "tokenizers-0.15.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:cd3cd0299aaa312cd2988957598f80becd04d5a07338741eca076057a2b37d6e"}, + {file = "tokenizers-0.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a922c492c721744ee175f15b91704be2d305569d25f0547c77cd6c9f210f9dc"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:331dd786d02fc38698f835fff61c99480f98b73ce75a4c65bd110c9af5e4609a"}, + {file = 
"tokenizers-0.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88dd0961c437d413ab027f8b115350c121d49902cfbadf08bb8f634b15fa1814"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6fdcc55339df7761cd52e1fbe8185d3b3963bc9e3f3545faa6c84f9e8818259a"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1480b0051d8ab5408e8e4db2dc832f7082ea24aa0722c427bde2418c6f3bd07"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9855e6c258918f9cf62792d4f6ddfa6c56dccd8c8118640f867f6393ecaf8bd7"}, + {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9529fe75efcd54ba8d516aa725e1851df9199f0669b665c55e90df08f5af86"}, + {file = "tokenizers-0.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8edcc90a36eab0705fe9121d6c77c6e42eeef25c7399864fd57dfb27173060bf"}, + {file = "tokenizers-0.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae17884aafb3e94f34fb7cfedc29054f5f54e142475ebf8a265a4e388fee3f8b"}, + {file = "tokenizers-0.15.0-cp310-none-win32.whl", hash = "sha256:9a3241acdc9b44cff6e95c4a55b9be943ef3658f8edb3686034d353734adba05"}, + {file = "tokenizers-0.15.0-cp310-none-win_amd64.whl", hash = "sha256:4b31807cb393d6ea31926b307911c89a1209d5e27629aa79553d1599c8ffdefe"}, + {file = "tokenizers-0.15.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:af7e9be8c05d30bb137b9fd20f9d99354816599e5fd3d58a4b1e28ba3b36171f"}, + {file = "tokenizers-0.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c3d7343fa562ea29661783344a2d83662db0d3d17a6fa6a403cac8e512d2d9fd"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:32371008788aeeb0309a9244809a23e4c0259625e6b74a103700f6421373f395"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9db64c7c9954fbae698884c5bb089764edc549731e5f9b7fa1dd4e4d78d77f"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbed5944c31195514669cf6381a0d8d47f164943000d10f93d6d02f0d45c25e0"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aab16c4a26d351d63e965b0c792f5da7227a37b69a6dc6d922ff70aa595b1b0c"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c2b60b12fdd310bf85ce5d7d3f823456b9b65eed30f5438dd7761879c495983"}, + {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0344d6602740e44054a9e5bbe9775a5e149c4dddaff15959bb07dcce95a5a859"}, + {file = "tokenizers-0.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4525f6997d81d9b6d9140088f4f5131f6627e4c960c2c87d0695ae7304233fc3"}, + {file = "tokenizers-0.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:65975094fef8cc68919644936764efd2ce98cf1bacbe8db2687155d2b0625bee"}, + {file = "tokenizers-0.15.0-cp311-none-win32.whl", hash = "sha256:ff5d2159c5d93015f5a4542aac6c315506df31853123aa39042672031768c301"}, + {file = "tokenizers-0.15.0-cp311-none-win_amd64.whl", hash = "sha256:2dd681b53cf615e60a31a115a3fda3980e543d25ca183797f797a6c3600788a3"}, + {file = "tokenizers-0.15.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:c9cce6ee149a3d703f86877bc2a6d997e34874b2d5a2d7839e36b2273f31d3d9"}, + {file = 
"tokenizers-0.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a0a94bc3370e6f1cc8a07a8ae867ce13b7c1b4291432a773931a61f256d44ea"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:309cfcccfc7e502cb1f1de2c9c1c94680082a65bfd3a912d5a5b2c90c677eb60"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8413e994dd7d875ab13009127fc85633916c71213917daf64962bafd488f15dc"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0ebf9430f901dbdc3dcb06b493ff24a3644c9f88c08e6a1d6d0ae2228b9b818"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10361e9c7864b22dd791ec5126327f6c9292fb1d23481d4895780688d5e298ac"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:babe42635b8a604c594bdc56d205755f73414fce17ba8479d142a963a6c25cbc"}, + {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3768829861e964c7a4556f5f23307fce6a23872c2ebf030eb9822dbbbf7e9b2a"}, + {file = "tokenizers-0.15.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9c91588a630adc88065e1c03ac6831e3e2112558869b9ebcb2b8afd8a14c944d"}, + {file = "tokenizers-0.15.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:77606994e793ca54ecf3a3619adc8a906a28ca223d9354b38df41cb8766a0ed6"}, + {file = "tokenizers-0.15.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:6fe143939f3b596681922b2df12a591a5b010e7dcfbee2202482cd0c1c2f2459"}, + {file = "tokenizers-0.15.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:b7bee0f1795e3e3561e9a557061b1539e5255b8221e3f928f58100282407e090"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5d37e7f4439b4c46192ab4f2ff38ab815e4420f153caa13dec9272ef14403d34"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caadf255cf7f951b38d10097836d1f3bcff4aeaaffadfdf748bab780bf5bff95"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05accb9162bf711a941b1460b743d62fec61c160daf25e53c5eea52c74d77814"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26a2ef890740127cb115ee5260878f4a677e36a12831795fd7e85887c53b430b"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e54c5f26df14913620046b33e822cb3bcd091a332a55230c0e63cc77135e2169"}, + {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669b8ed653a578bcff919566631156f5da3aab84c66f3c0b11a6281e8b4731c7"}, + {file = "tokenizers-0.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0ea480d943297df26f06f508dab6e012b07f42bf3dffdd36e70799368a5f5229"}, + {file = "tokenizers-0.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc80a0a565ebfc7cd89de7dd581da8c2b3238addfca6280572d27d763f135f2f"}, + {file = "tokenizers-0.15.0-cp37-none-win32.whl", hash = "sha256:cdd945e678bbdf4517d5d8de66578a5030aeefecdb46f5320b034de9cad8d4dd"}, + {file = "tokenizers-0.15.0-cp37-none-win_amd64.whl", hash = "sha256:1ab96ab7dc706e002c32b2ea211a94c1c04b4f4de48354728c3a6e22401af322"}, + {file = "tokenizers-0.15.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:f21c9eb71c9a671e2a42f18b456a3d118e50c7f0fc4dd9fa8f4eb727fea529bf"}, + {file = 
"tokenizers-0.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a5f4543a35889679fc3052086e69e81880b2a5a28ff2a52c5a604be94b77a3f"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f8aa81afec893e952bd39692b2d9ef60575ed8c86fce1fd876a06d2e73e82dca"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1574a5a4af22c3def93fe8fe4adcc90a39bf5797ed01686a4c46d1c3bc677d2f"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c7982fd0ec9e9122d03b209dac48cebfea3de0479335100ef379a9a959b9a5a"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d16b647032df2ce2c1f9097236e046ea9fedd969b25637b9d5d734d78aa53b"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b3cdf29e6f9653da330515dc8fa414be5a93aae79e57f8acc50d4028dd843edf"}, + {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7286f3df10de840867372e3e64b99ef58c677210e3ceb653cd0e740a5c53fe78"}, + {file = "tokenizers-0.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aabc83028baa5a36ce7a94e7659250f0309c47fa4a639e5c2c38e6d5ea0de564"}, + {file = "tokenizers-0.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:72f78b0e0e276b1fc14a672fa73f3acca034ba8db4e782124a2996734a9ba9cf"}, + {file = "tokenizers-0.15.0-cp38-none-win32.whl", hash = "sha256:9680b0ecc26e7e42f16680c1aa62e924d58d1c2dd992707081cc10a374896ea2"}, + {file = "tokenizers-0.15.0-cp38-none-win_amd64.whl", hash = "sha256:f17cbd88dab695911cbdd385a5a7e3709cc61dff982351f5d1b5939f074a2466"}, + {file = "tokenizers-0.15.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:3661862df7382c5eb23ac4fbf7c75e69b02dc4f5784e4c5a734db406b5b24596"}, + {file = "tokenizers-0.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3045d191dad49647f5a5039738ecf1c77087945c7a295f7bcf051c37067e883"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9fcaad9ab0801f14457d7c820d9f246b5ab590c407fc6b073819b1573097aa7"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79f17027f24fe9485701c8dbb269b9c713954ec3bdc1e7075a66086c0c0cd3c"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:01a3aa332abc4bee7640563949fcfedca4de8f52691b3b70f2fc6ca71bfc0f4e"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05b83896a893cdfedad8785250daa3ba9f0504848323471524d4783d7291661e"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbbf2489fcf25d809731ba2744ff278dd07d9eb3f8b7482726bd6cae607073a4"}, + {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab806ad521a5e9de38078b7add97589c313915f6f5fec6b2f9f289d14d607bd6"}, + {file = "tokenizers-0.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a522612d5c88a41563e3463226af64e2fa00629f65cdcc501d1995dd25d23f5"}, + {file = "tokenizers-0.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e58a38c4e6075810bdfb861d9c005236a72a152ebc7005941cc90d1bbf16aca9"}, + {file = "tokenizers-0.15.0-cp39-none-win32.whl", hash = "sha256:b8034f1041fd2bd2b84ff9f4dc4ae2e1c3b71606820a9cd5c562ebd291a396d1"}, + {file = "tokenizers-0.15.0-cp39-none-win_amd64.whl", hash = 
"sha256:edde9aa964145d528d0e0dbf14f244b8a85ebf276fb76869bc02e2530fa37a96"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:309445d10d442b7521b98083dc9f0b5df14eca69dbbfebeb98d781ee2cef5d30"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d3125a6499226d4d48efc54f7498886b94c418e93a205b673bc59364eecf0804"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ed56ddf0d54877bb9c6d885177db79b41576e61b5ef6defeb579dcb803c04ad5"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b22cd714706cc5b18992a232b023f736e539495f5cc61d2d28d176e55046f6c"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2719b1e9bc8e8e7f6599b99d0a8e24f33d023eb8ef644c0366a596f0aa926"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:85ddae17570ec7e5bfaf51ffa78d044f444a8693e1316e1087ee6150596897ee"}, + {file = "tokenizers-0.15.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76f1bed992e396bf6f83e3df97b64ff47885e45e8365f8983afed8556a0bc51f"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3bb0f4df6dce41a1c7482087b60d18c372ef4463cb99aa8195100fcd41e0fd64"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:22c27672c27a059a5f39ff4e49feed8c7f2e1525577c8a7e3978bd428eb5869d"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78104f5d035c9991f92831fc0efe9e64a05d4032194f2a69f67aaa05a4d75bbb"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a40b73dc19d82c3e3ffb40abdaacca8fbc95eeb26c66b7f9f860aebc07a73998"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d801d1368188c74552cd779b1286e67cb9fd96f4c57a9f9a2a09b6def9e1ab37"}, + {file = "tokenizers-0.15.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82641ffb13a4da1293fcc9f437d457647e60ed0385a9216cd135953778b3f0a1"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:160f9d1810f2c18fffa94aa98bf17632f6bd2dabc67fcb01a698ca80c37d52ee"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d7d6eea831ed435fdeeb9bcd26476226401d7309d115a710c65da4088841948"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f6456bec6c557d63d8ec0023758c32f589e1889ed03c055702e84ce275488bed"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eef39a502fad3bf104b9e1906b4fb0cee20e44e755e51df9a98f8922c3bf6d4"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1e4664c5b797e093c19b794bbecc19d2367e782b4a577d8b7c1821db5dc150d"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ca003fb5f3995ff5cf676db6681b8ea5d54d3b30bea36af1120e78ee1a4a4cdf"}, + {file = "tokenizers-0.15.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7f17363141eb0c53752c89e10650b85ef059a52765d0802ba9613dbd2d21d425"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:8a765db05581c7d7e1280170f2888cda351760d196cc059c37ea96f121125799"}, + {file 
= "tokenizers-0.15.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2a0dd641a72604486cd7302dd8f87a12c8a9b45e1755e47d2682733f097c1af5"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a1a3c973e4dc97797fc19e9f11546c95278ffc55c4492acb742f69e035490bc"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4fab75642aae4e604e729d6f78e0addb9d7e7d49e28c8f4d16b24da278e5263"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65f80be77f6327a86d8fd35a4467adcfe6174c159b4ab52a1a8dd4c6f2d7d9e1"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8da7533dbe66b88afd430c56a2f2ce1fd82e2681868f857da38eeb3191d7498"}, + {file = "tokenizers-0.15.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa8eb4584fc6cbe6a84d7a7864be3ed28e23e9fd2146aa8ef1814d579df91958"}, + {file = "tokenizers-0.15.0.tar.gz", hash = "sha256:10c7e6e7b4cabd757da59e93f5f8d1126291d16f8b54f28510825ef56a3e5d0e"}, ] [package.dependencies] -huggingface_hub = ">=0.16.4,<0.17" +huggingface_hub = ">=0.16.4,<1.0" [package.extras] dev = ["tokenizers[testing]"] @@ -4740,115 +5127,123 @@ files = [ [[package]] name = "torch" -version = "2.0.0" +version = "2.2.0" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = false python-versions = ">=3.8.0" files = [ - {file = "torch-2.0.0-1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c9090bda7d2eeeecd74f51b721420dbeb44f838d4536cc1b284e879417e3064a"}, - {file = "torch-2.0.0-1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:bd42db2a48a20574d2c33489e120e9f32789c4dc13c514b0c44272972d14a2d7"}, - {file = "torch-2.0.0-1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8969aa8375bcbc0c2993e7ede0a7f889df9515f18b9b548433f412affed478d9"}, - {file = "torch-2.0.0-1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ab2da16567cb55b67ae39e32d520d68ec736191d88ac79526ca5874754c32203"}, - {file = "torch-2.0.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:7a9319a67294ef02459a19738bbfa8727bb5307b822dadd708bc2ccf6c901aca"}, - {file = "torch-2.0.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9f01fe1f6263f31bd04e1757946fd63ad531ae37f28bb2dbf66f5c826ee089f4"}, - {file = "torch-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:527f4ae68df7b8301ee6b1158ca56350282ea633686537b30dbb5d7b4a52622a"}, - {file = "torch-2.0.0-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:ce9b5a49bd513dff7950a5a07d6e26594dd51989cee05ba388b03e8e366fd5d5"}, - {file = "torch-2.0.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:53e1c33c6896583cdb9a583693e22e99266444c4a43392dddc562640d39e542b"}, - {file = "torch-2.0.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:09651bff72e439d004c991f15add0c397c66f98ab36fe60d5514b44e4da722e8"}, - {file = "torch-2.0.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d439aec349c98f12819e8564b8c54008e4613dd4428582af0e6e14c24ca85870"}, - {file = "torch-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2802f84f021907deee7e9470ed10c0e78af7457ac9a08a6cd7d55adef835fede"}, - {file = "torch-2.0.0-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:01858620f25f25e7a9ec4b547ff38e5e27c92d38ec4ccba9cfbfb31d7071ed9c"}, - {file = "torch-2.0.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:9a2e53b5783ef5896a6af338b36d782f28e83c8ddfc2ac44b67b066d9d76f498"}, - {file = 
"torch-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ec5fff2447663e369682838ff0f82187b4d846057ef4d119a8dea7772a0b17dd"}, - {file = "torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11b0384fe3c18c01b8fc5992e70fc519cde65e44c51cc87be1838c1803daf42f"}, - {file = "torch-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:e54846aa63855298cfb1195487f032e413e7ac9cbfa978fda32354cc39551475"}, - {file = "torch-2.0.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:cc788cbbbbc6eb4c90e52c550efd067586c2693092cf367c135b34893a64ae78"}, - {file = "torch-2.0.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:d292640f0fd72b7a31b2a6e3b635eb5065fcbedd4478f9cad1a1e7a9ec861d35"}, - {file = "torch-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6befaad784004b7af357e3d87fa0863c1f642866291f12a4c2af2de435e8ac5c"}, - {file = "torch-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a83b26bd6ae36fbf5fee3d56973d9816e2002e8a3b7d9205531167c28aaa38a7"}, - {file = "torch-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c7e67195e1c3e33da53954b026e89a8e1ff3bc1aeb9eb32b677172d4a9b5dcbf"}, - {file = "torch-2.0.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6e0b97beb037a165669c312591f242382e9109a240e20054d5a5782d9236cad0"}, - {file = "torch-2.0.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:297a4919aff1c0f98a58ebe969200f71350a1d4d4f986dbfd60c02ffce780e99"}, + {file = "torch-2.2.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:d366158d6503a3447e67f8c0ad1328d54e6c181d88572d688a625fac61b13a97"}, + {file = "torch-2.2.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:707f2f80402981e9f90d0038d7d481678586251e6642a7a6ef67fc93511cb446"}, + {file = "torch-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:15c8f0a105c66b28496092fca1520346082e734095f8eaf47b5786bac24b8a31"}, + {file = "torch-2.2.0-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:0ca4df4b728515ad009b79f5107b00bcb2c63dc202d991412b9eb3b6a4f24349"}, + {file = "torch-2.2.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:3d3eea2d5969b9a1c9401429ca79efc668120314d443d3463edc3289d7f003c7"}, + {file = "torch-2.2.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0d1c580e379c0d48f0f0a08ea28d8e373295aa254de4f9ad0631f9ed8bc04c24"}, + {file = "torch-2.2.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9328e3c1ce628a281d2707526b4d1080eae7c4afab4f81cea75bde1f9441dc78"}, + {file = "torch-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:03c8e660907ac1b8ee07f6d929c4e15cd95be2fb764368799cca02c725a212b8"}, + {file = "torch-2.2.0-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:da0cefe7f84ece3e3b56c11c773b59d1cb2c0fd83ddf6b5f7f1fd1a987b15c3e"}, + {file = "torch-2.2.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:f81d23227034221a4a4ff8ef24cc6cec7901edd98d9e64e32822778ff01be85e"}, + {file = "torch-2.2.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:dcbfb2192ac41ca93c756ebe9e2af29df0a4c14ee0e7a0dd78f82c67a63d91d4"}, + {file = "torch-2.2.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:9eeb42971619e24392c9088b5b6d387d896e267889d41d267b1fec334f5227c5"}, + {file = "torch-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:c718b2ca69a6cac28baa36d86d8c0ec708b102cebd1ceb1b6488e404cd9be1d1"}, + {file = "torch-2.2.0-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:f11d18fceb4f9ecb1ac680dde7c463c120ed29056225d75469c19637e9f98d12"}, + {file = "torch-2.2.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:ee1da852bfd4a7e674135a446d6074c2da7194c1b08549e31eae0b3138c6b4d2"}, + {file = 
"torch-2.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0d819399819d0862268ac531cf12a501c253007df4f9e6709ede8a0148f1a7b8"}, + {file = "torch-2.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:08f53ccc38c49d839bc703ea1b20769cc8a429e0c4b20b56921a9f64949bf325"}, + {file = "torch-2.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:93bffe3779965a71dab25fc29787538c37c5d54298fd2f2369e372b6fb137d41"}, + {file = "torch-2.2.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:c17ec323da778efe8dad49d8fb534381479ca37af1bfc58efdbb8607a9d263a3"}, + {file = "torch-2.2.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:c02685118008834e878f676f81eab3a952b7936fa31f474ef8a5ff4b5c78b36d"}, + {file = "torch-2.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d9f39d6f53cec240a0e3baa82cb697593340f9d4554cee6d3d6ca07925c2fac0"}, + {file = "torch-2.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:51770c065206250dc1222ea7c0eff3f88ab317d3e931cca2aee461b85fbc2472"}, + {file = "torch-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:008e4c6ad703de55af760c73bf937ecdd61a109f9b08f2bbb9c17e7c7017f194"}, + {file = "torch-2.2.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:de8680472dd14e316f42ceef2a18a301461a9058cd6e99a1f1b20f78f11412f1"}, + {file = "torch-2.2.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:99e1dcecb488e3fd25bcaac56e48cdb3539842904bdc8588b0b255fde03a254c"}, ] [package.dependencies] filelock = "*" +fsspec = "*" jinja2 = "*" networkx = "*" -nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu11 = {version = "11.7.101", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu11 = {version = "10.9.0.58", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu11 = {version = "10.2.10.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu11 = {version = "11.4.0.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu11 = {version = "11.7.4.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu11 = {version = "2.14.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu11 = {version = "11.7.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cudnn-cu12 = {version = "8.9.2.26", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == 
\"x86_64\""} +nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nccl-cu12 = {version = "2.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} sympy = "*" -triton = {version = "2.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -typing-extensions = "*" +triton = {version = "2.2.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +typing-extensions = ">=4.8.0" [package.extras] opt-einsum = ["opt-einsum (>=3.3)"] +optree = ["optree (>=0.9.1)"] [[package]] name = "torchvision" -version = "0.15.1" +version = "0.17.0" description = "image and video datasets and models for torch deep learning" optional = false python-versions = ">=3.8" files = [ - {file = "torchvision-0.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc10d48e9a60d006d0c1b48dea87f1ec9b63d856737d592f7c5c44cd87f3f4b7"}, - {file = "torchvision-0.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3708d3410fdcaf6280e358cda9de2a4ab06cc0b4c0fd9aeeac550ec2563a887e"}, - {file = "torchvision-0.15.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:d4de10c837f1493c1c54344388e300a06c96914c6cc55fcb2527c21f2f010bbd"}, - {file = "torchvision-0.15.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:b82fcc5abc9b5c96495c76596a1573025cc1e09d97d2d6fda717c44b9ca45881"}, - {file = "torchvision-0.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:c84e97d8cc4fe167d87adad0a2a6424cff90544365545b20669bc50e6ea46875"}, - {file = "torchvision-0.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97b90eb3b7333a31d049c4ccfd1064361e8491874959d38f466af64d67418cef"}, - {file = "torchvision-0.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b60e1c839ae2a071befbba69b17468d67feafdf576e90ff9645bfbee998de17"}, - {file = "torchvision-0.15.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:13f71a3372d9168b01481a754ebaa171207f3dc455bf2fd86906c69222443738"}, - {file = "torchvision-0.15.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b2e8394726009090b40f6cc3a95cc878cc011dfac3d8e7a6060c79213d360880"}, - {file = "torchvision-0.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:2852f501189483187ce9eb0ccd01b3f4f0918d29057e4a18b3cce8dad9a8a964"}, - {file = "torchvision-0.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e5861baaeea87d19b6fd7d131e11a4a6bd17be14234c490a259bb360775e9520"}, - {file = "torchvision-0.15.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e714f362b9d8217cf4d68509b679ebc9ddf128cfe80f6c1def8e3f8a18466e75"}, - {file = "torchvision-0.15.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:43624accad1e47f16824be4db37ad678dd89326ad90b69c9c6363eeb22b9467e"}, - {file = "torchvision-0.15.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7fe9b0cd3311b0db9e6d45ffab594ced06418fa4e2aa15eb2e60d55e5c51135c"}, - {file = "torchvision-0.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:b45324ea4911a23a4b00b5a15cdbe36d47f93137206dab9f8c606d81b69dd3a7"}, - {file = "torchvision-0.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:1dfdec7c7df967330bba3341a781e0c047d4e0163e67164a9918500362bf7d91"}, - {file = "torchvision-0.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c153710186cec0338d4fff411459a57ddbc8504436123ca73b3f0bdc26ff918c"}, - {file = "torchvision-0.15.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:ff4e650aa601f32ab97bce06704868dd2baad69ca4d454fa1f0012a51199f2bc"}, - {file = "torchvision-0.15.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e9b4bb2a15849391df0415d2f76dd36e6528e4253f7b69322b7a0d682535544b"}, - {file = "torchvision-0.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:21e6beb69e77ef6575c4fdd0ab332b96e8a7f144eee0d333acff469c827a4b5e"}, + {file = "torchvision-0.17.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:153882cd8ff8e3dbef5c5054fdd15df64e85420546805a90c0b2221f2f119c4a"}, + {file = "torchvision-0.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c55c2f86e3f3a21ddd92739a972366244e9b17916e836ec47167b0a0c083c65f"}, + {file = "torchvision-0.17.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:605950cdcefe6c5aef85709ade17b1525bcf171e122cce1df09e666d96525b90"}, + {file = "torchvision-0.17.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:3d86c212fc6379e9bec3ac647d062e34c2cf36c26b98840b66573eb9fbe1f1d9"}, + {file = "torchvision-0.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:71b314813faf13cecb09a4a635b5e4b274e8df0b1921681038d491c529555bb6"}, + {file = "torchvision-0.17.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:10d276821f115fb369e6cf1f1b77b2cca60cda12cbb39a41513a9d3d0f2a93ae"}, + {file = "torchvision-0.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3eef2daddadb5c21e802e0550dd7e3ee3d98c430f4aed212ae3ba0358558be1"}, + {file = "torchvision-0.17.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:acc0d098ab8c295a750f0218bf5bf7bfc2f2c21f9c2fe3fc30b695cd94f4c759"}, + {file = "torchvision-0.17.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:3d2e9552d72e4037f2db6f7d97989a2e2f95763aa1861963a3faf521bb1610c4"}, + {file = "torchvision-0.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:f8e542cf71e1294fcb5635038eae6702df543dc90706f0836ec80e75efc511fc"}, + {file = "torchvision-0.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:816ae1a4506b1cb0f638e1827cae7ab768c731369ab23e86839f177926197143"}, + {file = "torchvision-0.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be39874c239215a39b3c431c7016501f1a45bfbbebf2fe8e11d8339b5ea23bca"}, + {file = "torchvision-0.17.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:8fe14d580557aef2c45dd462c069ff936b6507b215c4b496f30973ae8cff917d"}, + {file = "torchvision-0.17.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:4608ba3246c45c968ede40e7640e4eed64556210faa154cf1ffccb1cadabe445"}, + {file = "torchvision-0.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:b755d6d3e021239d2408bf3794d0d3dcffbc629f1fd808c43d8b346045a098c4"}, + {file = "torchvision-0.17.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:870d7cda57420e44d20eb07bfe37bf5344a06434a7a6195b4c7f3dd55838587d"}, + {file = "torchvision-0.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:477f6e64a9d798c0f5adefc300acc220da6f17ef5c1e110d20108f66554fee4d"}, + {file = "torchvision-0.17.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a54a15bd6f3dbb04ebd36c5a87530b2e090ee4b9b15eb89eda558ab3e50396a0"}, + {file = "torchvision-0.17.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e041ce3336364413bab051a3966d884bab25c200f98ca8a065f0abe758c3005e"}, + {file = "torchvision-0.17.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:7887f767670c72aa20f5237042d0ca1462da18f66a3ea8c36b6ba67ce26b82fc"}, + {file = "torchvision-0.17.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b1ced438b81ef662a71c8c81debaf0c80455b35b811ca55a4c3c593d721b560a"}, + {file = "torchvision-0.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b53569c52bd4bd1176a1e49d8ea55883bcf57e1614cb97e2e8ce372768299b70"}, + {file = "torchvision-0.17.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7f373507afcd9022ebd9f50b31da8dbac1ea6783ffb77d1f1ab8806425c0a83b"}, + {file = "torchvision-0.17.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:085251ab36340206dc7e1be59a15fa5e307d45ccd66889f5d7bf1ba5e7ecdc57"}, + {file = "torchvision-0.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4c0d4c0af58af2752aad235150bd794d0f324e6eeac5cd13c440bda5dce622d3"}, ] [package.dependencies] numpy = "*" pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0" requests = "*" -torch = "2.0.0" +torch = "2.2.0" [package.extras] scipy = ["scipy"] [[package]] name = "tornado" -version = "6.3.3" +version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, - {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, - {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, - {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = 
"tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] [[package]] @@ -4873,68 +5268,67 @@ telegram = ["requests"] [[package]] name = "traitlets" -version = "5.9.0" +version = "5.14.0" description = "Traitlets Python configuration system" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, - {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, + {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, + {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "transformers" -version = "4.34.0" +version = "4.36.1" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = false python-versions = ">=3.8.0" files = [ - {file = "transformers-4.34.0-py3-none-any.whl", hash = "sha256:3f0187183a7f22c51ecbbc9eac5145df666c5b86bec6feed10e11f0363f3a1f9"}, - {file = "transformers-4.34.0.tar.gz", hash = "sha256:cc2ae61bfbfaa45337fd9017326669fc60e4f55125f589d50da47819e3d6f504"}, + {file = "transformers-4.36.1-py3-none-any.whl", hash = "sha256:0e309d03634885f02d46801ec4f2c3fc1d614a5b9ebde608181f3e842bac53b8"}, + {file = "transformers-4.36.1.tar.gz", hash = "sha256:28e55952d9bed68f06cf45a3d29cc480679b528afe944e68f8cf6c799e428759"}, ] [package.dependencies] filelock = "*" -huggingface-hub = ">=0.16.4,<1.0" +huggingface-hub = ">=0.19.3,<1.0" numpy = ">=1.17" packaging = ">=20.0" pyyaml = ">=5.1" regex = "!=2019.12.17" requests = "*" safetensors = ">=0.3.1" -tokenizers = ">=0.14,<0.15" +tokenizers = ">=0.14,<0.19" tqdm = ">=4.27" [package.extras] -accelerate = ["accelerate (>=0.20.3)"] -agents = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.10,!=1.12.0)"] -all = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", 
"optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.15)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] +accelerate = ["accelerate (>=0.21.0)"] +agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.10,!=1.12.0)"] +all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.20.3)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.15)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest 
(>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.15)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.15)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.15)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] +deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece 
(>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] +docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"] docs-specific = ["hf-doc-builder"] -fairscale = ["fairscale (>0.3)"] flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune]", "sigopt"] +integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] modelcreation = ["cookiecutter (==1.7.3)"] natten = ["natten (>=0.14.6)"] onnx = ["onnxconverter-common", 
"onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (>=0.0.241,<=0.0.259)", "urllib3 (<2.0.0)"] -ray = ["ray[tune]"] +quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<2.0.0)"] +ray = ["ray[tune] (>=2.7.0)"] retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] sagemaker = ["sagemaker (>=2.31.0)"] sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] @@ -4942,65 +5336,72 @@ serving = ["fastapi", "pydantic (<2)", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic (<2)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] timm = ["timm"] -tokenizers = ["tokenizers (>=0.14,<0.15)"] -torch = ["accelerate (>=0.20.3)", "torch (>=1.10,!=1.12.0)"] +tokenizers = ["tokenizers (>=0.14,<0.19)"] +torch = ["accelerate (>=0.21.0)", "torch (>=1.10,!=1.12.0)"] torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (<10.0.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.16.4,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.15)", "torch (>=1.10,!=1.12.0)", "tqdm (>=4.27)"] +torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] +torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch (>=1.10,!=1.12.0)", "tqdm (>=4.27)"] video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (<10.0.0)"] +vision = ["Pillow (>=10.0.1,<=15.0)"] [[package]] name = "triton" -version = "2.0.0" +version = "2.2.0" description = "A language and 
compiler for custom Deep Learning operations" optional = false python-versions = "*" files = [ - {file = "triton-2.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38806ee9663f4b0f7cd64790e96c579374089e58f49aac4a6608121aa55e2505"}, - {file = "triton-2.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:226941c7b8595219ddef59a1fdb821e8c744289a132415ddd584facedeb475b1"}, - {file = "triton-2.0.0-1-cp36-cp36m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4c9fc8c89874bc48eb7e7b2107a9b8d2c0bf139778637be5bfccb09191685cfd"}, - {file = "triton-2.0.0-1-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d2684b6a60b9f174f447f36f933e9a45f31db96cb723723ecd2dcfd1c57b778b"}, - {file = "triton-2.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9d4978298b74fcf59a75fe71e535c092b023088933b2f1df933ec32615e4beef"}, - {file = "triton-2.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:74f118c12b437fb2ca25e1a04759173b517582fcf4c7be11913316c764213656"}, - {file = "triton-2.0.0-1-pp37-pypy37_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9618815a8da1d9157514f08f855d9e9ff92e329cd81c0305003eb9ec25cc5add"}, - {file = "triton-2.0.0-1-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1aca3303629cd3136375b82cb9921727f804e47ebee27b2677fef23005c3851a"}, - {file = "triton-2.0.0-1-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e3e13aa8b527c9b642e3a9defcc0fbd8ffbe1c80d8ac8c15a01692478dc64d8a"}, - {file = "triton-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f05a7e64e4ca0565535e3d5d3405d7e49f9d308505bb7773d21fb26a4c008c2"}, - {file = "triton-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4b99ca3c6844066e516658541d876c28a5f6e3a852286bbc97ad57134827fd"}, - {file = "triton-2.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47b4d70dc92fb40af553b4460492c31dc7d3a114a979ffb7a5cdedb7eb546c08"}, - {file = "triton-2.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fedce6a381901b1547e0e7e1f2546e4f65dca6d91e2d8a7305a2d1f5551895be"}, - {file = "triton-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75834f27926eab6c7f00ce73aaf1ab5bfb9bec6eb57ab7c0bfc0a23fac803b4c"}, - {file = "triton-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0117722f8c2b579cd429e0bee80f7731ae05f63fe8e9414acd9a679885fcbf42"}, - {file = "triton-2.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcd9be5d0c2e45d2b7e6ddc6da20112b6862d69741576f9c3dbaf941d745ecae"}, - {file = "triton-2.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a0d2c3fc2eab4ba71384f2e785fbfd47aa41ae05fa58bf12cb31dcbd0aeceb"}, - {file = "triton-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c47b72c72693198163ece9d90a721299e4fb3b8e24fd13141e384ad952724f"}, + {file = "triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2294514340cfe4e8f4f9e5c66c702744c4a117d25e618bd08469d0bfed1e2e5"}, + {file = "triton-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da58a152bddb62cafa9a857dd2bc1f886dbf9f9c90a2b5da82157cd2b34392b0"}, + {file = 
"triton-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af58716e721460a61886668b205963dc4d1e4ac20508cc3f623aef0d70283d5"}, + {file = "triton-2.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8fe46d3ab94a8103e291bd44c741cc294b91d1d81c1a2888254cbf7ff846dab"}, + {file = "triton-2.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ce26093e539d727e7cf6f6f0d932b1ab0574dc02567e684377630d86723ace"}, + {file = "triton-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:227cc6f357c5efcb357f3867ac2a8e7ecea2298cd4606a8ba1e931d1d5a947df"}, ] [package.dependencies] -cmake = "*" filelock = "*" -lit = "*" -torch = "*" [package.extras] -tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)"] -tutorials = ["matplotlib", "pandas", "tabulate"] +build = ["cmake (>=3.20)", "lit"] +tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"] +tutorials = ["matplotlib", "pandas", "tabulate", "torch"] + +[[package]] +name = "typer" +version = "0.9.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, + {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -5047,72 +5448,76 @@ pillow = "*" [[package]] name = "ujson" -version = "5.8.0" +version = "5.9.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" files = [ - {file = "ujson-5.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1"}, - {file = "ujson-5.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75"}, - {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76"}, - {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a"}, - {file = "ujson-5.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7"}, - {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2"}, - {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba"}, - {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf"}, - {file = "ujson-5.8.0-cp310-cp310-win32.whl", hash = "sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a"}, - {file = "ujson-5.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0"}, - {file = "ujson-5.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f"}, - {file = "ujson-5.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8"}, - {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405"}, - {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4"}, - {file = "ujson-5.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e"}, - {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3"}, - {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5"}, - {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa"}, - {file = "ujson-5.8.0-cp311-cp311-win32.whl", hash = "sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879"}, - {file = "ujson-5.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721"}, - {file = "ujson-5.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c"}, - {file = "ujson-5.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6"}, - {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae"}, - {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b"}, - {file = 
"ujson-5.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296"}, - {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7"}, - {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08"}, - {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30"}, - {file = "ujson-5.8.0-cp312-cp312-win32.whl", hash = "sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916"}, - {file = "ujson-5.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6"}, - {file = "ujson-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb"}, - {file = "ujson-5.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b"}, - {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582"}, - {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c"}, - {file = "ujson-5.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7"}, - {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67"}, - {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9"}, - {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07"}, - {file = "ujson-5.8.0-cp38-cp38-win32.whl", hash = "sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564"}, - {file = "ujson-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5"}, - {file = "ujson-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c"}, - {file = "ujson-5.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a"}, - {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207"}, - {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042"}, - {file = "ujson-5.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3"}, - {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95"}, - {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c"}, - {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc"}, - {file = "ujson-5.8.0-cp39-cp39-win32.whl", hash = "sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903"}, - {file = "ujson-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c"}, - {file = "ujson-5.8.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f"}, - {file = "ujson-5.8.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94"}, - {file = "ujson-5.8.0.tar.gz", hash = "sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425"}, + {file = "ujson-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab71bf27b002eaf7d047c54a68e60230fbd5cd9da60de7ca0aa87d0bccead8fa"}, + {file = "ujson-5.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a365eac66f5aa7a7fdf57e5066ada6226700884fc7dce2ba5483538bc16c8c5"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e015122b337858dba5a3dc3533af2a8fc0410ee9e2374092f6a5b88b182e9fcc"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:779a2a88c53039bebfbccca934430dabb5c62cc179e09a9c27a322023f363e0d"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10ca3c41e80509fd9805f7c149068fa8dbee18872bbdc03d7cca928926a358d5"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a566e465cb2fcfdf040c2447b7dd9718799d0d90134b37a20dff1e27c0e9096"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f833c529e922577226a05bc25b6a8b3eb6c4fb155b72dd88d33de99d53113124"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b68a0caab33f359b4cbbc10065c88e3758c9f73a11a65a91f024b2e7a1257106"}, + {file = "ujson-5.9.0-cp310-cp310-win32.whl", hash = "sha256:7cc7e605d2aa6ae6b7321c3ae250d2e050f06082e71ab1a4200b4ae64d25863c"}, + {file = 
"ujson-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6d3f10eb8ccba4316a6b5465b705ed70a06011c6f82418b59278fbc919bef6f"}, + {file = "ujson-5.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b"}, + {file = "ujson-5.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d"}, + {file = "ujson-5.9.0-cp311-cp311-win32.whl", hash = "sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120"}, + {file = "ujson-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99"}, + {file = "ujson-5.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c"}, + {file = "ujson-5.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c"}, + {file = "ujson-5.9.0-cp312-cp312-win32.whl", hash = "sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437"}, + {file = "ujson-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c"}, + {file = "ujson-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d581db9db9e41d8ea0b2705c90518ba623cbdc74f8d644d7eb0d107be0d85d9c"}, + {file = "ujson-5.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ff741a5b4be2d08fceaab681c9d4bc89abf3c9db600ab435e20b9b6d4dfef12e"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cdcb02cabcb1e44381221840a7af04433c1dc3297af76fde924a50c3054c708c"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e208d3bf02c6963e6ef7324dadf1d73239fb7008491fdf523208f60be6437402"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4b3917296630a075e04d3d07601ce2a176479c23af838b6cf90a2d6b39b0d95"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0c4d6adb2c7bb9eb7c71ad6f6f612e13b264942e841f8cc3314a21a289a76c4e"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0b159efece9ab5c01f70b9d10bbb77241ce111a45bc8d21a44c219a2aec8ddfd"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0cb4a7814940ddd6619bdce6be637a4b37a8c4760de9373bac54bb7b229698b"}, + {file = "ujson-5.9.0-cp38-cp38-win32.whl", hash = "sha256:dc80f0f5abf33bd7099f7ac94ab1206730a3c0a2d17549911ed2cb6b7aa36d2d"}, + {file = "ujson-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:506a45e5fcbb2d46f1a51fead991c39529fc3737c0f5d47c9b4a1d762578fc30"}, + {file = "ujson-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0fd2eba664a22447102062814bd13e63c6130540222c0aa620701dd01f4be81"}, + {file = "ujson-5.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bdf7fc21a03bafe4ba208dafa84ae38e04e5d36c0e1c746726edf5392e9f9f36"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f909bc08ce01f122fd9c24bc6f9876aa087188dfaf3c4116fe6e4daf7e194f"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd4ea86c2afd41429751d22a3ccd03311c067bd6aeee2d054f83f97e41e11d8f"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:63fb2e6599d96fdffdb553af0ed3f76b85fda63281063f1cb5b1141a6fcd0617"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32bba5870c8fa2a97f4a68f6401038d3f1922e66c34280d710af00b14a3ca562"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:37ef92e42535a81bf72179d0e252c9af42a4ed966dc6be6967ebfb929a87bc60"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f69f16b8f1c69da00e38dc5f2d08a86b0e781d0ad3e4cc6a13ea033a439c4844"}, + {file = "ujson-5.9.0-cp39-cp39-win32.whl", hash = "sha256:3382a3ce0ccc0558b1c1668950008cece9bf463ebb17463ebf6a8bfc060dae34"}, + {file = "ujson-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:6adef377ed583477cf005b58c3025051b5faa6b8cc25876e594afbb772578f21"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ffdfebd819f492e48e4f31c97cb593b9c1a8251933d8f8972e81697f00326ff1"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eec2ddc046360d087cf35659c7ba0cbd101f32035e19047013162274e71fcf"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbb90aa5c23cb3d4b803c12aa220d26778c31b6e4b7a13a1f49971f6c7d088e"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0823cb70866f0d6a4ad48d998dd338dce7314598721bc1b7986d054d782dfd"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4e35d7885ed612feb6b3dd1b7de28e89baaba4011ecdf995e88be9ac614765e9"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:b048aa93eace8571eedbd67b3766623e7f0acbf08ee291bef7d8106210432427"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323279e68c195110ef85cbe5edce885219e3d4a48705448720ad925d88c9f851"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ac92d86ff34296f881e12aa955f7014d276895e0e4e868ba7fddebbde38e378"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6eecbd09b316cea1fd929b1e25f70382917542ab11b692cb46ec9b0a26c7427f"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:473fb8dff1d58f49912323d7cb0859df5585cfc932e4b9c053bf8cf7f2d7c5c4"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f91719c6abafe429c1a144cfe27883eace9fb1c09a9c5ef1bcb3ae80a3076a4e"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1c0991c4fe256f5fdb19758f7eac7f47caac29a6c57d0de16a19048eb86bad"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea0f55a1396708e564595aaa6696c0d8af532340f477162ff6927ecc46e21"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:07e0cfdde5fd91f54cd2d7ffb3482c8ff1bf558abf32a8b953a5d169575ae1cd"}, + {file = "ujson-5.9.0.tar.gz", hash = "sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532"}, ] [[package]] @@ -5176,13 +5581,13 @@ ucall = {version = "*", markers = "python_version >= \"3.9\""} [[package]] name = "uvicorn" -version = "0.23.2" +version = "0.24.0.post1" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.23.2-py3-none-any.whl", hash = "sha256:1f9be6558f01239d4fdf22ef8126c39cb1ad0addf76c40e760549d2c2f43ab53"}, - {file = "uvicorn-0.23.2.tar.gz", hash = "sha256:4d3cc12d7727ba72b64d12d3cc7743124074c0a69f7b201512fc50c3e3f1569a"}, + {file = "uvicorn-0.24.0.post1-py3-none-any.whl", hash = "sha256:7c84fea70c619d4a710153482c0d230929af7bcf76c7bfa6de151f0a3a80121e"}, + {file = "uvicorn-0.24.0.post1.tar.gz", hash = "sha256:09c8e5a79dc466bdf28dead50093957db184de356fcdc48697bad3bde4c2588e"}, ] [package.dependencies] @@ -5202,47 +5607,47 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [[package]] name = "uvloop" -version = "0.17.0" +version = "0.19.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8.0" files = [ - {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce9f61938d7155f79d3cb2ffa663147d4a76d16e08f65e2c66b77bd41b356718"}, - {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:68532f4349fd3900b839f588972b3392ee56042e440dd5873dfbbcd2cc67617c"}, - {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d"}, - {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff3d00b70ce95adce264462c930fbaecb29718ba6563db354608f37e49e09024"}, - {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5abddb3558d3f0a78949c750644a67be31e47936042d4f6c888dd6f3c95f4aa"}, - {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:8efcadc5a0003d3a6e887ccc1fb44dec25594f117a94e3127954c05cf144d811"}, - {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3378eb62c63bf336ae2070599e49089005771cc651c8769aaad72d1bd9385a7c"}, - {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6aafa5a78b9e62493539456f8b646f85abc7093dd997f4976bb105537cf2635e"}, - {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686a47d57ca910a2572fddfe9912819880b8765e2f01dc0dd12a9bf8573e539"}, - {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:864e1197139d651a76c81757db5eb199db8866e13acb0dfe96e6fc5d1cf45fc4"}, - {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2a6149e1defac0faf505406259561bc14b034cdf1d4711a3ddcdfbaa8d825a05"}, - {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6708f30db9117f115eadc4f125c2a10c1a50d711461699a0cbfaa45b9a78e376"}, - {file = "uvloop-0.17.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:23609ca361a7fc587031429fa25ad2ed7242941adec948f9d10c045bfecab06b"}, - {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2deae0b0fb00a6af41fe60a675cec079615b01d68beb4cc7b722424406b126a8"}, - {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45cea33b208971e87a31c17622e4b440cac231766ec11e5d22c76fab3bf9df62"}, - {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b09e0f0ac29eee0451d71798878eae5a4e6a91aa275e114037b27f7db72702d"}, - {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbbaf9da2ee98ee2531e0c780455f2841e4675ff580ecf93fe5c48fe733b5667"}, - {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a4aee22ece20958888eedbad20e4dbb03c37533e010fb824161b4f05e641f738"}, - {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:307958f9fc5c8bb01fad752d1345168c0abc5d62c1b72a4a8c6c06f042b45b20"}, - {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ebeeec6a6641d0adb2ea71dcfb76017602ee2bfd8213e3fcc18d8f699c5104f"}, - {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1436c8673c1563422213ac6907789ecb2b070f5939b9cbff9ef7113f2b531595"}, - {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8887d675a64cfc59f4ecd34382e5b4f0ef4ae1da37ed665adba0c2badf0d6578"}, - {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3db8de10ed684995a7f34a001f15b374c230f7655ae840964d51496e2f8a8474"}, - {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d37dccc7ae63e61f7b96ee2e19c40f153ba6ce730d8ba4d3b4e9738c1dccc1b"}, - {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cbbe908fda687e39afd6ea2a2f14c2c3e43f2ca88e3a11964b297822358d0e6c"}, - {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d97672dc709fa4447ab83276f344a165075fd9f366a97b712bdd3fee05efae8"}, - {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e507c9ee39c61bfddd79714e4f85900656db1aec4d40c6de55648e85c2799c"}, - {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c092a2c1e736086d59ac8e41f9c98f26bbf9b9222a76f21af9dfe949b99b2eb9"}, - {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:30babd84706115626ea78ea5dbc7dd8d0d01a2e9f9b306d24ca4ed5796c66ded"}, - {file = "uvloop-0.17.0.tar.gz", hash = "sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, + {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, ] [package.extras] -dev = ["Cython (>=0.29.32,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)", "pytest (>=3.6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] [[package]] name = "validators" @@ -5268,53 +5673,106 @@ tooling-extras = ["pyaml (>=23.7.0)", "pypandoc-binary (>=1.11)", "pytest (>=7.4 [[package]] name = "virtualenv" -version = "20.24.3" +version = "20.25.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.3-py3-none-any.whl", hash = "sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02"}, - {file = "virtualenv-20.24.3.tar.gz", hash = "sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc"}, + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" +platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", 
"coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "watchfiles" -version = "0.19.0" +version = "0.21.0" description = "Simple, modern and high performance file watching and code reload in python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"}, - {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"}, - {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"}, - {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"}, - {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"}, - {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"}, - {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"}, - {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"}, - {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"}, - {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"}, - {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"}, - {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"}, - {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"}, - {file = 
"watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"}, - {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"}, - {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"}, + {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"}, + {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"}, + {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"}, + {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"}, + {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"}, + {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"}, + {file = "watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"}, + {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"}, + {file = 
"watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"}, + {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"}, + {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"}, + {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"}, + {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"}, + {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = "sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"}, + {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"}, + {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"}, + {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"}, + {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"}, + {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"}, + {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"}, + {file = "watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"}, + {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"}, + {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"}, + {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"}, + {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"}, + {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"}, + {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = "sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"}, + {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"}, + {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"}, + {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"}, + {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"}, + {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"}, + {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"}, + {file = 
"watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"}, + {file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"}, ] [package.dependencies] @@ -5322,24 +5780,24 @@ anyio = ">=3.0.0" [[package]] name = "wcwidth" -version = "0.2.6" +version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, + {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, + {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, ] [[package]] name = "weaviate-client" -version = "3.24.2" +version = "3.25.3" description = "A python native Weaviate client" optional = false python-versions = ">=3.8" files = [ - {file = "weaviate-client-3.24.2.tar.gz", hash = "sha256:6914c48c9a7e5ad0be9399271f9cb85d6f59ab77476c6d4e56a3925bf149edaa"}, - {file = "weaviate_client-3.24.2-py3-none-any.whl", hash = "sha256:bc50ca5fcebcd48de0d00f66700b0cf7c31a97c4cd3d29b4036d77c5d1d9479b"}, + {file = "weaviate-client-3.25.3.tar.gz", hash = "sha256:894df700fc1f0a932fa370029f644af4062f8718026fda5ad07647d357d95167"}, + {file = "weaviate_client-3.25.3-py3-none-any.whl", hash = "sha256:cb049ed7b710088ff9038d27b97f28c80206ce9e4d12d622c3597da5790b2be0"}, ] [package.dependencies] @@ -5352,81 +5810,83 @@ grpc = ["grpcio (>=1.57.0,<2.0.0)", "grpcio-tools (>=1.57.0,<2.0.0)"] [[package]] name = "websockets" -version = "11.0.3" +version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false 
-python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac"}, - {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d"}, - {file = "websockets-11.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f"}, - {file = "websockets-11.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"}, - {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11"}, - {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca"}, - {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54"}, - {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4"}, - {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526"}, - {file = "websockets-11.0.3-cp310-cp310-win32.whl", hash = "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69"}, - {file = "websockets-11.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f"}, - {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb"}, - {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288"}, - {file = "websockets-11.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d"}, - {file = "websockets-11.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3"}, - {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b"}, - {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6"}, - {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97"}, - {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf"}, - {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd"}, - {file = "websockets-11.0.3-cp311-cp311-win32.whl", hash = "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c"}, - {file = "websockets-11.0.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8"}, - {file = "websockets-11.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152"}, - {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f"}, - {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b"}, - {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb"}, - {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007"}, - {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0"}, - {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af"}, - {file = "websockets-11.0.3-cp37-cp37m-win32.whl", hash = "sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f"}, - {file = "websockets-11.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de"}, - {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0"}, - {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae"}, - {file = "websockets-11.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99"}, - {file = "websockets-11.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa"}, - {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86"}, - {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c"}, - {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0"}, - {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e"}, - {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788"}, - {file = "websockets-11.0.3-cp38-cp38-win32.whl", hash = "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74"}, - {file = "websockets-11.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f"}, - {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8"}, - {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd"}, - {file = "websockets-11.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016"}, - {file = "websockets-11.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61"}, - {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b"}, - {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd"}, - {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7"}, - {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1"}, - {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311"}, - {file = "websockets-11.0.3-cp39-cp39-win32.whl", hash = "sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128"}, - {file = "websockets-11.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b"}, - {file = "websockets-11.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280"}, - {file = "websockets-11.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82"}, - {file = 
"websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4"}, - {file = "websockets-11.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602"}, - {file = "websockets-11.0.3-py3-none-any.whl", hash = "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6"}, - {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, ] [[package]] @@ -5446,20 +5906,6 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] -[[package]] -name = "wheel" -version = "0.41.2" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "wheel-0.41.2-py3-none-any.whl", hash = "sha256:75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8"}, - {file = "wheel-0.41.2.tar.gz", hash = "sha256:0c5ac5ff2afb79ac23ab82bab027a0be7b5dbcf2e54dc50efe4bf507de1f7985"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - [[package]] name = "win32-setctime" version = "1.1.0" @@ -5476,85 +5922,101 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [[package]] name = "yarl" -version = "1.9.2" +version = "1.9.4" description = "Yet another URL library" optional = false python-versions = ">=3.7" files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = 
"yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, + {file = 
"yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, 
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, ] [package.dependencies] @@ -5563,20 +6025,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.16.2" +version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, + {file = "zipp-3.17.0-py3-none-any.whl", hash = 
"sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "4ffeecb92b2cee264221652e30a0399088dc073b04a08d83d3c67650cead1ad4" +content-hash = "e723f9e94278f833cc2fde6ab26e84468b929c35318c27f618dd34d19cdf8568" diff --git a/python/poetry.toml b/python/poetry.toml new file mode 100644 index 000000000000..ab1033bd3722 --- /dev/null +++ b/python/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/python/pyproject.toml b/python/pyproject.toml index 6f3b9c2b0b68..7a7db80f4b37 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,31 +1,34 @@ [tool.poetry] name = "semantic-kernel" -version = "0.3.14.dev" -description = "" +version = "0.5.1.dev" +description = "Semantic Kernel Python SDK" authors = ["Microsoft "] readme = "pip/README.md" packages = [{include = "semantic_kernel"}] [tool.poetry.dependencies] python = "^3.8" +aiohttp = "^3.8" numpy = "^1.24.2" -openai = ">=0.27,<0.29" +openai = ">=1.0" aiofiles = "^23.1.0" python-dotenv = "1.0.0" regex = "^2023.6.3" openapi_core = "^0.18.0" prance = "^23.6.21.0" -pydantic = "<2" +pydantic = ">2" motor = "^3.3.1" [tool.poetry.group.dev.dependencies] -pre-commit = "3.3.3" -black = {version = "23.10.1", allow-prereleases = true} +pre-commit = "3.5.0" +# please keep black and ruff version in sync with the ones in .pre-commit-config.yaml +black = "^23.12.0" +ruff = "0.1.8" ipykernel = "^6.21.1" -pytest = "7.4.2" -ruff = "0.0.289" -pytest-asyncio = "0.21.1" +pytest = "7.4.3" +pytest-asyncio = "0.23.2" snoop = "0.4.3" +pytest-cov = "4.1.0" [tool.poetry.group.google_palm.dependencies] google-generativeai = { version = ">=0.1,<0.3", markers = "python_version >= '3.9'" } @@ -34,17 +37,17 @@ grpcio-status = { version = "^1.53.0", markers = "python_version >= '3.9'" } [tool.poetry.group.hugging_face.dependencies] transformers = "^4.28.1" sentence-transformers = "^2.2.2" -torch = "2.0.0" +torch = "2.2.0" [tool.poetry.group.qdrant.dependencies] qdrant-client = {version = "^1.3.2", python = ">=3.8,<3.12"} [tool.poetry.group.chromadb.dependencies] -chromadb = "^0.4.0" +chromadb = "0.4.13" [tool.poetry.group.milvus.dependencies] -pymilvus = "^2.2.11" -milvus = "^2.2.11" +pymilvus = "2.2.16" +milvus = "2.2.16" [tool.poetry.group.weaviate.dependencies] weaviate-client = "^3.18.0" @@ -65,17 +68,24 @@ azure-search-documents = {version = "11.4.0b9", allow-prereleases = true} azure-core = "^1.28.0" azure-identity = "^1.13.0" +[tool.poetry.group.tests.dependencies] +azure-search-documents = {version = "11.4.0b9", allow-prereleases = true} +azure-core = "^1.28.0" + [tool.poetry.group.usearch.dependencies] -usearch = "^1.1.1" -pyarrow = ">=12.0.1,<14.0.0" +usearch = "1.1.1" +pyarrow = ">=12.0.1,<15.0.0" [tool.isort] -profile = "black" +profile = "ruff" [tool.ruff] select = ["E", "F", "I"] line-length = 120 
+[tool.black] +line-length = 120 + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/python/samples/kernel-syntax-examples/action_planner.py b/python/samples/kernel-syntax-examples/action_planner.py index 710bf14aac01..5a95453707e5 100644 --- a/python/samples/kernel-syntax-examples/action_planner.py +++ b/python/samples/kernel-syntax-examples/action_planner.py @@ -4,7 +4,12 @@ from semantic_kernel.connectors.ai.open_ai import ( OpenAIChatCompletion, ) -from semantic_kernel.core_skills import FileIOSkill, MathSkill, TextSkill, TimeSkill +from semantic_kernel.core_plugins import ( + FileIOPlugin, + MathPlugin, + TextPlugin, + TimePlugin, +) from semantic_kernel.planning import ActionPlanner @@ -12,13 +17,11 @@ async def main(): kernel = sk.Kernel() api_key, org_id = sk.openai_settings_from_dot_env() - kernel.add_chat_service( - "chat-gpt", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) - ) - kernel.import_skill(MathSkill(), "math") - kernel.import_skill(FileIOSkill(), "fileIO") - kernel.import_skill(TimeSkill(), "time") - kernel.import_skill(TextSkill(), "text") + kernel.add_chat_service("chat-gpt", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)) + kernel.import_plugin(MathPlugin(), "math") + kernel.import_plugin(FileIOPlugin(), "fileIO") + kernel.import_plugin(TimePlugin(), "time") + kernel.import_plugin(TextPlugin(), "text") # create an instance of action planner. planner = ActionPlanner(kernel) @@ -27,10 +30,10 @@ async def main(): ask = "What is the sum of 110 and 990?" # ask the action planner to identify a suitable function from the list of functions available. - plan = await planner.create_plan_async(goal=ask) + plan = await planner.create_plan(goal=ask) # ask the action planner to execute the identified function. - result = await plan.invoke_async() + result = await plan.invoke() print(result) """ Output: diff --git a/python/samples/kernel-syntax-examples/azure_chat_gpt_api.py b/python/samples/kernel-syntax-examples/azure_chat_gpt_api.py index 719730002172..5d0355a7aab3 100644 --- a/python/samples/kernel-syntax-examples/azure_chat_gpt_api.py +++ b/python/samples/kernel-syntax-examples/azure_chat_gpt_api.py @@ -1,13 +1,19 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio +import logging from dotenv import load_dotenv import semantic_kernel as sk import semantic_kernel.connectors.ai.open_ai as sk_oai +from semantic_kernel.connectors.ai.chat_completion_client_base import ( + ChatCompletionClientBase, +) from semantic_kernel.utils.settings import azure_openai_settings_from_dot_env_as_dict +logging.basicConfig(level=logging.INFO) + load_dotenv() system_message = """ @@ -21,26 +27,35 @@ kernel = sk.Kernel() -kernel.add_chat_service( - "chat-gpt", - sk_oai.AzureChatCompletion( - **azure_openai_settings_from_dot_env_as_dict(include_api_version=True) - ), -) +chat_service = sk_oai.AzureChatCompletion(**azure_openai_settings_from_dot_env_as_dict(include_api_version=True)) +kernel.add_chat_service("chat-gpt", chat_service) -prompt_config = sk.PromptTemplateConfig.from_completion_parameters( - max_tokens=2000, temperature=0.7, top_p=0.8 -) +## there are three ways to create the request settings in code: # noqa: E266 -prompt_template = sk.ChatPromptTemplate( - "{{$user_input}}", kernel.prompt_template_engine, prompt_config -) +## 1. 
create the request settings from the base class: # noqa: E266 +# from semantic_kernel.connectors.ai.chat_completion_client_base import PromptExecutionSettings +# req_settings = PromptExecutionSettings(extension_data = { "max_tokens": 2000, "temperature": 0.7, "top_p": 0.8} ) +## This method (using the PromptExecutionSettings base class) is the most generic, and it allows you to store request settings for different services in the same extension_data field. There are two downsides to this approach: the specific request settings class is created dynamically for each call, which adds overhead when you are only using a single service, and the request settings are not type checked, so errors only surface once the dynamic creation of the request settings class fails. # noqa: E501 E266 + +## 2. create the request settings directly for the service you are using: # noqa: E266 +# req_settings = sk_oai.AzureChatPromptExecutionSettings(max_tokens=2000, temperature=0.7, top_p=0.8) +## The second method is useful when you are using a single service and want type checking on the request settings, or when you are using multiple instances of the same type of service, for instance gpt-35-turbo and gpt-4, both in openai and both for chat. # noqa: E501 E266 + +## 3. create the request settings from the kernel based on the registered service class: # noqa: E266 +req_settings = kernel.get_prompt_execution_settings_from_service(ChatCompletionClientBase, "chat-gpt") +req_settings.max_tokens = 2000 +req_settings.temperature = 0.7 +req_settings.top_p = 0.8 +## The third method is the most specific as the returned request settings class is the one that is registered for the service and has some fields already filled in, like the service_id and ai_model_id. # noqa: E501 E266 + + +prompt_config = sk.PromptTemplateConfig(execution_settings=req_settings) + +prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config) prompt_template.add_system_message(system_message) prompt_template.add_user_message("Hi there, who are you?") -prompt_template.add_assistant_message( - "I am Mosscap, a chat bot. I'm trying to figure out what people need." -) +prompt_template.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.") function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) @@ -48,10 +63,12 @@ async def chat() -> bool: context_vars = sk.ContextVariables() - try: user_input = input("User:> ") - context_vars["user_input"] = user_input + if user_input == "": + context_vars["user_input"] = "what is openai?"
+ else: + context_vars["user_input"] = user_input except KeyboardInterrupt: print("\n\nExiting chat...") return False @@ -63,7 +80,15 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - answer = await kernel.run_async(chat_function, input_vars=context_vars) + stream = False + if stream: + answer = kernel.run_stream(chat_function, input_vars=context_vars) + print("Mosscap:> ", end="") + async for message in answer: + print(str(message[0]), end="") + print("\n") + return True + answer = await kernel.run(chat_function, input_vars=context_vars) print(f"Mosscap:> {answer}") return True diff --git a/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api.py b/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api.py new file mode 100644 index 000000000000..b9c8460ec342 --- /dev/null +++ b/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api.py @@ -0,0 +1,101 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +import semantic_kernel as sk +import semantic_kernel.connectors.ai.open_ai as sk_oai +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureAISearchDataSources, + AzureChatPromptExecutionSettings, + AzureDataSources, + ExtraBody, +) + +kernel = sk.Kernel() + +# Load Azure OpenAI Settings +deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env() + +# For example, AI Search index may contain the following document: + +# Emily and David, two passionate scientists, met during a research expedition to Antarctica. +# Bonded by their love for the natural world and shared curiosity, they uncovered a +# groundbreaking phenomenon in glaciology that could potentially reshape our understanding of climate change. + +azure_ai_search_settings = sk.azure_aisearch_settings_from_dot_env_as_dict() + +# Our example index has fields "source_title", "source_text", "source_url", and "source_file". +# Add fields mapping to the settings to indicate which fields to use for the title, content, URL, and file path. +azure_ai_search_settings["fieldsMapping"] = { + "titleField": "source_title", + "urlField": "source_url", + "contentFields": ["source_text"], + "filepathField": "source_file", +} + +# Create the data source settings +az_source = AzureAISearchDataSources(**azure_ai_search_settings) +az_data = AzureDataSources(type="AzureCognitiveSearch", parameters=az_source) +extra = ExtraBody(dataSources=[az_data]) +req_settings = AzureChatPromptExecutionSettings(extra_body=extra) +prompt_config = sk.PromptTemplateConfig(execution_settings=req_settings) + +# When using data, set use_extensions=True and use the 2023-12-01-preview API version. 
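Editor's note: the on-your-data samples in this diff build a small chain of objects before the service is created, and that chain is easy to lose across the hunks. The following is a condensed sketch of that wiring; it reuses only the classes and helper calls that appear in the sample itself (AzureAISearchDataSources, AzureDataSources, ExtraBody, AzureChatPromptExecutionSettings, AzureChatCompletion), so it introduces no API surface beyond what the diff shows.

# Condensed sketch of the "Azure OpenAI on your data" wiring used by these samples.
import semantic_kernel as sk
import semantic_kernel.connectors.ai.open_ai as sk_oai
from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import (
    AzureAISearchDataSources,
    AzureChatPromptExecutionSettings,
    AzureDataSources,
    ExtraBody,
)

kernel = sk.Kernel()
deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env()

# 1. Describe the Azure AI Search index (endpoint, key, index name come from .env helpers).
az_source = AzureAISearchDataSources(**sk.azure_aisearch_settings_from_dot_env_as_dict())
# 2. Wrap the index as a data source and attach it to the request body via extra_body.
az_data = AzureDataSources(type="AzureCognitiveSearch", parameters=az_source)
extra = ExtraBody(dataSources=[az_data])
req_settings = AzureChatPromptExecutionSettings(extra_body=extra)
prompt_config = sk.PromptTemplateConfig(execution_settings=req_settings)
# 3. The service itself must opt in to the extensions API and the preview API version.
chat_service = sk_oai.AzureChatCompletion(
    deployment_name=deployment,
    api_key=api_key,
    endpoint=endpoint,
    api_version="2023-12-01-preview",
    use_extensions=True,
)
kernel.add_chat_service("chat-gpt", chat_service)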
+chat_service = sk_oai.AzureChatCompletion( + deployment_name=deployment, + api_key=api_key, + endpoint=endpoint, + api_version="2023-12-01-preview", + use_extensions=True, +) +kernel.add_chat_service("chat-gpt", chat_service) + +prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config) + +prompt_template.add_user_message("Hi there, who are you?") +prompt_template.add_assistant_message("I am an AI assistant here to answer your questions.") + +function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) +chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) +context = kernel.create_new_context() + + +async def chat() -> bool: + context_vars = sk.ContextVariables() + + try: + user_input = input("User:> ") + context_vars["user_input"] = user_input + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + + if user_input == "exit": + print("\n\nExiting chat...") + return False + + # Non streaming + # answer = await kernel.run(chat_function, input_vars=context_vars) + # print(f"Assistant:> {answer}") + + answer = kernel.run_stream(chat_function, input_vars=context_vars, input_context=context) + print("Assistant:> ", end="") + async for message in answer: + print(message, end="") + print("\n") + # The tool message containing cited sources is available in the context + print(f"Tool:> {context.objects.get('tool_message')}") + return True + + +async def main() -> None: + chatting = True + while chatting: + chatting = await chat() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api_function_calling.py b/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api_function_calling.py new file mode 100644 index 000000000000..3811ff7ec145 --- /dev/null +++ b/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api_function_calling.py @@ -0,0 +1,121 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os +from typing import Tuple + +import semantic_kernel as sk +import semantic_kernel.connectors.ai.open_ai as sk_oai +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureAISearchDataSources, + AzureChatPromptExecutionSettings, + AzureDataSources, + ExtraBody, +) +from semantic_kernel.connectors.ai.open_ai.semantic_functions.open_ai_chat_prompt_template import ( + OpenAIChatPromptTemplate, +) +from semantic_kernel.connectors.ai.open_ai.utils import ( + chat_completion_with_function_call, + get_tool_call_object, +) +from semantic_kernel.core_plugins.time_plugin import TimePlugin + +kernel = sk.Kernel() + +# Load Azure OpenAI Settings +deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env() + +# Create the data source settings +azure_ai_search_settings = sk.azure_aisearch_settings_from_dot_env_as_dict() +az_source = AzureAISearchDataSources(**azure_ai_search_settings) +az_data = AzureDataSources(type="AzureCognitiveSearch", parameters=az_source) +extra = ExtraBody(dataSources=[az_data]) +req_settings = AzureChatPromptExecutionSettings(extra_body=extra) + +# For example, AI Search index may contain the following document: + +# Emily and David, two passionate scientists, met during a research expedition to Antarctica. 
+# Bonded by their love for the natural world and shared curiosity, they uncovered a +# groundbreaking phenomenon in glaciology that could potentially reshape our understanding of climate change. + +chat_service = sk_oai.AzureChatCompletion( + deployment_name=deployment, + api_key=api_key, + endpoint=endpoint, + api_version="2023-12-01-preview", + use_extensions=True, +) +kernel.add_chat_service( + "chat-gpt", + chat_service, +) + +plugins_directory = os.path.join(__file__, "../../../../samples/plugins") +# adding plugins to the kernel +# the joke plugin in the FunPlugins is a semantic plugin and has the function calling disabled. +kernel.import_semantic_plugin_from_directory(plugins_directory, "FunPlugin") +# the math plugin is a core plugin and has the function calling enabled. +kernel.import_plugin(TimePlugin(), plugin_name="time") + +# enabling or disabling function calling is done by setting the tool_choice parameter for the completion. +# when the tool_choice parameter is set to "auto" the model will decide which function to use, if any. +# if you only want to use a specific tool, set the name of that tool in this parameter, +# the format for that is 'PluginName-FunctionName', (i.e. 'math-Add'). +# if the model or api version do not support this you will get an error. +req_settings.tool_choice = "auto" +prompt_config = sk.PromptTemplateConfig(execution_settings=req_settings) +prompt_template = OpenAIChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config) +prompt_template.add_user_message("Hi there, who are you?") +prompt_template.add_assistant_message("I am an AI assistant here to answer your questions.") + +function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) +chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) + +# calling the chat, you could add a overloaded version of the settings here, +# to enable or disable function calling or set the function calling to a specific plugin. +# see the openai_function_calling example for how to use this with a unrelated function definition +filter = {"exclude_plugin": ["ChatBot"]} +functions = get_tool_call_object(kernel, filter) + + +async def chat(context: sk.KernelContext) -> Tuple[bool, sk.KernelContext]: + try: + user_input = input("User:> ") + context.variables["user_input"] = user_input + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False, None + except EOFError: + print("\n\nExiting chat...") + return False, None + + if user_input == "exit": + print("\n\nExiting chat...") + return False, None + + context = await chat_completion_with_function_call( + kernel, + chat_plugin_name="ChatBot", + chat_function_name="Chat", + context=context, + functions=functions, + ) + print(f"Assistant:> {context.result}") + return True, context + + +async def main() -> None: + chatting = True + context = kernel.create_new_context() + print( + "Welcome to the chat bot!\ +\n Type 'exit' to exit.\ +\n Try a time question to see the function calling in action (i.e. what day is it?)." 
+ ) + while chatting: + chatting, context = await chat(context) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api_vector_search.py b/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api_vector_search.py new file mode 100644 index 000000000000..ffd1bef23a80 --- /dev/null +++ b/python/samples/kernel-syntax-examples/azure_chat_gpt_with_data_api_vector_search.py @@ -0,0 +1,107 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +import semantic_kernel as sk +import semantic_kernel.connectors.ai.open_ai as sk_oai +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureAISearchDataSources, + AzureChatPromptExecutionSettings, + AzureDataSources, + ExtraBody, +) + +kernel = sk.Kernel() + +# Load Azure OpenAI Settings +deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env() + +# For example, AI Search index may contain the following document: + +# Emily and David, two passionate scientists, met during a research expedition to Antarctica. +# Bonded by their love for the natural world and shared curiosity, they uncovered a +# groundbreaking phenomenon in glaciology that could potentially reshape our understanding of climate change. + +azure_ai_search_settings = sk.azure_aisearch_settings_from_dot_env_as_dict() + +# This example index has fields "title", "chunk", and "vector". +# Add fields mapping to the settings. +azure_ai_search_settings["fieldsMapping"] = { + "titleField": "title", + "contentFields": ["chunk"], + "vectorFields": ["vector"], +} +# Add Ada embedding deployment name to the settings and use vector search. +azure_ai_search_settings["embeddingDependency"] = { + "type": "DeploymentName", + "deploymentName": "ada-002", +} +azure_ai_search_settings["queryType"] = "vector" + +# Create the data source settings +az_source = AzureAISearchDataSources(**azure_ai_search_settings) +az_data = AzureDataSources(type="AzureCognitiveSearch", parameters=az_source) +extra = ExtraBody(dataSources=[az_data]) +req_settings = AzureChatPromptExecutionSettings(extra_body=extra) +prompt_config = sk.PromptTemplateConfig(execution_settings=req_settings) + +# When using data, set use_extensions=True and use the 2023-12-01-preview API version. 
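Relative to the keyword-search sample earlier in this diff, the vector-search variant only changes the search-related settings: the fields mapping gains a vectorFields entry, an embedding deployment is named so the service can embed the incoming query, and queryType is switched to "vector". A short recap of just that delta, using the sample's own keys and its "ada-002" deployment name (substitute your own embedding deployment):

# Extra settings that switch the on-your-data sample from keyword to vector search.
import semantic_kernel as sk

azure_ai_search_settings = sk.azure_aisearch_settings_from_dot_env_as_dict()
azure_ai_search_settings["fieldsMapping"] = {
    "titleField": "title",
    "contentFields": ["chunk"],
    "vectorFields": ["vector"],  # index field that stores the embeddings
}
azure_ai_search_settings["embeddingDependency"] = {
    "type": "DeploymentName",
    "deploymentName": "ada-002",  # embedding deployment used to vectorize the query
}
azure_ai_search_settings["queryType"] = "vector"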
+chat_service = sk_oai.AzureChatCompletion( + deployment_name=deployment, + api_key=api_key, + endpoint=endpoint, + api_version="2023-12-01-preview", + use_extensions=True, +) +kernel.add_chat_service("chat-gpt", chat_service) + + +prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config) + +prompt_template.add_user_message("Hi there, who are you?") +prompt_template.add_assistant_message("I am an AI assistant here to answer your questions.") + +function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) +chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) +context = kernel.create_new_context() + + +async def chat() -> bool: + context_vars = sk.ContextVariables() + + try: + user_input = input("User:> ") + context_vars["user_input"] = user_input + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + + if user_input == "exit": + print("\n\nExiting chat...") + return False + + # Non streaming + # answer = await kernel.run(chat_function, input_vars=context_vars) + # print(f"Assistant:> {answer}") + + answer = kernel.run_stream(chat_function, input_vars=context_vars, input_context=context) + print("Assistant:> ", end="") + async for message in answer: + print(message, end="") + print("\n") + # The tool message containing cited sources is available in the context + print(f"Tool:> {context.objects.get('tool_message')}") + return True + + +async def main() -> None: + chatting = True + while chatting: + chatting = await chat() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/azure_cognitive_search_memory.py b/python/samples/kernel-syntax-examples/azure_cognitive_search_memory.py index 7f7e9363bd31..5542efeeb22b 100644 --- a/python/samples/kernel-syntax-examples/azure_cognitive_search_memory.py +++ b/python/samples/kernel-syntax-examples/azure_cognitive_search_memory.py @@ -18,23 +18,15 @@ async def populate_memory(kernel: sk.Kernel) -> None: # Add some documents to the ACS semantic memory - await kernel.memory.save_information_async( - COLLECTION_NAME, id="info1", text="My name is Andrea" - ) - await kernel.memory.save_information_async( - COLLECTION_NAME, id="info2", text="I currently work as a tour guide" - ) - await kernel.memory.save_information_async( - COLLECTION_NAME, id="info3", text="I've been living in Seattle since 2005" - ) - await kernel.memory.save_information_async( + await kernel.memory.save_information(COLLECTION_NAME, id="info1", text="My name is Andrea") + await kernel.memory.save_information(COLLECTION_NAME, id="info2", text="I currently work as a tour guide") + await kernel.memory.save_information(COLLECTION_NAME, id="info3", text="I've been living in Seattle since 2005") + await kernel.memory.save_information( COLLECTION_NAME, id="info4", text="I visited France and Italy five times since 2015", ) - await kernel.memory.save_information_async( - COLLECTION_NAME, id="info5", text="My family is from New York" - ) + await kernel.memory.save_information(COLLECTION_NAME, id="info5", text="My family is from New York") async def search_acs_memory_questions(kernel: sk.Kernel) -> None: @@ -48,7 +40,7 @@ async def search_acs_memory_questions(kernel: sk.Kernel) -> None: for question in questions: print(f"Question: {question}") - result = await kernel.memory.search_async(COLLECTION_NAME, question) + result = await kernel.memory.search(COLLECTION_NAME, 
question) print(f"Answer: {result[0].text}\n") @@ -94,7 +86,7 @@ async def main() -> None: print("Asking questions... (manually)") await search_acs_memory_questions(kernel) - await connector.close_async() + await connector.close() if __name__ == "__main__": diff --git a/python/samples/kernel-syntax-examples/bing_search_plugin.py b/python/samples/kernel-syntax-examples/bing_search_plugin.py new file mode 100644 index 000000000000..d1d2f4cf3af4 --- /dev/null +++ b/python/samples/kernel-syntax-examples/bing_search_plugin.py @@ -0,0 +1,66 @@ +import os + +from dotenv import load_dotenv + +import semantic_kernel as sk +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.connectors.search_engine import BingConnector +from semantic_kernel.core_plugins import WebSearchEnginePlugin + +load_dotenv() + + +async def main(): + kernel = sk.Kernel() + deployment, key, endpoint, api_version = sk.azure_openai_settings_from_dot_env(include_api_version=True) + kernel.add_chat_service( + "chat-gpt", + AzureChatCompletion( + deployment_name=deployment, + api_key=key, + endpoint=endpoint, + api_version=api_version, + ), + ) + connector = BingConnector(api_key=os.getenv("BING_API_KEY")) + web_plugin = kernel.import_plugin(WebSearchEnginePlugin(connector), "WebSearch") + + prompt = "Who is Leonardo DiCaprio's current girlfriend?" + search = web_plugin["searchAsync"] + result = await search.invoke(prompt) + print(result) + + """ + Output: + ["Celebrity Celebrity News Everything You Need to Know About Leonardo DiCaprio and Camila Morrone's + Relationship From the beginning of their romance to today, we track their relationship here. By..."] + """ + + prompt = """ + Answer the question using only the data that is provided in the data section. + Do not use any prior knowledge to answer the question. + Data: {{WebSearch.SearchAsync "What is semantic kernel?"}} + Question: What is semantic kernel? + Answer: + """ + + qna = kernel.create_semantic_function(prompt, temperature=0.2) + context = kernel.create_new_context() + context["num_results"] = "10" + context["offset"] = "0" + result = await qna.invoke(context=context) + print(result) + + """ + Output: + Semantic Kernel is an open-source SDK that lets you easily combine AI services like OpenAI, + Azure OpenAI, and Hugging Face with conventional programming languages like C# and Python. + By doing so, you can create AI apps that combine the best of both worlds. + Semantic Kernel is at the center of the copilot stack. 
+ """ + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/bing_search_skill.py b/python/samples/kernel-syntax-examples/bing_search_skill.py deleted file mode 100644 index af1107d36e4d..000000000000 --- a/python/samples/kernel-syntax-examples/bing_search_skill.py +++ /dev/null @@ -1,60 +0,0 @@ -import os - -from dotenv import load_dotenv - -import semantic_kernel as sk -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion -from semantic_kernel.connectors.search_engine import BingConnector -from semantic_kernel.core_skills import WebSearchEngineSkill - -load_dotenv() - - -async def main(): - kernel = sk.Kernel() - api_key, org_id = sk.openai_settings_from_dot_env() - kernel.add_chat_service( - "chat-gpt", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) - ) - connector = BingConnector(api_key=os.getenv("BING_API_KEY")) - web_skill = kernel.import_skill(WebSearchEngineSkill(connector), "WebSearch") - - prompt = "Who is Leonardo DiCaprio's current girlfriend?" - search_async = web_skill["searchAsync"] - result = await search_async.invoke_async(prompt) - print(result) - - """ - Output: - ["Celebrity Celebrity News Everything You Need to Know About Leonardo DiCaprio and Camila Morrone's - Relationship From the beginning of their romance to today, we track their relationship here. By..."] - """ - - prompt = """ - Answer the question using only the data that is provided in the data section. - Do not use any prior knowledge to answer the question. - Data: {{WebSearch.SearchAsync "What is semantic kernel?"}} - Question: What is semantic kernel? - Answer: - """ - - qna = kernel.create_semantic_function(prompt, temperature=0.2) - context = kernel.create_new_context() - context["num_results"] = "10" - context["offset"] = "0" - result = await qna.invoke_async(context=context) - print(result) - - """ - Output: - Semantic Kernel is an open-source SDK that lets you easily combine AI services like OpenAI, - Azure OpenAI, and Hugging Face with conventional programming languages like C# and Python. - By doing so, you can create AI apps that combine the best of both worlds. - Semantic Kernel is at the center of the copilot stack. 
- """ - - -if __name__ == "__main__": - import asyncio - - asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/chat.py b/python/samples/kernel-syntax-examples/chat.py index ee149b00b383..81848315ea50 100644 --- a/python/samples/kernel-syntax-examples/chat.py +++ b/python/samples/kernel-syntax-examples/chat.py @@ -18,17 +18,11 @@ kernel = sk.Kernel() api_key, org_id = sk.openai_settings_from_dot_env() -kernel.add_chat_service( - "chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) -) +kernel.add_chat_service("chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)) -prompt_config = sk.PromptTemplateConfig.from_completion_parameters( - max_tokens=2000, temperature=0.7, top_p=0.4 -) +prompt_config = sk.PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.4) -prompt_template = sk.PromptTemplate( - sk_prompt, kernel.prompt_template_engine, prompt_config -) +prompt_template = sk.PromptTemplate(sk_prompt, kernel.prompt_template_engine, prompt_config) function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) @@ -49,7 +43,7 @@ async def chat(context_vars: sk.ContextVariables) -> bool: print("\n\nExiting chat...") return False - answer = await kernel.run_async(chat_function, input_vars=context_vars) + answer = await kernel.run(chat_function, input_vars=context_vars) context_vars["chat_history"] += f"\nUser:> {user_input}\nChatBot:> {answer}\n" print(f"ChatBot:> {answer}") diff --git a/python/samples/kernel-syntax-examples/chat_gpt_api.py b/python/samples/kernel-syntax-examples/chat_gpt_api.py index 4ac3ec61726a..7e4fa2816786 100644 --- a/python/samples/kernel-syntax-examples/chat_gpt_api.py +++ b/python/samples/kernel-syntax-examples/chat_gpt_api.py @@ -17,23 +17,15 @@ kernel = sk.Kernel() api_key, org_id = sk.openai_settings_from_dot_env() -kernel.add_chat_service( - "chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) -) +kernel.add_chat_service("chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)) -prompt_config = sk.PromptTemplateConfig.from_completion_parameters( - max_tokens=2000, temperature=0.7, top_p=0.8 -) +prompt_config = sk.PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.8) -prompt_template = sk.ChatPromptTemplate( - "{{$user_input}}", kernel.prompt_template_engine, prompt_config -) +prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config) prompt_template.add_system_message(system_message) prompt_template.add_user_message("Hi there, who are you?") -prompt_template.add_assistant_message( - "I am Mosscap, a chat bot. I'm trying to figure out what people need." -) +prompt_template.add_assistant_message("I am Mosscap, a chat bot. 
I'm trying to figure out what people need.") function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) @@ -56,7 +48,7 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - answer = await kernel.run_async(chat_function, input_vars=context_vars) + answer = await kernel.run(chat_function, input_vars=context_vars) print(f"Mosscap:> {answer}") return True diff --git a/python/samples/kernel-syntax-examples/chat_gpt_api_function_calling.py b/python/samples/kernel-syntax-examples/chat_gpt_api_function_calling.py index df0d3d2e9c21..ee3af7cca908 100644 --- a/python/samples/kernel-syntax-examples/chat_gpt_api_function_calling.py +++ b/python/samples/kernel-syntax-examples/chat_gpt_api_function_calling.py @@ -11,9 +11,9 @@ ) from semantic_kernel.connectors.ai.open_ai.utils import ( chat_completion_with_function_call, - get_function_calling_object, + get_tool_call_object, ) -from semantic_kernel.core_skills import MathSkill +from semantic_kernel.core_plugins import MathPlugin system_message = """ You are a chat bot. Your name is Mosscap and @@ -30,56 +30,53 @@ kernel = sk.Kernel() +# Note: the underlying gpt-35/gpt-4 model version needs to be at least version 0613 to support tools. deployment_name, api_key, endpoint = sk.azure_openai_settings_from_dot_env() -api_version = "2023-07-01-preview" +api_version = "2023-12-01-preview" kernel.add_chat_service( "chat-gpt", sk_oai.AzureChatCompletion( deployment_name, endpoint, - api_key, + api_key=api_key, api_version=api_version, ), ) -skills_directory = os.path.join(__file__, "../../../../samples/skills") -# adding skills to the kernel -# the joke skill in the FunSkills is a semantic skill and has the function calling disabled. -kernel.import_semantic_skill_from_directory(skills_directory, "FunSkill") -# the math skill is a core skill and has the function calling enabled. -kernel.import_skill(MathSkill(), skill_name="math") +plugins_directory = os.path.join(__file__, "../../../../samples/plugins") +# adding plugins to the kernel +# the joke plugin in the FunPlugins is a semantic plugin and has the function calling disabled. +kernel.import_semantic_plugin_from_directory(plugins_directory, "FunPlugin") +# the math plugin is a core plugin and has the function calling enabled. +kernel.import_plugin(MathPlugin(), plugin_name="math") # enabling or disabling function calling is done by setting the function_call parameter for the completion. # when the function_call parameter is set to "auto" the model will decide which function to use, if any. # if you only want to use a specific function, set the name of that function in this parameter, -# the format for that is 'SkillName-FunctionName', (i.e. 'math-Add'). +# the format for that is 'PluginName-FunctionName', (i.e. 'math-Add'). # if the model or api version do not support this you will get an error. 
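As a hedged illustration of the pinning behavior described in that comment (this snippet is not part of the diff), the updated execution-settings style shown below can be restricted to a single function by naming it in tool_choice, assuming tool_choice accepts the 'PluginName-FunctionName' form the comment describes and that the same services and plugins are registered as in the sample:

# Hypothetical variation: force the model to call only math-Add instead of letting it choose a tool.
import semantic_kernel as sk
import semantic_kernel.connectors.ai.open_ai as sk_oai
from semantic_kernel.connectors.ai.open_ai.utils import get_tool_call_object

kernel = sk.Kernel()  # assumes the same chat service and plugins registered as in the sample
req_settings = sk_oai.AzureChatPromptExecutionSettings(
    service_id="chat-gpt",
    max_tokens=2000,
    temperature=0.7,
    top_p=0.8,
    tool_choice="math-Add",  # 'PluginName-FunctionName' pins the call to one function
    tools=get_tool_call_object(kernel, {"exclude_plugin": ["ChatBot"]}),
)
prompt_config = sk.PromptTemplateConfig(execution_settings=req_settings)

Leaving tool_choice as "auto", as the updated sample does, lets the model decide which of the registered tools to call, if any.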
-prompt_config = sk.PromptTemplateConfig.from_completion_parameters( - max_tokens=2000, - temperature=0.7, - top_p=0.8, - function_call="auto", - chat_system_prompt=system_message, -) -prompt_template = OpenAIChatPromptTemplate( - "{{$user_input}}", kernel.prompt_template_engine, prompt_config +prompt_config = sk.PromptTemplateConfig( + execution_settings=sk_oai.AzureChatPromptExecutionSettings( + service_id="chat-gpt", + ai_model_id=deployment_name, + max_tokens=2000, + temperature=0.7, + top_p=0.8, + tool_choice="auto", + tools=get_tool_call_object(kernel, {"exclude_plugin": ["ChatBot"]}), + ) ) + +prompt_template = OpenAIChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config) +prompt_template.add_system_message(system_message) prompt_template.add_user_message("Hi there, who are you?") -prompt_template.add_assistant_message( - "I am Mosscap, a chat bot. I'm trying to figure out what people need." -) +prompt_template.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.") function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) -# calling the chat, you could add a overloaded version of the settings here, -# to enable or disable function calling or set the function calling to a specific skill. -# see the openai_function_calling example for how to use this with a unrelated function definition -filter = {"exclude_skill": ["ChatBot"]} -functions = get_function_calling_object(kernel, filter) - -async def chat(context: sk.SKContext) -> Tuple[bool, sk.SKContext]: +async def chat(context: sk.KernelContext) -> Tuple[bool, sk.KernelContext]: try: user_input = input("User:> ") context.variables["user_input"] = user_input @@ -96,10 +93,9 @@ async def chat(context: sk.SKContext) -> Tuple[bool, sk.SKContext]: context = await chat_completion_with_function_call( kernel, - chat_skill_name="ChatBot", + chat_plugin_name="ChatBot", chat_function_name="Chat", context=context, - functions=functions, ) print(f"Mosscap:> {context.result}") return True, context diff --git a/python/samples/kernel-syntax-examples/google_palm_chat.py b/python/samples/kernel-syntax-examples/google_palm_chat.py index 36d2752d0682..cce6661e4ca7 100644 --- a/python/samples/kernel-syntax-examples/google_palm_chat.py +++ b/python/samples/kernel-syntax-examples/google_palm_chat.py @@ -4,24 +4,24 @@ import semantic_kernel as sk import semantic_kernel.connectors.ai.google_palm as sk_gp -from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings +from semantic_kernel.connectors.ai.google_palm.gp_prompt_execution_settings import ( + GooglePalmChatPromptExecutionSettings, +) async def chat_request_example(api_key): - palm_chat_completion = sk_gp.GooglePalmChatCompletion( - "models/chat-bison-001", api_key - ) - settings = ChatRequestSettings() + palm_chat_completion = sk_gp.GooglePalmChatCompletion("models/chat-bison-001", api_key) + settings = GooglePalmChatPromptExecutionSettings() settings.temperature = 1 chat_messages = list() user_mssg = "I'm planning a vacation. Which are some must-visit places in Europe?" chat_messages.append(("user", user_mssg)) - answer = await palm_chat_completion.complete_chat_async(chat_messages, settings) + answer = await palm_chat_completion.complete_chat(chat_messages, settings) chat_messages.append(("assistant", str(answer))) user_mssg = "Where should I go in France?" 
chat_messages.append(("user", user_mssg)) - answer = await palm_chat_completion.complete_chat_async(chat_messages, settings) + answer = await palm_chat_completion.complete_chat(chat_messages, settings) chat_messages.append(("assistant", str(answer))) context_vars = sk.ContextVariables() diff --git a/python/samples/kernel-syntax-examples/google_palm_chat_with_memory.py b/python/samples/kernel-syntax-examples/google_palm_chat_with_memory.py index 13e4c71afa3f..b279da4b7fef 100644 --- a/python/samples/kernel-syntax-examples/google_palm_chat_with_memory.py +++ b/python/samples/kernel-syntax-examples/google_palm_chat_with_memory.py @@ -13,26 +13,16 @@ palm_chat_completion = sk_gp.GooglePalmChatCompletion("models/chat-bison-001", apikey) kernel.add_chat_service("models/chat-bison-001", palm_chat_completion) kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore()) -kernel.import_skill(sk.core_skills.TextMemorySkill()) +kernel.import_plugin(sk.core_plugins.TextMemoryPlugin(), "TextMemoryPlugin") async def populate_memory(kernel: sk.Kernel) -> None: # Add some documents to the semantic memory - await kernel.memory.save_information_async( - "aboutMe", id="info1", text="My name is Andrea" - ) - await kernel.memory.save_information_async( - "aboutMe", id="info2", text="I currently work as a tour guide" - ) - await kernel.memory.save_information_async( - "aboutMe", id="info3", text="My favorite hobby is hiking" - ) - await kernel.memory.save_information_async( - "aboutMe", id="info4", text="I visitied Iceland last year." - ) - await kernel.memory.save_information_async( - "aboutMe", id="info5", text="My family is from New York" - ) + await kernel.memory.save_information("aboutMe", id="info1", text="My name is Andrea") + await kernel.memory.save_information("aboutMe", id="info2", text="I currently work as a tour guide") + await kernel.memory.save_information("aboutMe", id="info3", text="My favorite hobby is hiking") + await kernel.memory.save_information("aboutMe", id="info4", text="I visitied Iceland last year.") + await kernel.memory.save_information("aboutMe", id="info5", text="My family is from New York") async def search_memory_examples(kernel: sk.Kernel) -> None: @@ -46,13 +36,13 @@ async def search_memory_examples(kernel: sk.Kernel) -> None: for question in questions: print(f"Question: {question}") - result = await kernel.memory.search_async("aboutMe", question) + result = await kernel.memory.search("aboutMe", question) print(f"Answer: {result}\n") async def setup_chat_with_memory( kernel: sk.Kernel, -) -> Tuple[sk.SKFunctionBase, sk.SKContext]: +) -> Tuple[sk.KernelFunction, sk.KernelContext]: """ When using Google PaLM to chat with memories, a chat prompt template is essential; otherwise, the kernel will send text prompts to the Google PaLM @@ -77,17 +67,13 @@ async def setup_chat_with_memory( """.strip() - prompt_config = sk.PromptTemplateConfig.from_completion_parameters( - max_tokens=2000, temperature=0.7, top_p=0.8 - ) + prompt_config = sk.PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.8) prompt_template = sk.ChatPromptTemplate( # Create the chat prompt template "{{$user_input}}", kernel.prompt_template_engine, prompt_config ) prompt_template.add_system_message(sk_prompt) # Add the memory as a system message function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) - chat_func = kernel.register_semantic_function( - None, "ChatWithMemory", function_config - ) + chat_func = kernel.register_semantic_function(None, 
"ChatWithMemory", function_config) context = kernel.create_new_context() context["fact1"] = "what is my name?" @@ -96,17 +82,15 @@ async def setup_chat_with_memory( context["fact4"] = "where did I travel last year?" context["fact5"] = "what do I do for work?" - context[sk.core_skills.TextMemorySkill.COLLECTION_PARAM] = "aboutMe" - context[sk.core_skills.TextMemorySkill.RELEVANCE_PARAM] = 0.6 + context[sk.core_plugins.TextMemoryPlugin.COLLECTION_PARAM] = "aboutMe" + context[sk.core_plugins.TextMemoryPlugin.RELEVANCE_PARAM] = 0.6 context["chat_history"] = "" return chat_func, context -async def chat( - kernel: sk.Kernel, chat_func: sk.SKFunctionBase, context: sk.SKContext -) -> bool: +async def chat(kernel: sk.Kernel, chat_func: sk.KernelFunction, context: sk.KernelContext) -> bool: try: user_input = input("User:> ") context["user_input"] = user_input @@ -121,7 +105,7 @@ async def chat( print("\n\nExiting chat...") return False - answer = await kernel.run_async(chat_func, input_vars=context.variables) + answer = await kernel.run(chat_func, input_vars=context.variables) context["chat_history"] += f"\nUser:> {user_input}\nChatBot:> {answer}\n" print(f"ChatBot:> {answer}") diff --git a/python/samples/kernel-syntax-examples/google_palm_chat_with_plugin.py b/python/samples/kernel-syntax-examples/google_palm_chat_with_plugin.py new file mode 100644 index 000000000000..f2725b0263bd --- /dev/null +++ b/python/samples/kernel-syntax-examples/google_palm_chat_with_plugin.py @@ -0,0 +1,71 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +import semantic_kernel as sk +import semantic_kernel.connectors.ai.google_palm as sk_gp + +""" +System messages prime the assistant with different personalities or behaviors. +The system message is added to the prompt template, and a chat history can be +added as well to provide further context. +A system message can only be used once at the start of the conversation, and +conversation history persists with the instance of GooglePalmChatCompletion. To +overwrite the system message and start a new conversation, you must create a new +instance of GooglePalmChatCompletion. +Sometimes, PaLM struggles to use the information in the prompt template. In this +case, it is recommended to experiment with the messages in the prompt template +or ask different questions. +""" + +system_message = """ +You are a chat bot. Your name is Blackbeard +and you speak in the style of a swashbuckling +pirate. You reply with brief, to-the-point answers +with no elaboration. Your full name is Captain +Bartholomew "Blackbeard" Thorne. 
+""" + +kernel = sk.Kernel() +api_key = sk.google_palm_settings_from_dot_env() +palm_chat_completion = sk_gp.GooglePalmChatCompletion("models/chat-bison-001", api_key) +kernel.add_chat_service("models/chat-bison-001", palm_chat_completion) +prompt_config = sk.PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.8) +prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config) +prompt_template.add_system_message(system_message) # Add the system message for context +prompt_template.add_user_message("Hi there, my name is Andrea, who are you?") # Include a chat history +prompt_template.add_assistant_message("I am Blackbeard.") +function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) +chat_function = kernel.register_semantic_function("PiratePlugin", "Chat", function_config) + + +async def chat() -> bool: + context_vars = sk.ContextVariables() + + try: + user_input = input("User:> ") + context_vars["user_input"] = user_input + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + + if user_input == "exit": + print("\n\nExiting chat...") + return False + + answer = await kernel.run(chat_function, input_vars=context_vars) + print(f"Blackbeard:> {answer}") + return True + + +async def main() -> None: + chatting = True + while chatting: + chatting = await chat() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/google_palm_chat_with_skill.py b/python/samples/kernel-syntax-examples/google_palm_chat_with_skill.py deleted file mode 100644 index 042495699446..000000000000 --- a/python/samples/kernel-syntax-examples/google_palm_chat_with_skill.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -import semantic_kernel as sk -import semantic_kernel.connectors.ai.google_palm as sk_gp - -""" -System messages prime the assistant with different personalities or behaviors. -The system message is added to the prompt template, and a chat history can be -added as well to provide further context. -A system message can only be used once at the start of the conversation, and -conversation history persists with the instance of GooglePalmChatCompletion. To -overwrite the system message and start a new conversation, you must create a new -instance of GooglePalmChatCompletion. -Sometimes, PaLM struggles to use the information in the prompt template. In this -case, it is recommended to experiment with the messages in the prompt template -or ask different questions. -""" - -system_message = """ -You are a chat bot. Your name is Blackbeard -and you speak in the style of a swashbuckling -pirate. You reply with brief, to-the-point answers -with no elaboration. Your full name is Captain -Bartholomew "Blackbeard" Thorne. 
-""" - -kernel = sk.Kernel() -api_key = sk.google_palm_settings_from_dot_env() -palm_chat_completion = sk_gp.GooglePalmChatCompletion("models/chat-bison-001", api_key) -kernel.add_chat_service("models/chat-bison-001", palm_chat_completion) -prompt_config = sk.PromptTemplateConfig.from_completion_parameters( - max_tokens=2000, temperature=0.7, top_p=0.8 -) -prompt_template = sk.ChatPromptTemplate( - "{{$user_input}}", kernel.prompt_template_engine, prompt_config -) -prompt_template.add_system_message(system_message) # Add the system message for context -prompt_template.add_user_message( - "Hi there, my name is Andrea, who are you?" -) # Include a chat history -prompt_template.add_assistant_message("I am Blackbeard.") -function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) -chat_function = kernel.register_semantic_function( - "PirateSkill", "Chat", function_config -) - - -async def chat() -> bool: - context_vars = sk.ContextVariables() - - try: - user_input = input("User:> ") - context_vars["user_input"] = user_input - except KeyboardInterrupt: - print("\n\nExiting chat...") - return False - except EOFError: - print("\n\nExiting chat...") - return False - - if user_input == "exit": - print("\n\nExiting chat...") - return False - - answer = await kernel.run_async(chat_function, input_vars=context_vars) - print(f"Blackbeard:> {answer}") - return True - - -async def main() -> None: - chatting = True - while chatting: - chatting = await chat() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/google_palm_text_completion.py b/python/samples/kernel-syntax-examples/google_palm_text_completion.py index 664e1194bcf6..7a74e76faf98 100644 --- a/python/samples/kernel-syntax-examples/google_palm_text_completion.py +++ b/python/samples/kernel-syntax-examples/google_palm_text_completion.py @@ -4,27 +4,25 @@ import semantic_kernel as sk import semantic_kernel.connectors.ai.google_palm as sk_gp -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from semantic_kernel.connectors.ai.google_palm.gp_prompt_execution_settings import ( + GooglePalmPromptExecutionSettings, ) -async def text_completion_example_complete_async(kernel, api_key, user_mssg, settings): +async def text_completion_example_complete(kernel, api_key, user_mssg, settings): """ Complete a text prompt using the Google PaLM model and print the results. """ - palm_text_completion = sk_gp.GooglePalmTextCompletion( - "models/text-bison-001", api_key - ) + palm_text_completion = sk_gp.GooglePalmTextCompletion("models/text-bison-001", api_key) kernel.add_text_completion_service("models/text-bison-001", palm_text_completion) - answer = await palm_text_completion.complete_async(user_mssg, settings) + answer = await palm_text_completion.complete(user_mssg, settings) return answer async def main() -> None: kernel = sk.Kernel() apikey = sk.google_palm_settings_from_dot_env() - settings = CompleteRequestSettings() + settings = GooglePalmPromptExecutionSettings() user_mssg1 = ( "Sam has three boxes, each containing a certain number of coins. " @@ -33,19 +31,13 @@ async def main() -> None: "boxes have 98 coins in total. How many coins are there in each box? " "Think about it step by step, and show your work." 
) - response = await text_completion_example_complete_async( - kernel, apikey, user_mssg1, settings - ) + response = await text_completion_example_complete(kernel, apikey, user_mssg1, settings) print(f"User:> {user_mssg1}\n\nChatBot:> {response}\n") # Use temperature to influence the variance of the responses settings.number_of_responses = 3 settings.temperature = 1 - user_mssg2 = ( - "I need a concise answer. A common method for traversing a binary tree is" - ) - response = await text_completion_example_complete_async( - kernel, apikey, user_mssg2, settings - ) + user_mssg2 = "I need a concise answer. A common method for traversing a binary tree is" + response = await text_completion_example_complete(kernel, apikey, user_mssg2, settings) print(f"User:> {user_mssg2}\n\nChatBot:> {response}") return diff --git a/python/samples/kernel-syntax-examples/google_search_plugin.py b/python/samples/kernel-syntax-examples/google_search_plugin.py new file mode 100644 index 000000000000..00557f67e069 --- /dev/null +++ b/python/samples/kernel-syntax-examples/google_search_plugin.py @@ -0,0 +1,88 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os + +from dotenv import load_dotenv + +import semantic_kernel as sk +from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion +from semantic_kernel.connectors.search_engine import GoogleConnector +from semantic_kernel.core_plugins import WebSearchEnginePlugin + +load_dotenv() + + +async def main(): + kernel = sk.Kernel() + api_key, org_id = sk.openai_settings_from_dot_env() + kernel.add_chat_service("chat-gpt", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)) + + """ + Instantiate a Google Connector + Make sure to have the following keys in a .env file or set as environment variables + - GOOGLE_API_KEY + - GOOGLE_SEARCH_ENGINE_ID + + A Google Custom Search API has to be created in order to have an API key and a search engine ID. + To create a Google Custom Search API, follow the guide - https://developers.google.com/custom-search/v1/overview. + If you have already created the service, the credentials can be found in the Credentials tab on the page + https://console.cloud.google.com/apis/api/customsearch.googleapis.com + """ + connector = GoogleConnector( + api_key=os.getenv("GOOGLE_API_KEY"), + search_engine_id=os.getenv("GOOGLE_SEARCH_ENGINE_ID"), + ) + + # Import the WebSearchEnginePlugin and pass the Google Connector to it. + web_plugin = kernel.import_plugin(WebSearchEnginePlugin(connector), "WebSearch") + + # The search query + prompt = "Who is Leonardo DiCaprio's current girlfriend?" + search = web_plugin["searchAsync"] + + # By default, only one search result is provided + result = await search.invoke(prompt) + print(result) + + """ + Output: + ["Celebrity Celebrity News Everything You Need to Know About Leonardo DiCaprio and Camila Morrone's + Relationship From the beginning of their romance to today, we track their relationship here. By..."] + """ + + # Following example demonstrates the use of the plugin within a semantic function + prompt = """ + Answer the question using only the data that is provided in the data section. + Do not use any prior knowledge to answer the question. + Data: {{WebSearch.SearchAsync "What is semantic kernel?"}} + Question: What is semantic kernel? + Answer: + """ + + qna = kernel.create_semantic_function(prompt, temperature=0.2) + context = kernel.create_new_context() + + """ + Two context parameters can be passed to the search engine plugin. 
+ - num_results controls the number of results returned by the web search. + - offset controls the number of results to omit. + """ + context["num_results"] = "10" + context["offset"] = "0" + + result = await qna.invoke(context=context) + print(result) + + """ + Output: + Semantic Kernel is an open-source SDK that lets you easily combine AI services like OpenAI, + Azure OpenAI, and Hugging Face with conventional programming languages like C# and Python. + By doing so, you can create AI apps that combine the best of both worlds. + Semantic Kernel is at the center of the copilot stack. + """ + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/google_search_skill.py b/python/samples/kernel-syntax-examples/google_search_skill.py deleted file mode 100644 index 14ec3542e858..000000000000 --- a/python/samples/kernel-syntax-examples/google_search_skill.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import os - -from dotenv import load_dotenv - -import semantic_kernel as sk -from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion -from semantic_kernel.connectors.search_engine import GoogleConnector -from semantic_kernel.core_skills import WebSearchEngineSkill - -load_dotenv() - - -async def main(): - kernel = sk.Kernel() - api_key, org_id = sk.openai_settings_from_dot_env() - kernel.add_chat_service( - "chat-gpt", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) - ) - - """ - Instantiate a Google Connector - Make sure to have the following keys in a .env file or set as environment variables - - GOOGLE_API_KEY - - GOOGLE_SEARCH_ENGINE_ID - - A Google Custom Search API has to be created in order to have an API key and a search engine ID. - To create a Google Custom Search API, follow the guide - https://developers.google.com/custom-search/v1/overview. - If you have already created the service, the credentials can be found in the Credentials tab on the page - https://console.cloud.google.com/apis/api/customsearch.googleapis.com - """ - connector = GoogleConnector( - api_key=os.getenv("GOOGLE_API_KEY"), - search_engine_id=os.getenv("GOOGLE_SEARCH_ENGINE_ID"), - ) - - # Import the WebSearchEngineSkill and pass the Google Connector to it. - web_skill = kernel.import_skill(WebSearchEngineSkill(connector), "WebSearch") - - # The search query - prompt = "Who is Leonardo DiCaprio's current girlfriend?" - search_async = web_skill["searchAsync"] - - # By default, only one search result is provided - result = await search_async.invoke_async(prompt) - print(result) - - """ - Output: - ["Celebrity Celebrity News Everything You Need to Know About Leonardo DiCaprio and Camila Morrone's - Relationship From the beginning of their romance to today, we track their relationship here. By..."] - """ - - # Following example demonstrates the use of the skill within a semantic function - prompt = """ - Answer the question using only the data that is provided in the data section. - Do not use any prior knowledge to answer the question. - Data: {{WebSearch.SearchAsync "What is semantic kernel?"}} - Question: What is semantic kernel? - Answer: - """ - - qna = kernel.create_semantic_function(prompt, temperature=0.2) - context = kernel.create_new_context() - - """ - Two context parameters can be passed to the search engine skill. - - num_results controls the number of results returned by the web search. - - offset controls the number of results to omit. 
- """ - context["num_results"] = "10" - context["offset"] = "0" - - result = await qna.invoke_async(context=context) - print(result) - - """ - Output: - Semantic Kernel is an open-source SDK that lets you easily combine AI services like OpenAI, - Azure OpenAI, and Hugging Face with conventional programming languages like C# and Python. - By doing so, you can create AI apps that combine the best of both worlds. - Semantic Kernel is at the center of the copilot stack. - """ - - -if __name__ == "__main__": - import asyncio - - asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/grounded.py b/python/samples/kernel-syntax-examples/grounded.py new file mode 100644 index 000000000000..4baec7b2ac34 --- /dev/null +++ b/python/samples/kernel-syntax-examples/grounded.py @@ -0,0 +1,181 @@ +import asyncio +import logging + +import semantic_kernel as sk +from samples.utils import Colors +from semantic_kernel.connectors.ai.open_ai import ( + AzureChatCompletion, + OpenAIChatCompletion, +) + + +def get_grounding_text(): + return """I am by birth a Genevese, and my family is one of the most distinguished of that republic. +My ancestors had been for many years counsellors and syndics, and my father had filled several public situations +with honour and reputation. He was respected by all who knew him for his integrity and indefatigable attention +to public business. He passed his younger days perpetually occupied by the affairs of his country; a variety +of circumstances had prevented his marrying early, nor was it until the decline of life that he became a husband +and the father of a family. + +As the circumstances of his marriage illustrate his character, I cannot refrain from relating them. One of his +most intimate friends was a merchant who, from a flourishing state, fell, through numerous mischances, into poverty. +This man, whose name was Beaufort, was of a proud and unbending disposition and could not bear to live in poverty +and oblivion in the same country where he had formerly been distinguished for his rank and magnificence. Having +paid his debts, therefore, in the most honourable manner, he retreated with his daughter to the town of Lucerne, +where he lived unknown and in wretchedness. My father loved Beaufort with the truest friendship and was deeply +grieved by his retreat in these unfortunate circumstances. He bitterly deplored the false pride which led his friend +to a conduct so little worthy of the affection that united them. He lost no time in endeavouring to seek him out, +with the hope of persuading him to begin the world again through his credit and assistance. + +Beaufort had taken effectual measures to conceal himself, and it was ten months before my father discovered his +abode. Overjoyed at this discovery, he hastened to the house, which was situated in a mean street near the Reuss. +But when he entered, misery and despair alone welcomed him. Beaufort had saved but a very small sum of money from +the wreck of his fortunes, but it was sufficient to provide him with sustenance for some months, and in the meantime +he hoped to procure some respectable employment in a merchant's house. The interval was, consequently, spent in +inaction; his grief only became more deep and rankling when he had leisure for reflection, and at length it took +so fast hold of his mind that at the end of three months he lay on a bed of sickness, incapable of any exertion. 
+ +His daughter attended him with the greatest tenderness, but she saw with despair that their little fund was +rapidly decreasing and that there was no other prospect of support. But Caroline Beaufort possessed a mind of an +uncommon mould, and her courage rose to support her in her adversity. She procured plain work; she plaited straw +and by various means contrived to earn a pittance scarcely sufficient to support life. + +Several months passed in this manner. Her father grew worse; her time was more entirely occupied in attending him; +her means of subsistence decreased; and in the tenth month her father died in her arms, leaving her an orphan and +a beggar. This last blow overcame her, and she knelt by Beaufort's coffin weeping bitterly, when my father entered +the chamber. He came like a protecting spirit to the poor girl, who committed herself to his care; and after the +interment of his friend he conducted her to Geneva and placed her under the protection of a relation. Two years +after this event Caroline became his wife.""" + + +def setup(use_azure: bool = False): + logger = logging.getLogger() + logger.setLevel(logging.DEBUG) + kernel = sk.Kernel(log=logger) + + useAzureOpenAI = use_azure + + # Configure AI service used by the kernel + if useAzureOpenAI: + deployment, api_key, endpoint = sk.azure_openai_settings_from_dot_env() + kernel.add_chat_service( + "chat_completion", + AzureChatCompletion( + deployment_name=deployment, + endpoint=endpoint, + api_key=api_key, + api_version="2023-12-01-preview", + log=logger, + ), + ) + else: + api_key, org_id = sk.openai_settings_from_dot_env() + kernel.add_chat_service( + "chat-gpt", + OpenAIChatCompletion(ai_model_id="gpt-3.5-turbo", api_key=api_key, org_id=org_id), + ) + + # note: using plugins from the samples folder + plugins_directory = "../samples/plugins/" + + grounding_semantic_functions = kernel.import_semantic_plugin_from_directory(plugins_directory, "GroundingPlugin") + + # entity_extraction = grounding_semantic_functions["ExtractEntities"] + # reference_check = grounding_semantic_functions["ReferenceCheckEntities"] + # entity_excision = grounding_semantic_functions["ExciseEntities"] + return kernel, grounding_semantic_functions + + +def get_summary_text(): + summary_text = """My father, a respected resident of Milan, was a close friend of a merchant named Beaufort who, after a series of misfortunes, moved to Zurich in poverty. My father was upset by his friend's troubles and sought him out, finding him in a mean street. Beaufort had saved a small sum of money, but it was not enough to support him and his daughter, Mary. Mary procured work to eke out a living, but after ten months her father died, leaving her a beggar.
My father came to her aid and two years later they married when they visited Rome.""" # noqa: E501 + + summary_text = summary_text.replace("\n", " ").replace(" ", " ") + return summary_text + + +async def run_entity_extraction(kernel, semantic_functions, summary_text): + context = kernel.create_new_context() + context["topic"] = "people and places" + context["example_entities"] = "John, Jane, mother, brother, Paris, Rome" + + extraction_result = semantic_functions["ExtractEntities"](summary_text, context=context) + + return extraction_result, context + + +async def run_reference_check(semantic_functions, extraction_result, context): + context["reference_context"] = get_grounding_text() + + grounding_result = semantic_functions["ReferenceCheckEntities"](extraction_result.result, context=context) + context["ungrounded_entities"] = grounding_result.result + return grounding_result, context + + +async def run_entity_excision(semantic_functions, summary_text, context): + excision_result = semantic_functions["ExciseEntities"](summary_text, context=context) + return excision_result, context + + +async def run_grounding(use_azure: bool = False): + kernel, semantic_functions = setup(use_azure) + print(f"\n{Colors.CBOLD}Groundedness Checking Plugins\n{Colors.CEND}") + print(f"\n{ '-'*80 }\n") + print( + f"""{Colors.CGREEN}A well-known problem with large language models (LLMs) is that they make things up. These are sometimes called 'hallucinations' but a safer (and less anthropomorphic) term is 'ungrounded addition' - something in the text which cannot be firmly established. When attempting to establish whether or not something in an LLM response is 'true' we can either check for it in the supplied prompt (this is called 'narrow grounding') or use our general knowledge ('broad grounding'). Note that narrow grounding can lead to things being classified as 'true, but ungrounded.' For example "I live in Switzerland" is **not** _narrowly_ grounded in "I live in Geneva" even though it must be true (it **is** _broadly_ grounded). # noqa: E501 + +In this sample we run a simple grounding pipeline, to see if a summary text has any ungrounded additions as compared to the original, and use this information to improve the summary text. This can be done in three stages: # noqa: E501 + +1. Make a list of the entities in the summary text +1. Check to see if these entities appear in the original (grounding) text +1. Remove the ungrounded entities from the summary text + +What is an 'entity' in this context? In its simplest form, it's a named object such as a person or place (so 'Dean' or 'Seattle'). However, the idea could be a _claim_ which relates concepts (such as 'Dean lives near Seattle'). In this sample, we will keep to the simpler case of named objects.""" # noqa: E501 + ) + + print(f"\nThe grounding text: \n{Colors.CGREY}{get_grounding_text()}{Colors.CEND}") + + print(f"\n{ '-'*80 }\n") + summary_text = get_summary_text() + print(f"Summary text: \n{Colors.CBLUE}{summary_text}{Colors.CEND}") + print(f"\n{ '-'*80 }\n") + print( + f"""{Colors.CGREEN}Some things to note: + +- The implied residence of Geneva has been changed to Milan +- Lucerne has been changed to Zurich +- Caroline has been renamed as Mary +- A reference to Rome has been added + + +The grounding plugin has three stages: + +1. Extract entities from a summary text +2. Perform a reference check against the grounding text +3.
Excise any entities which failed the reference check from the summary + +Now, let us start calling individual semantic functions.{Colors.CEND}""" + ) + print(f"\n{ '-'*80 }\n") + print( + f"{Colors.CGREEN}First we run the extraction function on the summary, this results in all the extracted entities.{Colors.CEND}" # noqa: E501 + ) + extraction_result, context = await run_entity_extraction(kernel, semantic_functions, summary_text) + print(f"Extraction result: \n{Colors.CBLUE}{extraction_result.result}{Colors.CEND}") + print(f"\n{ '-'*80 }\n") + print( + f"{Colors.CGREEN}Next we run the reference check function on the summary, this loads the grounding text as part of it in order to know the 'truth'. This returns a list of ungrounded entities.{Colors.CEND}" # noqa: E501 + ) + grounding_result, context = await run_reference_check(semantic_functions, extraction_result, context) + print(f"Grounding result: \n{Colors.CBLUE}{grounding_result.result}{Colors.CEND}") + print(f"\n{ '-'*80 }\n") + print( + f"{Colors.CGREEN}Finally we run the excision function on the summary, this removes the ungrounded entities from the summary.{Colors.CEND}" # noqa: E501 + ) + excision_result, context = await run_entity_excision(semantic_functions, summary_text, context) + print(f"The final summary text: \n{Colors.CBLUE}{excision_result.result}{Colors.CEND}") + print(f"\n{ '-'*80 }\n") + print(f"{Colors.CBOLD}Finished!{Colors.CEND}") + + +if __name__ == "__main__": + asyncio.run(run_grounding(use_azure=True)) diff --git a/python/samples/kernel-syntax-examples/memory.py b/python/samples/kernel-syntax-examples/memory.py index b4376a846e0e..4050e3bb95a8 100644 --- a/python/samples/kernel-syntax-examples/memory.py +++ b/python/samples/kernel-syntax-examples/memory.py @@ -9,21 +9,11 @@ async def populate_memory(kernel: sk.Kernel) -> None: # Add some documents to the semantic memory - await kernel.memory.save_information_async( - "aboutMe", id="info1", text="My name is Andrea" - ) - await kernel.memory.save_information_async( - "aboutMe", id="info2", text="I currently work as a tour guide" - ) - await kernel.memory.save_information_async( - "aboutMe", id="info3", text="I've been living in Seattle since 2005" - ) - await kernel.memory.save_information_async( - "aboutMe", id="info4", text="I visited France and Italy five times since 2015" - ) - await kernel.memory.save_information_async( - "aboutMe", id="info5", text="My family is from New York" - ) + await kernel.memory.save_information("aboutMe", id="info1", text="My name is Andrea") + await kernel.memory.save_information("aboutMe", id="info2", text="I currently work as a tour guide") + await kernel.memory.save_information("aboutMe", id="info3", text="I've been living in Seattle since 2005") + await kernel.memory.save_information("aboutMe", id="info4", text="I visited France and Italy five times since 2015") + await kernel.memory.save_information("aboutMe", id="info5", text="My family is from New York") async def search_memory_examples(kernel: sk.Kernel) -> None: @@ -37,13 +27,13 @@ async def search_memory_examples(kernel: sk.Kernel) -> None: for question in questions: print(f"Question: {question}") - result = await kernel.memory.search_async("aboutMe", question) + result = await kernel.memory.search("aboutMe", question) print(f"Answer: {result[0].text}\n") async def setup_chat_with_memory( kernel: sk.Kernel, -) -> Tuple[sk.SKFunctionBase, sk.SKContext]: +) -> Tuple[sk.KernelFunction, sk.KernelContext]: sk_prompt = """ ChatBot can have a conversation with you about any 
topic. It can give explicit instructions or say 'I don't know' if @@ -61,9 +51,7 @@ async def setup_chat_with_memory( User: {{$user_input}} ChatBot: """.strip() - chat_func = kernel.create_semantic_function( - sk_prompt, max_tokens=200, temperature=0.8 - ) + chat_func = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0.8) context = kernel.create_new_context() context["fact1"] = "what is my name?" @@ -72,17 +60,15 @@ async def setup_chat_with_memory( context["fact4"] = "where have I traveled?" context["fact5"] = "what do I do for work?" - context[sk.core_skills.TextMemorySkill.COLLECTION_PARAM] = "aboutMe" - context[sk.core_skills.TextMemorySkill.RELEVANCE_PARAM] = 0.8 + context[sk.core_plugins.TextMemoryPlugin.COLLECTION_PARAM] = "aboutMe" + context[sk.core_plugins.TextMemoryPlugin.RELEVANCE_PARAM] = "0.8" context["chat_history"] = "" return chat_func, context -async def chat( - kernel: sk.Kernel, chat_func: sk.SKFunctionBase, context: sk.SKContext -) -> bool: +async def chat(kernel: sk.Kernel, chat_func: sk.KernelFunction, context: sk.KernelContext) -> bool: try: user_input = input("User:> ") context["user_input"] = user_input @@ -97,7 +83,7 @@ async def chat( print("\n\nExiting chat...") return False - answer = await kernel.run_async(chat_func, input_vars=context.variables) + answer = await kernel.run(chat_func, input_vars=context.variables) context["chat_history"] += f"\nUser:> {user_input}\nChatBot:> {answer}\n" print(f"ChatBot:> {answer}") @@ -108,15 +94,13 @@ async def main() -> None: kernel = sk.Kernel() api_key, org_id = sk.openai_settings_from_dot_env() - kernel.add_chat_service( - "chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) - ) + kernel.add_chat_service("chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)) kernel.add_text_embedding_generation_service( "ada", sk_oai.OpenAITextEmbedding("text-embedding-ada-002", api_key, org_id) ) kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore()) - kernel.import_skill(sk.core_skills.TextMemorySkill()) + kernel.import_plugin(sk.core_plugins.TextMemoryPlugin(), "TextMemoryPlugin") print("Populating memory...") await populate_memory(kernel) diff --git a/python/samples/kernel-syntax-examples/openai_function_calling.py b/python/samples/kernel-syntax-examples/openai_function_calling.py index ecce09438df2..ef8ba18444fd 100644 --- a/python/samples/kernel-syntax-examples/openai_function_calling.py +++ b/python/samples/kernel-syntax-examples/openai_function_calling.py @@ -5,7 +5,10 @@ import semantic_kernel as sk import semantic_kernel.connectors.ai.open_ai as sk_oai -from semantic_kernel.core_skills import MathSkill +from semantic_kernel.connectors.ai.open_ai.models.chat.open_ai_chat_message import ( + OpenAIChatMessage, +) +from semantic_kernel.core_plugins import MathPlugin system_message = """ You are a chat bot. 
Your name is Mosscap and @@ -22,84 +25,92 @@ kernel = sk.Kernel() -deployment_name, api_key, endpoint = sk.azure_openai_settings_from_dot_env() -api_version = "2023-07-01-preview" +api_key, org_id = sk.openai_settings_from_dot_env() kernel.add_chat_service( - "chat-gpt", - sk_oai.AzureChatCompletion( - deployment_name, - endpoint, - api_key, - api_version=api_version, + "gpt-3.5-turbo", + sk_oai.OpenAIChatCompletion( + ai_model_id="gpt-3.5-turbo-1106", + api_key=api_key, ), ) -skills_directory = os.path.join(__file__, "../../../../samples/skills") -# adding skills to the kernel -# the joke skill in the FunSkills is a semantic skill and has the function calling disabled. -kernel.import_semantic_skill_from_directory(skills_directory, "FunSkill") -# the math skill is a core skill and has the function calling enabled. -kernel.import_skill(MathSkill(), skill_name="math") +plugins_directory = os.path.join(__file__, "../../../../samples/plugins") +# adding plugins to the kernel +# the joke plugin in the FunPlugins is a semantic plugin and has the function calling disabled. +kernel.import_semantic_plugin_from_directory(plugins_directory, "FunPlugin") +# the math plugin is a core plugin and has the function calling enabled. +kernel.import_plugin(MathPlugin(), plugin_name="math") # enabling or disabling function calling is done by setting the function_call parameter for the completion. # when the function_call parameter is set to "auto" the model will decide which function to use, if any. # if you only want to use a specific function, set the name of that function in this parameter, -# the format for that is 'SkillName-FunctionName', (i.e. 'math-Add'). +# the format for that is 'PluginName-FunctionName', (i.e. 'math-Add'). # if the model or api version do not support this you will get an error. -prompt_config = sk.PromptTemplateConfig.from_completion_parameters( +tools = [ + { + "type": "function", + "function": { + "name": "search_hotels", + "description": "Retrieves hotels from the search index based on the parameters provided", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The location of the hotel (i.e. Seattle, WA)", + }, + "max_price": { + "type": "number", + "description": "The maximum price for the hotel", + }, + "features": { + "type": "string", + "description": "A comma separated list of features (i.e. beachfront, free wifi, etc.)", + }, + }, + "required": ["location"], + }, + }, + } +] + +prompt_config = sk.PromptTemplateConfig.from_execution_settings( max_tokens=2000, temperature=0.7, top_p=0.8, - function_call="auto", - chat_system_prompt=system_message, + tool_choice="auto", + tools=tools, ) -prompt_template = sk.ChatPromptTemplate( +prompt_template = sk.ChatPromptTemplate[OpenAIChatMessage]( "{{$user_input}}", kernel.prompt_template_engine, prompt_config ) +prompt_template.add_system_message(system_message) prompt_template.add_user_message("Hi there, who are you?") -prompt_template.add_assistant_message( - "I am Mosscap, a chat bot. I'm trying to figure out what people need." -) +prompt_template.add_assistant_message("I am Mosscap, a chat bot. 
I'm trying to figure out what people need.") function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) # define the functions available -functions = [ - { - "name": "search_hotels", - "description": "Retrieves hotels from the search index based on the parameters provided", - "parameters": { - "type": "object", - "properties": { - "location": { - "type": "string", - "description": "The location of the hotel (i.e. Seattle, WA)", - }, - "max_price": { - "type": "number", - "description": "The maximum price for the hotel", - }, - "features": { - "type": "string", - "description": "A comma separated list of features (i.e. beachfront, free wifi, etc.)", - }, - }, - "required": ["location"], - }, - } -] async def main() -> None: context = kernel.create_new_context() - context.variables[ - "user_input" - ] = "I want to find a hotel in Seattle with free wifi and a pool." + context.variables["user_input"] = "I want to find a hotel in Seattle with free wifi and a pool." + messages = [] + tool_call = None + response = chat_function.invoke_stream_async(context=context) + async for message in response: + current = message[0] + messages.append(current) + if current.tool_calls: + if tool_call is None: + tool_call = current.tool_calls[0] + else: + tool_call += current.tool_calls[0] - context = await chat_function.invoke_async(context=context, functions=functions) - if function_call := context.pop_function_call(): - print(f"Function to be called: {function_call.name}") - print(f"Function parameters: \n{function_call.arguments}") + if tool_call: + print(f"Function to be called: {tool_call.function.name}") + print(f"Function parameters: \n{tool_call.function.parse_arguments()}") return print("No function was called") print(f"Output was: {str(context)}") diff --git a/python/samples/kernel-syntax-examples/openai_logit_bias.py b/python/samples/kernel-syntax-examples/openai_logit_bias.py index 7a2803ba0c98..646475eab5aa 100644 --- a/python/samples/kernel-syntax-examples/openai_logit_bias.py +++ b/python/samples/kernel-syntax-examples/openai_logit_bias.py @@ -1,12 +1,16 @@ # Copyright (c) Microsoft. All rights reserved. 
import asyncio +from typing import Any, Dict import semantic_kernel as sk import semantic_kernel.connectors.ai.open_ai as sk_oai -from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from semantic_kernel.connectors.ai.chat_completion_client_base import ( + ChatCompletionClientBase, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.connectors.ai.text_completion_client_base import ( + TextCompletionClientBase, ) """ @@ -17,22 +21,16 @@ """ -def _config_ban_tokens(settings_type, keys): - settings = ( - ChatRequestSettings() if settings_type == "chat" else CompleteRequestSettings() - ) - +def _config_ban_tokens(settings: PromptExecutionSettings, keys: Dict[Any, Any]): # Map each token in the keys list to a bias value from -100 (a potential ban) to 100 (exclusive selection) for k in keys: # -100 to potentially ban all tokens in the list - settings.token_selection_biases[k] = -100 + settings.logit_bias[k] = -100 return settings async def chat_request_example(kernel, api_key, org_id): - openai_chat_completion = sk_oai.OpenAIChatCompletion( - "gpt-3.5-turbo", api_key, org_id - ) + openai_chat_completion = sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) kernel.add_chat_service("chat_service", openai_chat_completion) # Spaces and capitalization affect the token ids. @@ -67,14 +65,11 @@ async def chat_request_example(kernel, api_key, org_id): ] # Model will try its best to avoid using any of the above words - settings = _config_ban_tokens("chat", keys) + settings = kernel.get_prompt_execution_settings_from_service(ChatCompletionClientBase, "chat_service") + settings = _config_ban_tokens(settings, keys) - prompt_config = sk.PromptTemplateConfig.from_completion_parameters( - max_tokens=2000, temperature=0.7, top_p=0.8 - ) - prompt_template = sk.ChatPromptTemplate( - "{{$user_input}}", kernel.prompt_template_engine, prompt_config - ) + prompt_config = sk.PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.8) + prompt_template = sk.ChatPromptTemplate("{{$user_input}}", kernel.prompt_template_engine, prompt_config) # Setup chat with prompt prompt_template.add_system_message("You are a basketball expert") @@ -83,15 +78,18 @@ async def chat_request_example(kernel, api_key, org_id): function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) kernel.register_semantic_function("ChatBot", "Chat", function_config) - chat_messages = list() + chat_messages = [] + messages = [{"role": "user", "content": user_mssg}] + chat_messages.append(("user", user_mssg)) - answer = await openai_chat_completion.complete_chat_async(chat_messages, settings) - chat_messages.append(("assistant", str(answer))) + answer = await openai_chat_completion.complete_chat(messages=messages, settings=settings) + chat_messages.append(("assistant", str(answer[0]))) user_mssg = "What are his best all-time stats?" 
+ messages = [{"role": "user", "content": user_mssg}] chat_messages.append(("user", user_mssg)) - answer = await openai_chat_completion.complete_chat_async(chat_messages, settings) - chat_messages.append(("assistant", str(answer))) + answer = await openai_chat_completion.complete_chat(messages=messages, settings=settings) + chat_messages.append(("assistant", str(answer[0]))) context_vars = sk.ContextVariables() context_vars["chat_history"] = "" @@ -108,9 +106,7 @@ async def chat_request_example(kernel, api_key, org_id): async def text_complete_request_example(kernel, api_key, org_id): - openai_text_completion = sk_oai.OpenAITextCompletion( - "text-davinci-002", api_key, org_id - ) + openai_text_completion = sk_oai.OpenAITextCompletion("gpt-3.5-turbo-instruct", api_key, org_id) kernel.add_text_completion_service("text_service", openai_text_completion) # Spaces and capitalization affect the token ids. @@ -154,10 +150,11 @@ async def text_complete_request_example(kernel, api_key, org_id): ] # Model will try its best to avoid using any of the above words - settings = _config_ban_tokens("complete", keys) + settings = kernel.get_prompt_execution_settings_from_service(TextCompletionClientBase, "text_service") + settings = _config_ban_tokens(settings, keys) user_mssg = "The best pie flavor to have in autumn is" - answer = await openai_text_completion.complete_async(user_mssg, settings) + answer = await openai_text_completion.complete(user_mssg, settings) context_vars = sk.ContextVariables() context_vars["chat_history"] = f"User:> {user_mssg}\nChatBot:> {answer}\n" diff --git a/python/samples/kernel-syntax-examples/openapi_example/README.md b/python/samples/kernel-syntax-examples/openapi_example/README.md index 1304a61bc4c0..4688b77be5f7 100644 --- a/python/samples/kernel-syntax-examples/openapi_example/README.md +++ b/python/samples/kernel-syntax-examples/openapi_example/README.md @@ -1,4 +1,4 @@ -### Running the OpenApi syntax example +### Running the OpenAPI syntax example 1. In a terminal, navigate to `semantic_kernel/python/samples/kernel-syntax-examples/openapi_example`. @@ -6,4 +6,4 @@ 3. Start the server by running `python openapi_server.py`. -4. In another terminal, do steps 1 & 2. Then, run `python openapi_client.py`, which will register a skill representing the API defined in openapi.yaml +4. In another terminal, do steps 1 & 2. 
Then, run `python openapi_client.py`, which will register a plugin representing the API defined in openapi.yaml diff --git a/python/samples/kernel-syntax-examples/openapi_example/openapi_client.py b/python/samples/kernel-syntax-examples/openapi_example/openapi_client.py index f8e51051f03e..806169b4ffe0 100644 --- a/python/samples/kernel-syntax-examples/openapi_example/openapi_client.py +++ b/python/samples/kernel-syntax-examples/openapi_example/openapi_client.py @@ -1,13 +1,13 @@ import asyncio import semantic_kernel as sk -from semantic_kernel.connectors.openapi import register_openapi_skill +from semantic_kernel.connectors.openapi import register_openapi_plugin if __name__ == "__main__": """Client""" kernel = sk.Kernel() - openapi_skill = register_openapi_skill(kernel, "openApiSkill", "openapi.yaml") + openapi_plugin = register_openapi_plugin(kernel, "openApiPlugin", "openapi.yaml") context_variables = sk.ContextVariables( variables={ @@ -19,6 +19,6 @@ ) result = asyncio.run( # Call the function defined in openapi.yaml - openapi_skill["helloWorld"].invoke_async(variables=context_variables) + openapi_plugin["helloWorld"].invoke(variables=context_variables) ) print(result) diff --git a/python/samples/kernel-syntax-examples/plugins_from_dir.py b/python/samples/kernel-syntax-examples/plugins_from_dir.py new file mode 100644 index 000000000000..08378fa44c9d --- /dev/null +++ b/python/samples/kernel-syntax-examples/plugins_from_dir.py @@ -0,0 +1,34 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +import semantic_kernel as sk +import semantic_kernel.connectors.ai.open_ai as sk_oai + +kernel = sk.Kernel() + +useAzureOpenAI = False +model = "gpt-35-turbo-instruct" if useAzureOpenAI else "gpt-3.5-turbo-instruct" +service_id = model + +# Configure AI service used by the kernel +if useAzureOpenAI: + deployment_name, api_key, endpoint = sk.azure_openai_settings_from_dot_env() + kernel.add_text_completion_service( + service_id, + sk_oai.AzureTextCompletion(deployment_name=model, api_key=api_key, endpoint=endpoint), + ) +else: + api_key, org_id = sk.openai_settings_from_dot_env() + kernel.add_text_completion_service( + service_id, + sk_oai.OpenAITextCompletion(ai_model_id=model, api_key=api_key, org_id=org_id), + ) + +# note: using plugins from the samples folder +plugins_directory = os.path.join(__file__, "../../../../samples/plugins") +plugin = kernel.import_semantic_plugin_from_directory(plugins_directory, "FunPlugin") + +result = asyncio.run(kernel.run(plugin["Joke"], input_str="time travel to dinosaur age")) +print(result) diff --git a/python/samples/kernel-syntax-examples/self-critique_rag.py b/python/samples/kernel-syntax-examples/self-critique_rag.py new file mode 100644 index 000000000000..05b12dd3f524 --- /dev/null +++ b/python/samples/kernel-syntax-examples/self-critique_rag.py @@ -0,0 +1,134 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio + +from dotenv import dotenv_values + +import semantic_kernel as sk +from semantic_kernel.connectors.ai.open_ai import ( + AzureTextCompletion, + AzureTextEmbedding, +) +from semantic_kernel.connectors.memory.azure_cognitive_search import ( + AzureCognitiveSearchMemoryStore, +) +from semantic_kernel.core_plugins.text_memory_plugin import TextMemoryPlugin +from semantic_kernel.orchestration.context_variables import ContextVariables + +COLLECTION_NAME = "generic" + + +async def populate_memory(kernel: sk.Kernel) -> None: + # Add some documents to the ACS semantic memory + await kernel.memory.save_information(COLLECTION_NAME, id="info1", text="My name is Andrea") + await kernel.memory.save_information(COLLECTION_NAME, id="info2", text="I currently work as a tour guide") + await kernel.memory.save_information(COLLECTION_NAME, id="info3", text="I've been living in Seattle since 2005") + await kernel.memory.save_information( + COLLECTION_NAME, + id="info4", + text="I visited France and Italy five times since 2015", + ) + await kernel.memory.save_information(COLLECTION_NAME, id="info5", text="My family is from New York") + + +async def main() -> None: + kernel = sk.Kernel() + tms = TextMemoryPlugin() + kernel.import_plugin(tms, "memory") + + config = dotenv_values(".env") + + AZURE_COGNITIVE_SEARCH_ENDPOINT = config["AZURE_AISEARCH_URL"] + AZURE_COGNITIVE_SEARCH_ADMIN_KEY = config["AZURE_AISEARCH_API_KEY"] + AZURE_OPENAI_API_KEY = config["AZURE_OPENAI_API_KEY"] + AZURE_OPENAI_ENDPOINT = config["AZURE_OPENAI_ENDPOINT"] + vector_size = 1536 + + # Setting up OpenAI services for text completion and text embedding + kernel.add_text_completion_service( + "dv", + AzureTextCompletion( + # Note: text-davinci-003 is deprecated and will be replaced by + # AzureOpenAI's gpt-35-turbo-instruct model. + deployment_name="gpt-35-turbo-instruct", + endpoint=AZURE_OPENAI_ENDPOINT, + api_key=AZURE_OPENAI_API_KEY, + ), + ) + kernel.add_text_embedding_generation_service( + "ada", + AzureTextEmbedding( + deployment_name="text-embedding-ada-002", + endpoint=AZURE_OPENAI_ENDPOINT, + api_key=AZURE_OPENAI_API_KEY, + ), + ) + + connector = AzureCognitiveSearchMemoryStore( + vector_size, AZURE_COGNITIVE_SEARCH_ENDPOINT, AZURE_COGNITIVE_SEARCH_ADMIN_KEY + ) + + # Register the memory store with the kernel + kernel.register_memory_store(memory_store=connector) + + print("Populating memory...") + # await populate_memory(kernel) + + sk_prompt_rag = """ +Assistant can have a conversation with you about any topic. +It can give explicit instructions or say 'I don't know' if +it does not have an answer. + +Here is some background information about the user that you should use to answer the question below: +{{ memory.recall $user_input }} +User: {{$user_input}} +Assistant: """.strip() + sk_prompt_rag_sc = """ +You will get a question, background information to be used with that question and an answer that was given. +You have to answer Grounded or Ungrounded or Unclear. +Grounded if the answer is based on the background information and clearly answers the question. +Ungrounded if the answer could be true but is not based on the background information. +Unclear if the answer does not answer the question at all. +Question: {{$user_input}} +Background: {{ memory.recall $user_input }} +Answer: {{ $input }} +Remember, just answer Grounded or Ungrounded or Unclear: """.strip() + + user_input = "Do I live in New York City?"
+ print(f"Question: {user_input}") + chat_func = kernel.create_semantic_function(sk_prompt_rag, max_tokens=1000, temperature=0.5) + self_critique_func = kernel.create_semantic_function(sk_prompt_rag_sc, max_tokens=4, temperature=0.0) + + answer = await kernel.run( + chat_func, + input_vars=ContextVariables( + variables={ + "user_input": user_input, + "collection": COLLECTION_NAME, + "limit": "2", + } + ), + ) + print(f"Answer: {str(answer).strip()}") + check = await kernel.run(self_critique_func, input_context=answer) + print(f"The answer was {str(check).strip()}") + + print("-" * 50) + print(" Let's pretend the answer was wrong...") + answer.variables.variables["input"] = "Yes, you live in New York City." + print(f"Answer: {str(answer).strip()}") + check = await kernel.run(self_critique_func, input_context=answer) + print(f"The answer was {str(check).strip()}") + + print("-" * 50) + print(" Let's pretend the answer is not related...") + answer.variables.variables["input"] = "Yes, the earth is not flat." + print(f"Answer: {str(answer).strip()}") + check = await kernel.run(self_critique_func, input_context=answer) + print(f"The answer was {str(check).strip()}") + + await connector.close() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/sequential_planner.py b/python/samples/kernel-syntax-examples/sequential_planner.py index 82c3e32184f7..8440ddf89861 100644 --- a/python/samples/kernel-syntax-examples/sequential_planner.py +++ b/python/samples/kernel-syntax-examples/sequential_planner.py @@ -2,7 +2,12 @@ import semantic_kernel as sk from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion -from semantic_kernel.core_skills import FileIOSkill, MathSkill, TextSkill, TimeSkill +from semantic_kernel.core_plugins import ( + FileIOPlugin, + MathPlugin, + TextPlugin, + TimePlugin, +) from semantic_kernel.planning import SequentialPlanner @@ -10,13 +15,11 @@ async def main(): kernel = sk.Kernel() api_key, org_id = sk.openai_settings_from_dot_env() - kernel.add_chat_service( - "gpt-3.5", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) - ) - kernel.import_skill(MathSkill(), "math") - kernel.import_skill(FileIOSkill(), "fileIO") - kernel.import_skill(TimeSkill(), "time") - kernel.import_skill(TextSkill(), "text") + kernel.add_chat_service("gpt-3.5", OpenAIChatCompletion("gpt-3.5-turbo", api_key=api_key, org_id=org_id)) + kernel.import_plugin(MathPlugin(), "math") + kernel.import_plugin(FileIOPlugin(), "fileIO") + kernel.import_plugin(TimePlugin(), "time") + kernel.import_plugin(TextPlugin(), "text") # create an instance of sequential planner. planner = SequentialPlanner(kernel) @@ -25,10 +28,10 @@ async def main(): ask = "What day of the week is today, all uppercase?" # ask the sequential planner to identify a suitable function from the list of functions available. - plan = await planner.create_plan_async(goal=ask) + plan = await planner.create_plan(goal=ask) # ask the sequential planner to execute the identified function. - result = await plan.invoke_async() + result = await plan.invoke() for step in plan._steps: print(step.description, ":", step._state.__dict__) diff --git a/python/samples/kernel-syntax-examples/setup_logging.py b/python/samples/kernel-syntax-examples/setup_logging.py new file mode 100644 index 000000000000..2da0a5f397ce --- /dev/null +++ b/python/samples/kernel-syntax-examples/setup_logging.py @@ -0,0 +1,31 @@ +# Copyright (c) Microsoft. All rights reserved. 
+import logging + +import semantic_kernel as sk +from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion +from semantic_kernel.utils.logging import setup_logging + + +async def main(): + setup_logging() + + # Set the logging level for semantic_kernel.kernel to DEBUG. + logging.getLogger("kernel").setLevel(logging.DEBUG) + + kernel = sk.Kernel() + + api_key, org_id = sk.openai_settings_from_dot_env() + + kernel.add_chat_service("chat-gpt", OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)) + + plugin = kernel.import_semantic_plugin_from_directory("../../samples/plugins", "FunPlugin") + + joke_function = plugin["Joke"] + + print(joke_function("time travel to dinosaur age")) + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) diff --git a/python/samples/kernel-syntax-examples/skills_from_dir.py b/python/samples/kernel-syntax-examples/skills_from_dir.py deleted file mode 100644 index 5d463af69d78..000000000000 --- a/python/samples/kernel-syntax-examples/skills_from_dir.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import os - -import semantic_kernel as sk -import semantic_kernel.connectors.ai.open_ai as sk_oai - -kernel = sk.Kernel() - -useAzureOpenAI = False -model = "text-davinci-002" -service_id = model - -# Configure AI service used by the kernel -if useAzureOpenAI: - api_key, endpoint = sk.azure_openai_settings_from_dot_env() - kernel.add_text_completion_service( - service_id, sk_oai.AzureTextCompletion(model, api_key, endpoint) - ) -else: - api_key, org_id = sk.openai_settings_from_dot_env() - kernel.add_text_completion_service( - service_id, sk_oai.OpenAITextCompletion(model, api_key, org_id) - ) - -# note: using skills from the samples folder -skills_directory = os.path.join(__file__, "../../../../samples/skills") -skill = kernel.import_semantic_skill_from_directory(skills_directory, "FunSkill") - -result = asyncio.run( - kernel.run_async(skill["Joke"], input_str="time travel to dinosaur age") -) -print(result) diff --git a/python/samples/utils.py b/python/samples/utils.py new file mode 100644 index 000000000000..582b82001b14 --- /dev/null +++ b/python/samples/utils.py @@ -0,0 +1,47 @@ +from enum import Enum + + +class Colors(str, Enum): + CEND = "\33[0m" + CBOLD = "\33[1m" + CITALIC = "\33[3m" + CURL = "\33[4m" + CBLINK = "\33[5m" + CBLINK2 = "\33[6m" + CSELECTED = "\33[7m" + + CBLACK = "\33[30m" + CRED = "\33[31m" + CGREEN = "\33[32m" + CYELLOW = "\33[33m" + CBLUE = "\33[34m" + CVIOLET = "\33[35m" + CBEIGE = "\33[36m" + CWHITE = "\33[37m" + + CBLACKBG = "\33[40m" + CREDBG = "\33[41m" + CGREENBG = "\33[42m" + CYELLOWBG = "\33[43m" + CBLUEBG = "\33[44m" + CVIOLETBG = "\33[45m" + CBEIGEBG = "\33[46m" + CWHITEBG = "\33[47m" + + CGREY = "\33[90m" + CRED2 = "\33[91m" + CGREEN2 = "\33[92m" + CYELLOW2 = "\33[93m" + CBLUE2 = "\33[94m" + CVIOLET2 = "\33[95m" + CBEIGE2 = "\33[96m" + CWHITE2 = "\33[97m" + + CGREYBG = "\33[100m" + CREDBG2 = "\33[101m" + CGREENBG2 = "\33[102m" + CYELLOWBG2 = "\33[103m" + CBLUEBG2 = "\33[104m" + CVIOLETBG2 = "\33[105m" + CBEIGEBG2 = "\33[106m" + CWHITEBG2 = "\33[107m" diff --git a/python/semantic_kernel/__init__.py b/python/semantic_kernel/__init__.py index 24526dbe03d6..d750c9fef82d 100644 --- a/python/semantic_kernel/__init__.py +++ b/python/semantic_kernel/__init__.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. 
-from semantic_kernel import core_skills, memory +from semantic_kernel import core_plugins, memory from semantic_kernel.kernel import Kernel from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate from semantic_kernel.semantic_functions.prompt_template import PromptTemplate from semantic_kernel.semantic_functions.prompt_template_config import ( @@ -13,8 +13,13 @@ from semantic_kernel.semantic_functions.semantic_function_config import ( SemanticFunctionConfig, ) +from semantic_kernel.utils.logging import setup_logging from semantic_kernel.utils.null_logger import NullLogger from semantic_kernel.utils.settings import ( + astradb_settings_from_dot_env, + azure_aisearch_settings_from_dot_env, + azure_aisearch_settings_from_dot_env_as_dict, + azure_cosmos_db_settings_from_dot_env, azure_openai_settings_from_dot_env, bing_search_settings_from_dot_env, google_palm_settings_from_dot_env, @@ -28,10 +33,14 @@ __all__ = [ "Kernel", "NullLogger", + "azure_cosmos_db_settings_from_dot_env", "openai_settings_from_dot_env", "azure_openai_settings_from_dot_env", + "azure_aisearch_settings_from_dot_env", + "azure_aisearch_settings_from_dot_env_as_dict", "postgres_settings_from_dot_env", "pinecone_settings_from_dot_env", + "astradb_settings_from_dot_env", "bing_search_settings_from_dot_env", "mongodb_atlas_settings_from_dot_env", "google_palm_settings_from_dot_env", @@ -41,8 +50,9 @@ "ChatPromptTemplate", "SemanticFunctionConfig", "ContextVariables", - "SKFunctionBase", - "SKContext", + "KernelFunction", + "KernelContext", "memory", - "core_skills", + "core_plugins", + "setup_logging", ] diff --git a/python/semantic_kernel/connectors/ai/__init__.py b/python/semantic_kernel/connectors/ai/__init__.py index 85fa6ce9724e..2ac8f8c67be7 100644 --- a/python/semantic_kernel/connectors/ai/__init__.py +++ b/python/semantic_kernel/connectors/ai/__init__.py @@ -3,13 +3,10 @@ from semantic_kernel.connectors.ai.chat_completion_client_base import ( ChatCompletionClientBase, ) -from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, -) from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( EmbeddingGeneratorBase, ) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.connectors.ai.text_completion_client_base import ( TextCompletionClientBase, ) @@ -18,6 +15,5 @@ "ChatCompletionClientBase", "TextCompletionClientBase", "EmbeddingGeneratorBase", - "ChatRequestSettings", - "CompleteRequestSettings", + "PromptExecutionSettings", ] diff --git a/python/semantic_kernel/connectors/ai/ai_exception.py b/python/semantic_kernel/connectors/ai/ai_exception.py index 255e4a87ccfc..8b8647338985 100644 --- a/python/semantic_kernel/connectors/ai/ai_exception.py +++ b/python/semantic_kernel/connectors/ai/ai_exception.py @@ -28,6 +28,8 @@ class ErrorCodes(Enum): InvalidConfiguration = 8 # The function is not supported. FunctionTypeNotSupported = 9 + # The LLM raised an error due to improper content. + BadContentError = 10 # The error code. 
_error_code: ErrorCodes diff --git a/python/semantic_kernel/connectors/ai/ai_service_client_base.py b/python/semantic_kernel/connectors/ai/ai_service_client_base.py new file mode 100644 index 000000000000..a2c8d0c00b20 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/ai_service_client_base.py @@ -0,0 +1,37 @@ +# Copyright (c) Microsoft. All rights reserved. + +import sys +from abc import ABC + +if sys.version_info >= (3, 9): + from typing import Annotated +else: + from typing_extensions import Annotated + +from pydantic import StringConstraints + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class AIServiceClientBase(KernelBaseModel, ABC): + """Base class for all AI Services. + + Has an ai_model_id; any other fields have to be defined by the subclasses. + + The ai_model_id can refer to a specific model, like 'gpt-35-turbo' for OpenAI, + or can just be a string that is used to identify the service. + """ + + ai_model_id: Annotated[str, StringConstraints(strip_whitespace=True, min_length=1)] + + def get_prompt_execution_settings_class(self) -> "PromptExecutionSettings": + """Get the request settings class.""" + return PromptExecutionSettings # pragma: no cover + + def instantiate_prompt_execution_settings(self, **kwargs) -> "PromptExecutionSettings": + """Create a request settings object. + + All arguments are passed to the constructor of the request settings object. + """ + return self.get_prompt_execution_settings_class()(**kwargs) diff --git a/python/semantic_kernel/connectors/ai/chat_completion_client_base.py b/python/semantic_kernel/connectors/ai/chat_completion_client_base.py index 118c84c8ac7f..504f5c473c03 100644 --- a/python/semantic_kernel/connectors/ai/chat_completion_client_base.py +++ b/python/semantic_kernel/connectors/ai/chat_completion_client_base.py @@ -1,30 +1,30 @@ # Copyright (c) Microsoft. All rights reserved. from abc import ABC, abstractmethod -from logging import Logger -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, Any, AsyncIterable, List, Optional if TYPE_CHECKING: - from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings + from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.models.chat.chat_message import ChatMessage + from semantic_kernel.models.contents import ChatMessageContent, StreamingChatMessageContent class ChatCompletionClientBase(ABC): @abstractmethod - async def complete_chat_async( + async def complete_chat( self, messages: List["ChatMessage"], - settings: "ChatRequestSettings", - logger: Optional[Logger] = None, - ) -> Union[str, List[str]]: + settings: "PromptExecutionSettings", + logger: Optional[Any] = None, + ) -> List["ChatMessageContent"]: """ This is the method that is called from the kernel to get a response from a chat-optimized LLM. Arguments: messages {List[ChatMessage]} -- A list of chat messages, that can be rendered into a set of messages, from system, user, assistant and function. - settings {ChatRequestSettings} -- Settings for the request. - logger {Logger} -- A logger to use for logging. + settings {PromptExecutionSettings} -- Settings for the request. + logger {Logger} -- A logger to use for logging. (Deprecated) Returns: Union[str, List[str]] -- A string or list of strings representing the response(s) from the LLM.
@@ -32,20 +32,20 @@ async def complete_chat_async( pass @abstractmethod - async def complete_chat_stream_async( + async def complete_chat_stream( self, messages: List["ChatMessage"], - settings: "ChatRequestSettings", - logger: Optional[Logger] = None, - ): + settings: "PromptExecutionSettings", + logger: Optional[Any] = None, + ) -> AsyncIterable[List["StreamingChatMessageContent"]]: """ This is the method that is called from the kernel to get a stream response from a chat-optimized LLM. Arguments: messages {List[ChatMessage]} -- A list of chat messages, that can be rendered into a set of messages, from system, user, assistant and function. - settings {ChatRequestSettings} -- Settings for the request. - logger {Logger} -- A logger to use for logging. + settings {PromptExecutionSettings} -- Settings for the request. + logger {Logger} -- A logger to use for logging. (Deprecated) Yields: A stream representing the response(s) from the LLM. diff --git a/python/semantic_kernel/connectors/ai/chat_request_settings.py b/python/semantic_kernel/connectors/ai/chat_request_settings.py deleted file mode 100644 index 5718f28fbe30..000000000000 --- a/python/semantic_kernel/connectors/ai/chat_request_settings.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Dict, List, Optional - -if TYPE_CHECKING: - from semantic_kernel.semantic_functions.prompt_template_config import ( - PromptTemplateConfig, - ) - - -@dataclass -class ChatRequestSettings: - temperature: float = 0.0 - top_p: float = 1.0 - presence_penalty: float = 0.0 - frequency_penalty: float = 0.0 - number_of_responses: int = 1 - max_tokens: int = 256 - token_selection_biases: Dict[int, int] = field(default_factory=dict) - stop_sequences: List[str] = field(default_factory=list) - function_call: Optional[str] = None - - def update_from_completion_config( - self, completion_config: "PromptTemplateConfig.CompletionConfig" - ): - self.temperature = completion_config.temperature - self.top_p = completion_config.top_p - self.number_of_responses = completion_config.number_of_responses - self.stop_sequences = completion_config.stop_sequences - self.max_tokens = completion_config.max_tokens - self.presence_penalty = completion_config.presence_penalty - self.frequency_penalty = completion_config.frequency_penalty - self.token_selection_biases = completion_config.token_selection_biases - self.function_call = ( - completion_config.function_call - if hasattr(completion_config, "function_call") - else None - ) - - @staticmethod - def from_completion_config( - completion_config: "PromptTemplateConfig.CompletionConfig", - ) -> "ChatRequestSettings": - settings = ChatRequestSettings() - settings.update_from_completion_config(completion_config) - return settings diff --git a/python/semantic_kernel/connectors/ai/complete_request_settings.py b/python/semantic_kernel/connectors/ai/complete_request_settings.py deleted file mode 100644 index 4669499c2010..000000000000 --- a/python/semantic_kernel/connectors/ai/complete_request_settings.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Dict, List - -if TYPE_CHECKING: - from semantic_kernel.semantic_functions.prompt_template_config import ( - PromptTemplateConfig, - ) - - -@dataclass -class CompleteRequestSettings: - temperature: float = 0.0 - top_p: float = 1.0 - presence_penalty: float = 0.0 - frequency_penalty: float = 0.0 - max_tokens: int = 256 - stop_sequences: List[str] = field(default_factory=list) - number_of_responses: int = 1 - logprobs: int = 0 - token_selection_biases: Dict[int, int] = field(default_factory=dict) - chat_system_prompt: str = "Assistant is a large language model." - - def update_from_completion_config( - self, completion_config: "PromptTemplateConfig.CompletionConfig" - ): - self.temperature = completion_config.temperature - self.top_p = completion_config.top_p - self.presence_penalty = completion_config.presence_penalty - self.frequency_penalty = completion_config.frequency_penalty - self.max_tokens = completion_config.max_tokens - self.stop_sequences = completion_config.stop_sequences - self.number_of_responses = completion_config.number_of_responses - self.token_selection_biases = completion_config.token_selection_biases - - if completion_config.chat_system_prompt: - self.chat_system_prompt = completion_config.chat_system_prompt - - @staticmethod - def from_completion_config( - completion_config: "PromptTemplateConfig.CompletionConfig", - ) -> "CompleteRequestSettings": - settings = CompleteRequestSettings() - settings.update_from_completion_config(completion_config) - return settings diff --git a/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py b/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py index ef84709bdeeb..dfffcb29506d 100644 --- a/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py +++ b/python/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py @@ -1,12 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. from abc import ABC, abstractmethod -from typing import List +from typing import TYPE_CHECKING, List -from numpy import ndarray +if TYPE_CHECKING: + from numpy import ndarray class EmbeddingGeneratorBase(ABC): @abstractmethod - async def generate_embeddings_async(self, texts: List[str]) -> ndarray: + async def generate_embeddings(self, texts: List[str]) -> "ndarray": pass diff --git a/python/semantic_kernel/connectors/ai/google_palm/__init__.py b/python/semantic_kernel/connectors/ai/google_palm/__init__.py index 249ff10e1e34..2b661bc933fa 100644 --- a/python/semantic_kernel/connectors/ai/google_palm/__init__.py +++ b/python/semantic_kernel/connectors/ai/google_palm/__init__.py @@ -1,5 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. 
+from semantic_kernel.connectors.ai.google_palm.gp_prompt_execution_settings import ( + GooglePalmChatPromptExecutionSettings, + GooglePalmTextPromptExecutionSettings, +) from semantic_kernel.connectors.ai.google_palm.services.gp_chat_completion import ( GooglePalmChatCompletion, ) @@ -14,4 +18,6 @@ "GooglePalmTextCompletion", "GooglePalmChatCompletion", "GooglePalmTextEmbedding", + "GooglePalmChatPromptExecutionSettings", + "GooglePalmTextPromptExecutionSettings", ] diff --git a/python/semantic_kernel/connectors/ai/google_palm/gp_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/google_palm/gp_prompt_execution_settings.py new file mode 100644 index 000000000000..02dc5bc3b7c6 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/google_palm/gp_prompt_execution_settings.py @@ -0,0 +1,41 @@ +from typing import Any, Dict, Iterable, List, Optional, Union + +from pydantic import Field, model_validator + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + +# TODO: replace back with google types once pydantic issue is fixed. +MessagesOptions = List[Dict[str, Any]] + +MessagePromptOption = Union[str, Dict[str, Any]] +MessagePromptOptions = Union[MessagePromptOption, List[MessagePromptOption]] + +ExampleOptions = Union[Dict[str, Any], List[Dict[str, Any]]] + + +class GooglePalmPromptExecutionSettings(PromptExecutionSettings): + ai_model_id: Optional[str] = Field(None, serialization_alias="model") + temperature: float = Field(0.0, ge=0.0, le=1.0) + top_p: float = 1.0 + top_k: int = 1 + candidate_count: int = Field(1, ge=1, le=8) + safety_settings: Optional[Dict[str, Any]] = None + prompt: Optional[MessagePromptOptions] = None + + +class GooglePalmTextPromptExecutionSettings(GooglePalmPromptExecutionSettings): + max_output_tokens: int = Field(256, gt=0) + stop_sequences: Optional[Union[str, Iterable[str]]] = None + + +class GooglePalmChatPromptExecutionSettings(GooglePalmPromptExecutionSettings): + messages: Optional[MessagesOptions] = None + examples: Optional[ExampleOptions] = None + context: Optional[str] = None + token_selection_biases: Optional[Dict[int, int]] = None + + @model_validator(mode="after") + def validate_input(self): + if self.prompt is not None: + if self.messages or self.context or self.examples: + raise ValueError("Prompt cannot be used with messages, context or examples") diff --git a/python/semantic_kernel/connectors/ai/google_palm/services/gp_chat_completion.py b/python/semantic_kernel/connectors/ai/google_palm/services/gp_chat_completion.py index 6286687fdbe8..e5f701289f43 100644 --- a/python/semantic_kernel/connectors/ai/google_palm/services/gp_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/google_palm/services/gp_chat_completion.py @@ -1,145 +1,188 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger -from typing import List, Optional, Tuple, Union +import logging +import sys +from typing import Any, Dict, List, Optional, Tuple + +from semantic_kernel.models.contents.chat_message_content import ChatMessageContent +from semantic_kernel.models.contents.text_content import TextContent + +if sys.version_info >= (3, 9): + from typing import Annotated +else: + from typing_extensions import Annotated import google.generativeai as palm -from google.generativeai.types import ChatResponse, ExampleOptions, MessagePromptOptions +from google.generativeai.types import ChatResponse, MessageDict +from pydantic import PrivateAttr, StringConstraints from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase from semantic_kernel.connectors.ai.chat_completion_client_base import ( ChatCompletionClientBase, ) -from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from semantic_kernel.connectors.ai.google_palm.gp_prompt_execution_settings import ( + GooglePalmChatPromptExecutionSettings, + GooglePalmPromptExecutionSettings, ) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.connectors.ai.text_completion_client_base import ( TextCompletionClientBase, ) +logger: logging.Logger = logging.getLogger(__name__) + -class GooglePalmChatCompletion(ChatCompletionClientBase, TextCompletionClientBase): - _model_id: str - _api_key: str - _message_history: ChatResponse +class GooglePalmChatCompletion(ChatCompletionClientBase, TextCompletionClientBase, AIServiceClientBase): + api_key: Annotated[str, StringConstraints(strip_whitespace=True, min_length=1)] + _message_history: Optional[ChatResponse] = PrivateAttr() def __init__( self, - model_id: str, + ai_model_id: str, api_key: str, - ) -> None: + message_history: Optional[ChatResponse] = None, + log: Optional[Any] = None, + ): """ Initializes a new instance of the GooglePalmChatCompletion class. Arguments: - model_id {str} -- GooglePalm model name, see - https://developers.generativeai.google/models/language + ai_model_id {str} -- GooglePalm model name, see + https://developers.generativeai.google/models/language api_key {str} -- GooglePalm API key, see - https://developers.generativeai.google/products/palm + https://developers.generativeai.google/products/palm + message_history {Optional[ChatResponse]} -- The message history to use for context. (Optional) + log {Optional[Any]} -- A logger to use for logging. (Optional) """ - if not api_key: - raise ValueError("The Google PaLM API key cannot be `None` or empty`") - - self._model_id = model_id - self._api_key = api_key - self._message_history = None + super().__init__( + ai_model_id=ai_model_id, + api_key=api_key, + ) + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") + self._message_history = message_history - async def complete_chat_async( + async def complete_chat( self, - messages: List[Tuple[str, str]], - request_settings: ChatRequestSettings, - context: Optional[str] = None, - examples: Optional[ExampleOptions] = None, - prompt: Optional[MessagePromptOptions] = None, - ) -> Union[str, List[str]]: - response = await self._send_chat_request( - messages, request_settings, context, examples, prompt - ) + messages: List[Dict[str, str]], + settings: GooglePalmPromptExecutionSettings, + ) -> List[ChatMessageContent]: + """ + This is the method that is called from the kernel to get a response from a chat-optimized LLM. - if request_settings.number_of_responses > 1: - return [ - candidate["output"] - if candidate["output"] is not None - else "I don't know." - for candidate in response.candidates - ] - else: - if response.last is None: - return "I don't know." # PaLM returns None if it doesn't know - else: - return response.last + Arguments: + messages {List[ChatMessage]} -- A list of chat messages, that can be rendered into a + set of messages, from system, user, assistant and function. + settings {GooglePalmPromptExecutionSettings} -- Settings for the request. + + Returns: + List[ChatMessageContent] -- A list of ChatMessageContent objects representing the response(s) from the LLM. + """ + settings.messages = [{"author": message["role"], "content": message["content"]} for message in messages] + if not settings.ai_model_id: + settings.ai_model_id = self.ai_model_id + response = await self._send_chat_request(settings) + return [ + self._create_chat_message_content(response, candidate, index) + for index, candidate in enumerate(response.candidates) + ] + + def _create_chat_message_content( + self, response: ChatResponse, candidate: MessageDict, index: int + ) -> ChatMessageContent: + """Create a chat message content object from a response. + + Arguments: + response {ChatResponse} -- The response to create the content from. + + Returns: + ChatMessageContent -- The created chat message content. 
+ """ + metadata = {"citation_metadata": candidate.get("citation_metadata"), "filters": response.filters} + return ChatMessageContent( + choice_index=index, + inner_content=response, + ai_model_id=self.ai_model_id, + metadata=metadata, + role=candidate.get("author"), + content=candidate.get("content"), + ) - async def complete_chat_stream_async( + async def complete_chat_stream( self, messages: List[Tuple[str, str]], - request_settings: ChatRequestSettings, - context: Optional[str] = None, + settings: GooglePalmPromptExecutionSettings, ): - raise NotImplementedError( - "Google Palm API does not currently support streaming" - ) + raise NotImplementedError("Google Palm API does not currently support streaming") - async def complete_async( + async def complete( self, prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ) -> Union[str, List[str]]: - prompt_to_message = [("user", prompt)] - chat_settings = ChatRequestSettings( - temperature=request_settings.temperature, - top_p=request_settings.top_p, - presence_penalty=request_settings.presence_penalty, - frequency_penalty=request_settings.frequency_penalty, - max_tokens=request_settings.max_tokens, - number_of_responses=request_settings.number_of_responses, - token_selection_biases=request_settings.token_selection_biases, + settings: GooglePalmPromptExecutionSettings, + **kwargs, + ) -> List[TextContent]: + """ + This is the method that is called from the kernel to get a response from a text-optimized LLM. + + Arguments: + prompt {str} -- The prompt to send to the LLM. + settings {GooglePalmPromptExecutionSettings} -- Settings for the request. + + Returns: + List[TextContent] -- A list of TextContent objects representing the response(s) from the LLM. + """ + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") + settings.messages = [{"author": "user", "content": prompt}] + if not settings.ai_model_id: + settings.ai_model_id = self.ai_model_id + response = await self._send_chat_request(settings) + + return [self._create_text_content(response, candidate) for candidate in response.candidates] + + def _create_text_content(self, response: ChatResponse, candidate: MessageDict) -> TextContent: + """Create a text content object from a response. + + Arguments: + response {ChatResponse} -- The response to create the content from. + + Returns: + TextContent -- The created text content. + """ + metadata = {"citation_metadata": candidate.get("citation_metadata"), "filters": response.filters} + return TextContent( + inner_content=response, + ai_model_id=self.ai_model_id, + metadata=metadata, + text=candidate.get("content"), ) - response = await self._send_chat_request(prompt_to_message, chat_settings) - - if chat_settings.number_of_responses > 1: - return [ - candidate["output"] - if candidate["output"] is not None - else "I don't know." - for candidate in response.candidates - ] - else: - if response.last is None: - return "I don't know." # PaLM returns None if it doesn't know - else: - return response.last - async def complete_stream_async( + async def complete_stream( self, prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, + settings: GooglePalmPromptExecutionSettings, + **kwargs, ): - raise NotImplementedError( - "Google Palm API does not currently support streaming" - ) + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. 
Please use the `logging` module instead.") + raise NotImplementedError("Google Palm API does not currently support streaming") async def _send_chat_request( self, - messages: List[Tuple[str, str]], - request_settings: ChatRequestSettings, - context: Optional[str] = None, - examples: Optional[ExampleOptions] = None, - prompt: Optional[MessagePromptOptions] = None, + settings: GooglePalmPromptExecutionSettings, ): """ Completes the given user message. If len(messages) > 1, and a conversation has not been initiated yet, it is assumed that chat history is needed for context. All messages preceding the last message will be utilized for context. This also enables Google PaLM to utilize memory - and skills, which should be stored in the messages parameter as system + and plugins, which should be stored in the messages parameter as system messages. Arguments: messages {str} -- The message (from a user) to respond to. - request_settings {ChatRequestSettings} -- The request settings. + settings {GooglePalmPromptExecutionSettings} -- The request settings. context {str} -- Text that should be provided to the model first, to ground the response. If a system message is provided, it will be used as context. @@ -159,60 +202,27 @@ async def _send_chat_request( Returns: str -- The completed text. """ - if request_settings is None: + if settings is None: raise ValueError("The request settings cannot be `None`") - if request_settings.max_tokens < 1: - raise AIException( - AIException.ErrorCodes.InvalidRequest, - "The max tokens must be greater than 0, " - f"but was {request_settings.max_tokens}", - ) - - if len(messages) <= 0: - raise AIException( - AIException.ErrorCodes.InvalidRequest, - "To complete a chat you need at least one message", - ) - - if messages[-1][0] != "user": + if settings.messages[-1]["author"] != "user": raise AIException( AIException.ErrorCodes.InvalidRequest, "The last message must be from the user", ) try: - palm.configure(api_key=self._api_key) + palm.configure(api_key=self.api_key) except Exception as ex: raise PermissionError( "Google PaLM service failed to configure. 
Invalid API key provided.", ex, ) - if ( - self._message_history is None and context is None - ): # If the conversation hasn't started yet and no context is provided - context = "" - if len(messages) > 1: # Check if we need context from messages - for index, (role, message) in enumerate(messages): - if index < len(messages) - 1: - if role == "system": - context += message + "\n" - else: - context += role + ": " + message + "\n" try: if self._message_history is None: - response = palm.chat( # Start a new conversation - model=self._model_id, - context=context, - examples=examples, - temperature=request_settings.temperature, - candidate_count=request_settings.number_of_responses, - top_p=request_settings.top_p, - prompt=prompt, - messages=messages[-1][1], - ) + response = palm.chat(**settings.prepare_settings_dict()) # Start a new conversation else: response = self._message_history.reply( # Continue the conversation - messages[-1][1], + settings.messages[-1]["content"], ) self._message_history = response # Store response object for future use except Exception as ex: @@ -222,3 +232,7 @@ async def _send_chat_request( ex, ) return response + + def get_prompt_execution_settings_class(self) -> "PromptExecutionSettings": + """Create a request settings object.""" + return GooglePalmChatPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/google_palm/services/gp_text_completion.py b/python/semantic_kernel/connectors/ai/google_palm/services/gp_text_completion.py index 09ca400a28ab..45317c9825a3 100644 --- a/python/semantic_kernel/connectors/ai/google_palm/services/gp_text_completion.py +++ b/python/semantic_kernel/connectors/ai/google_palm/services/gp_text_completion.py @@ -1,112 +1,106 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import List, Optional, Union +import logging +import sys +from typing import Any, List, Optional +from semantic_kernel.models.contents.text_content import TextContent + +if sys.version_info >= (3, 9): + from typing import Annotated +else: + from typing_extensions import Annotated import google.generativeai as palm +from google.generativeai.types import Completion +from google.generativeai.types.text_types import TextCompletion +from pydantic import StringConstraints from semantic_kernel.connectors.ai.ai_exception import AIException -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase +from semantic_kernel.connectors.ai.google_palm.gp_prompt_execution_settings import ( + GooglePalmTextPromptExecutionSettings, ) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.connectors.ai.text_completion_client_base import ( TextCompletionClientBase, ) +logger: logging.Logger = logging.getLogger(__name__) + -class GooglePalmTextCompletion(TextCompletionClientBase): - _model_id: str - _api_key: str +class GooglePalmTextCompletion(TextCompletionClientBase, AIServiceClientBase): + api_key: Annotated[str, StringConstraints(strip_whitespace=True, min_length=1)] - def __init__(self, model_id: str, api_key: str) -> None: + def __init__(self, ai_model_id: str, api_key: str, log: Optional[Any] = None): """ Initializes a new instance of the GooglePalmTextCompletion class. 
Arguments: - model_id {str} -- GooglePalm model name, see - https://developers.generativeai.google/models/language + ai_model_id {str} -- GooglePalm model name, see + https://developers.generativeai.google/models/language api_key {str} -- GooglePalm API key, see - https://developers.generativeai.google/products/palm + https://developers.generativeai.google/products/palm + log {Optional[Any]} -- The logger instance to use. (Optional) (Deprecated) """ - if not api_key: - raise ValueError("The Google PaLM API key cannot be `None` or empty`") - - self._model_id = model_id - self._api_key = api_key - - async def complete_async( - self, - prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ) -> Union[str, List[str]]: - response = await self._send_completion_request(prompt, request_settings) - - if request_settings.number_of_responses > 1: - return [candidate["output"] for candidate in response.candidates] - else: - return response.result - - async def complete_stream_async( - self, - prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ): - raise NotImplementedError( - "Google Palm API does not currently support streaming" - ) + super().__init__(ai_model_id=ai_model_id, api_key=api_key) + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") - async def _send_completion_request( - self, prompt: str, request_settings: CompleteRequestSettings - ): + async def complete( + self, prompt: str, settings: GooglePalmTextPromptExecutionSettings, **kwargs + ) -> List[TextContent]: """ - Completes the given prompt. Returns a single string completion. - Cannot return multiple completions. Cannot return logprobs. + This is the method that is called from the kernel to get a response from a text-optimized LLM. Arguments: - prompt {str} -- The prompt to complete. - request_settings {CompleteRequestSettings} -- The request settings. + prompt {str} -- The prompt to send to the LLM. + settings {GooglePalmTextPromptExecutionSettings} -- Settings for the request. Returns: - str -- The completed text. + List[TextContent] -- A list of TextContent objects representing the response(s) from the LLM. """ - if not prompt: - raise ValueError("Prompt cannot be `None` or empty") - if request_settings is None: - raise ValueError("Request settings cannot be `None`") - if request_settings.max_tokens < 1: - raise AIException( - AIException.ErrorCodes.InvalidRequest, - "The max tokens must be greater than 0, " - f"but was {request_settings.max_tokens}", - ) + settings.prompt = prompt + if not settings.ai_model_id: + settings.ai_model_id = self.ai_model_id try: - palm.configure(api_key=self._api_key) + palm.configure(api_key=self.api_key) except Exception as ex: raise PermissionError( "Google PaLM service failed to configure. 
Invalid API key provided.", ex, ) try: - response = palm.generate_text( - model=self._model_id, - prompt=prompt, - temperature=request_settings.temperature, - max_output_tokens=request_settings.max_tokens, - stop_sequences=( - request_settings.stop_sequences - if request_settings.stop_sequences is not None - and len(request_settings.stop_sequences) > 0 - else None - ), - candidate_count=request_settings.number_of_responses, - top_p=request_settings.top_p, - ) + response = palm.generate_text(**settings.prepare_settings_dict()) except Exception as ex: raise AIException( AIException.ErrorCodes.ServiceError, "Google PaLM service failed to complete the prompt", ex, ) - return response + return [self._create_text_content(response, candidate) for candidate in response.candidates] + + def _create_text_content(self, response: Completion, candidate: TextCompletion) -> TextContent: + """Create a text content object from a candidate.""" + return TextContent( + inner_content=response, + ai_model_id=self.ai_model_id, + text=candidate.get("output"), + metadata={ + "filters": response.filters, + "safety_feedback": response.safety_feedback, + "citation_metadata": candidate.get("citation_metadata"), + "safety_ratings": candidate.get("safety_ratings"), + }, + ) + + async def complete_stream( + self, + prompt: str, + settings: GooglePalmTextPromptExecutionSettings, + logger: Optional[Any] = None, + ): + raise NotImplementedError("Google Palm API does not currently support streaming") + + def get_prompt_execution_settings_class(self) -> "PromptExecutionSettings": + """Create a request settings object.""" + return GooglePalmTextPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/google_palm/services/gp_text_embedding.py b/python/semantic_kernel/connectors/ai/google_palm/services/gp_text_embedding.py index aa1f19a7cf52..483eaa1509af 100644 --- a/python/semantic_kernel/connectors/ai/google_palm/services/gp_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/google_palm/services/gp_text_embedding.py @@ -1,38 +1,40 @@ # Copyright (c) Microsoft. All rights reserved. +import sys from typing import List +if sys.version_info >= (3, 9): + from typing import Annotated +else: + from typing_extensions import Annotated import google.generativeai as palm from numpy import array, ndarray +from pydantic import StringConstraints from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( EmbeddingGeneratorBase, ) -class GooglePalmTextEmbedding(EmbeddingGeneratorBase): - _model_id: str - _api_key: str +class GooglePalmTextEmbedding(EmbeddingGeneratorBase, AIServiceClientBase): + api_key: Annotated[str, StringConstraints(strip_whitespace=True, min_length=1)] - def __init__(self, model_id: str, api_key: str) -> None: + def __init__(self, ai_model_id: str, api_key: str) -> None: """ Initializes a new instance of the GooglePalmTextEmbedding class. 
Arguments: - model_id {str} -- GooglePalm model name, see + ai_model_id {str} -- GooglePalm model name, see https://developers.generativeai.google/models/language api_key {str} -- GooglePalm API key, see https://developers.generativeai.google/products/palm """ - if not api_key: - raise ValueError("The Google PaLM API key cannot be `None` or empty`") + super().__init__(ai_model_id=ai_model_id, api_key=api_key) - self._model_id = model_id - self._api_key = api_key - - async def generate_embeddings_async(self, texts: List[str]) -> ndarray: + async def generate_embeddings(self, texts: List[str]) -> ndarray: """ Generates embeddings for a list of texts. @@ -43,7 +45,7 @@ async def generate_embeddings_async(self, texts: List[str]) -> ndarray: ndarray -- Embeddings for the texts. """ try: - palm.configure(api_key=self._api_key) + palm.configure(api_key=self.api_key) except Exception as ex: raise PermissionError( "Google PaLM service failed to configure. Invalid API key provided.", @@ -53,7 +55,7 @@ async def generate_embeddings_async(self, texts: List[str]) -> ndarray: for text in texts: try: response = palm.generate_embeddings( - model=self._model_id, + model=self.ai_model_id, text=text, ) embeddings.append(array(response["embedding"])) diff --git a/python/semantic_kernel/connectors/ai/hugging_face/__init__.py b/python/semantic_kernel/connectors/ai/hugging_face/__init__.py index fe1cab0b6bbc..529763f35c59 100644 --- a/python/semantic_kernel/connectors/ai/hugging_face/__init__.py +++ b/python/semantic_kernel/connectors/ai/hugging_face/__init__.py @@ -1,5 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. +from semantic_kernel.connectors.ai.hugging_face.hf_prompt_execution_settings import ( + HuggingFacePromptExecutionSettings, +) from semantic_kernel.connectors.ai.hugging_face.services.hf_text_completion import ( HuggingFaceTextCompletion, ) @@ -7,4 +10,8 @@ HuggingFaceTextEmbedding, ) -__all__ = ["HuggingFaceTextCompletion", "HuggingFaceTextEmbedding"] +__all__ = [ + "HuggingFaceTextCompletion", + "HuggingFaceTextEmbedding", + "HuggingFacePromptExecutionSettings", +] diff --git a/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py new file mode 100644 index 000000000000..d4e9c1067ecd --- /dev/null +++ b/python/semantic_kernel/connectors/ai/hugging_face/hf_prompt_execution_settings.py @@ -0,0 +1,35 @@ +from typing import Any, Dict + +from transformers import GenerationConfig + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + + +class HuggingFacePromptExecutionSettings(PromptExecutionSettings): + do_sample: bool = True + max_new_tokens: int = 256 + num_return_sequences: int = 1 + stop_sequences: Any = None + pad_token_id: int = 50256 + temperature: float = 0.0 + top_p: float = 1.0 + + def get_generation_config(self) -> GenerationConfig: + return GenerationConfig( + **self.model_dump( + include={"max_new_tokens", "pad_token_id", "temperature", "top_p"}, + exclude_unset=True, + exclude_none=True, + by_alias=True, + ) + ) + + def prepare_settings_dict(self, **kwargs) -> Dict[str, Any]: + gen_config = self.get_generation_config() + settings = { + "generation_config": gen_config, + "num_return_sequences": self.num_return_sequences, + "do_sample": self.do_sample, + } + settings.update(kwargs) + return settings diff --git a/python/semantic_kernel/connectors/ai/hugging_face/services/__init__.py 
b/python/semantic_kernel/connectors/ai/hugging_face/services/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py index 8ce42cfb9565..9e14ad65b64a 100644 --- a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py +++ b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py @@ -1,39 +1,47 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger +import logging from threading import Thread -from typing import Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, AsyncIterable, Dict, List, Literal, Optional + +import torch +from transformers import AutoTokenizer, TextIteratorStreamer, pipeline from semantic_kernel.connectors.ai.ai_exception import AIException -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase +from semantic_kernel.connectors.ai.hugging_face.hf_prompt_execution_settings import ( + HuggingFacePromptExecutionSettings, ) from semantic_kernel.connectors.ai.text_completion_client_base import ( TextCompletionClientBase, ) -from semantic_kernel.utils.null_logger import NullLogger +from semantic_kernel.models.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.models.contents.text_content import TextContent + +if TYPE_CHECKING: + from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +logger: logging.Logger = logging.getLogger(__name__) -class HuggingFaceTextCompletion(TextCompletionClientBase): - _model_id: str - _task: str - _device: int - _log: Logger + +class HuggingFaceTextCompletion(TextCompletionClientBase, AIServiceClientBase): + task: Literal["summarization", "text-generation", "text2text-generation"] + device: str + generator: Any def __init__( self, - model_id: str, - device: Optional[int] = None, - task: Optional[str] = None, - log: Optional[Logger] = None, - model_kwargs: Dict[str, Any] = None, - pipeline_kwargs: Dict[str, Any] = {}, + ai_model_id: str, + task: Optional[str] = "text2text-generation", + device: Optional[int] = -1, + model_kwargs: Optional[Dict[str, Any]] = None, + pipeline_kwargs: Optional[Dict[str, Any]] = None, ) -> None: """ Initializes a new instance of the HuggingFaceTextCompletion class. Arguments: - model_id {str} -- Hugging Face model card string, see + ai_model_id {str} -- Hugging Face model card string, see https://huggingface.co/models device {Optional[int]} -- Device to run the model on, defaults to CPU, 0+ for GPU, -- None if using device_map instead. (If both device and device_map @@ -44,7 +52,7 @@ def __init__( - text-generation: takes incomplete text and returns a set of completion candidates. - text2text-generation (default): takes an input prompt and returns a completion. text2text-generation is the default as it behaves more like GPT-3+. - log {Optional[Logger]} -- Logger instance. + log -- Logger instance. (Deprecated) model_kwargs {Optional[Dict[str, Any]]} -- Additional dictionary of keyword arguments passed along to the model's `from_pretrained(..., **model_kwargs)` function. 
pipeline_kwargs {Optional[Dict[str, Any]]} -- Additional keyword arguments passed along @@ -53,139 +61,98 @@ def __init__( Note that this model will be downloaded from the Hugging Face model hub. """ - self._model_id = model_id - self._task = "text2text-generation" if task is None else task - self._log = log if log is not None else NullLogger() - self._model_kwargs = model_kwargs - self._pipeline_kwargs = pipeline_kwargs - - try: - import torch - import transformers - except (ImportError, ModuleNotFoundError): - raise ImportError( - "Please ensure that torch and transformers are installed to use HuggingFaceTextCompletion" - ) - - device_map = self._pipeline_kwargs.get("device_map", None) - if device is None: - self.device = "cpu" if device_map is None else None - else: - self.device = ( - "cuda:" + str(device) - if device >= 0 and torch.cuda.is_available() - else "cpu" - ) - - self.generator = transformers.pipeline( - task=self._task, - model=self._model_id, - device=self.device, - model_kwargs=self._model_kwargs, - **self._pipeline_kwargs + generator = pipeline( + task=task, + model=ai_model_id, + device=device, + model_kwargs=model_kwargs, + **pipeline_kwargs or {}, + ) + super().__init__( + ai_model_id=ai_model_id, + task=task, + device=(f"cuda:{device}" if device >= 0 and torch.cuda.is_available() else "cpu"), + generator=generator, ) - async def complete_async( + async def complete( self, prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ) -> Union[str, List[str]]: - try: - import transformers - - generation_config = transformers.GenerationConfig( - temperature=request_settings.temperature, - top_p=request_settings.top_p, - max_new_tokens=request_settings.max_tokens, - pad_token_id=50256, # EOS token - ) - - results = self.generator( - prompt, - do_sample=True, - num_return_sequences=request_settings.number_of_responses, - generation_config=generation_config, - ) + settings: HuggingFacePromptExecutionSettings, + **kwargs, + ) -> List[TextContent]: + """ + This is the method that is called from the kernel to get a response from a text-optimized LLM. - completions = list() - if self._task == "text-generation" or self._task == "text2text-generation": - for response in results: - completions.append(response["generated_text"]) - if len(completions) == 1: - return completions[0] - else: - return completions - - elif self._task == "summarization": - for response in results: - completions.append(response["summary_text"]) - if len(completions) == 1: - return completions[0] - else: - return completions - - else: - raise AIException( - AIException.ErrorCodes.InvalidConfiguration, - "Unsupported hugging face pipeline task: only \ - text-generation, text2text-generation, and summarization are supported.", - ) + Arguments: + prompt {str} -- The prompt to send to the LLM. + settings {HuggingFacePromptExecutionSettings} -- Settings for the request. + Returns: + List[TextContent] -- A list of TextContent objects representing the response(s) from the LLM. + """ + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. 
Please use the `logging` module instead.") + try: + results = self.generator(prompt, **settings.prepare_settings_dict()) except Exception as e: raise AIException("Hugging Face completion failed", e) + if isinstance(results, list): + return [self._create_text_content(results, result) for result in results] + return [self._create_text_content(results, results)] + + def _create_text_content(self, response: Any, candidate: Dict[str, str]) -> TextContent: + return TextContent( + inner_content=response, + ai_model_id=self.ai_model_id, + text=candidate["summary_text" if self.task == "summarization" else "generated_text"], + ) - async def complete_stream_async( + async def complete_stream( self, prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ): + settings: HuggingFacePromptExecutionSettings, + **kwargs, + ) -> AsyncIterable[List[StreamingTextContent]]: """ Streams a text completion using a Hugging Face model. Note that this method does not support multiple responses. Arguments: prompt {str} -- Prompt to complete. - request_settings {CompleteRequestSettings} -- Request settings. + settings {HuggingFacePromptExecutionSettings} -- Request settings. Yields: - str -- Completion result. + List[StreamingTextContent] -- List of StreamingTextContent objects. """ - if request_settings.number_of_responses > 1: + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") + if settings.num_return_sequences > 1: raise AIException( AIException.ErrorCodes.InvalidConfiguration, "HuggingFace TextIteratorStreamer does not stream multiple responses in a parseable format. \ - If you need multiple responses, please use the complete_async method.", + If you need multiple responses, please use the complete method.", ) try: - import transformers - - generation_config = transformers.GenerationConfig( - temperature=request_settings.temperature, - top_p=request_settings.top_p, - max_new_tokens=request_settings.max_tokens, - pad_token_id=50256, # EOS token - ) - - tokenizer = transformers.AutoTokenizer.from_pretrained(self._model_id) - streamer = transformers.TextIteratorStreamer(tokenizer) - args = {prompt} - kwargs = { - "num_return_sequences": request_settings.number_of_responses, - "generation_config": generation_config, - "streamer": streamer, - "do_sample": True, - } - + streamer = TextIteratorStreamer(AutoTokenizer.from_pretrained(self.ai_model_id)) # See https://github.com/huggingface/transformers/blob/main/src/transformers/generation/streamers.py#L159 - thread = Thread(target=self.generator, args=args, kwargs=kwargs) + thread = Thread( + target=self.generator, args={prompt}, kwargs=settings.prepare_settings_dict(streamer=streamer) + ) thread.start() for new_text in streamer: - yield new_text + yield [ + StreamingTextContent( + choice_index=0, inner_content=new_text, text=new_text, ai_model_id=self.ai_model_id + ) + ] thread.join() except Exception as e: raise AIException("Hugging Face completion failed", e) + + def get_prompt_execution_settings_class(self) -> "PromptExecutionSettings": + """Create a request settings object.""" + return HuggingFacePromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py index 97fae9cace4c..b7419c0ac2a0 100644 --- a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py +++ 
b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py @@ -1,60 +1,52 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import List, Optional +import logging +from typing import Any, List, Optional +import sentence_transformers +import torch from numpy import array, ndarray from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( EmbeddingGeneratorBase, ) -from semantic_kernel.utils.null_logger import NullLogger +logger: logging.Logger = logging.getLogger(__name__) -class HuggingFaceTextEmbedding(EmbeddingGeneratorBase): - _model_id: str - _device: int - _log: Logger + +class HuggingFaceTextEmbedding(EmbeddingGeneratorBase, AIServiceClientBase): + device: str + generator: Any def __init__( self, - model_id: str, + ai_model_id: str, device: Optional[int] = -1, - log: Optional[Logger] = None, + log: Optional[Any] = None, ) -> None: """ Initializes a new instance of the HuggingFaceTextEmbedding class. Arguments: - model_id {str} -- Hugging Face model card string, see + ai_model_id {str} -- Hugging Face model card string, see https://huggingface.co/sentence-transformers device {Optional[int]} -- Device to run the model on, -1 for CPU, 0+ for GPU. - log {Optional[Logger]} -- Logger instance. + log -- The logger instance to use. (Optional) (Deprecated) Note that this model will be downloaded from the Hugging Face model hub. """ - self._model_id = model_id - self._log = log if log is not None else NullLogger() - - try: - import sentence_transformers - import torch - except ImportError: - raise ImportError( - "Please ensure that torch and sentence-transformers are installed to use HuggingFaceTextEmbedding" - ) - - self.device = ( - "cuda:" + str(device) - if device >= 0 and torch.cuda.is_available() - else "cpu" - ) - self.generator = sentence_transformers.SentenceTransformer( - model_name_or_path=self._model_id, device=self.device + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + resolved_device = f"cuda:{device}" if device >= 0 and torch.cuda.is_available() else "cpu" + super().__init__( + ai_model_id=ai_model_id, + device=resolved_device, + generator=sentence_transformers.SentenceTransformer(model_name_or_path=ai_model_id, device=resolved_device), ) - async def generate_embeddings_async(self, texts: List[str]) -> ndarray: + async def generate_embeddings(self, texts: List[str]) -> ndarray: """ Generates embeddings for a list of texts. @@ -65,7 +57,7 @@ async def generate_embeddings_async(self, texts: List[str]) -> ndarray: ndarray -- Embeddings for the texts. 
""" try: - self._log.info(f"Generating embeddings for {len(texts)} texts") + logger.info(f"Generating embeddings for {len(texts)} texts") embeddings = self.generator.encode(texts) return array(embeddings) except Exception as e: diff --git a/python/semantic_kernel/connectors/ai/ollama/__init__.py b/python/semantic_kernel/connectors/ai/ollama/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/python/semantic_kernel/connectors/ai/ollama/ollama_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/ollama/ollama_prompt_execution_settings.py new file mode 100644 index 000000000000..bad8d43d08e1 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/ollama/ollama_prompt_execution_settings.py @@ -0,0 +1,24 @@ +from typing import Any, Dict, List, Literal, Optional + +from pydantic import Field + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + + +class OllamaPromptExecutionSettings(PromptExecutionSettings): + ai_model_id: str = Field("", alias="model") + format: Optional[Literal["json"]] = None + options: Optional[Dict[str, Any]] = None + stream: bool = False + + +class OllamaTextPromptExecutionSettings(OllamaPromptExecutionSettings): + prompt: Optional[str] = None + context: Optional[str] = None + system: Optional[str] = None + template: Optional[str] = None + raw: bool = False + + +class OllamaChatPromptExecutionSettings(OllamaPromptExecutionSettings): + messages: Optional[List[Dict[str, str]]] = None diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py new file mode 100644 index 000000000000..8e54734892d0 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py @@ -0,0 +1,185 @@ +# Copyright (c) Microsoft. All rights reserved. + +import json +import logging +from typing import AsyncIterable, Dict, List, Optional + +import aiohttp +from pydantic import HttpUrl + +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase +from semantic_kernel.connectors.ai.chat_completion_client_base import ( + ChatCompletionClientBase, +) +from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import ( + OllamaChatPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.ollama.utils import AsyncSession +from semantic_kernel.connectors.ai.text_completion_client_base import ( + TextCompletionClientBase, +) +from semantic_kernel.models.contents.chat_message_content import ChatMessageContent +from semantic_kernel.models.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.models.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.models.contents.text_content import TextContent + +logger: logging.Logger = logging.getLogger(__name__) + + +class OllamaChatCompletion(TextCompletionClientBase, ChatCompletionClientBase, AIServiceClientBase): + """ + Initializes a new instance of the OllamaChatCompletion class. + + Make sure to have the ollama service running either locally or remotely. + + Arguments: + ai_model_id {str} -- Ollama model name, see https://ollama.ai/library + url {Optional[Union[str, HttpUrl]]} -- URL of the Ollama server, defaults to http://localhost:11434/api/chat + session {Optional[aiohttp.ClientSession]} -- Optional client session to use for requests. 
+ """ + + url: HttpUrl = "http://localhost:11434/api/chat" + session: Optional[aiohttp.ClientSession] = None + + async def complete_chat( + self, + messages: List[Dict[str, str]], + settings: OllamaChatPromptExecutionSettings, + **kwargs, + ) -> List[ChatMessageContent]: + """ + This is the method that is called from the kernel to get a response from a chat-optimized LLM. + + Arguments: + messages {List[ChatMessage]} -- A list of chat messages, that can be rendered into a + set of messages, from system, user, assistant and function. + settings {PromptExecutionSettings} -- Settings for the request. + logger {Logger} -- A logger to use for logging. (Deprecated) + + Returns: + List[ChatMessageContent] -- A list of ChatMessageContent objects representing the response(s) from the LLM. + """ + settings.messages = messages + settings.stream = False + async with AsyncSession(self.session) as session: + async with session.post(str(self.url), json=settings.prepare_settings_dict()) as response: + response.raise_for_status() + response_object = await response.json() + return [ + ChatMessageContent( + inner_content=response_object, + ai_model_id=self.ai_model_id, + role="assistant", + content=response_object.get("message", {"content": None}).get("content", None), + ) + ] + + async def complete_chat_stream( + self, + messages: List[Dict[str, str]], + settings: OllamaChatPromptExecutionSettings, + **kwargs, + ) -> AsyncIterable[List[StreamingChatMessageContent]]: + """ + Streams a text completion using a Ollama model. + Note that this method does not support multiple responses. + + Arguments: + prompt {str} -- Prompt to complete. + settings {OllamaChatPromptExecutionSettings} -- Request settings. + + Yields: + List[StreamingChatMessageContent] -- Stream of StreamingChatMessageContent objects. + """ + settings.messages = messages + settings.stream = True + async with AsyncSession(self.session) as session: + async with session.post(str(self.url), json=settings.prepare_settings_dict()) as response: + response.raise_for_status() + async for line in response.content: + body = json.loads(line) + if body.get("done") and body.get("message", {}).get("content") is None: + break + yield [ + StreamingChatMessageContent( + choice_index=0, + inner_content=body, + ai_model_id=self.ai_model_id, + content=body.get("message", {"content": None}).get("content", None), + ) + ] + if body.get("done"): + break + + async def complete( + self, + prompt: str, + settings: OllamaChatPromptExecutionSettings, + **kwargs, + ) -> List[TextContent]: + """ + This is the method that is called from the kernel to get a response from a text-optimized LLM. + + Arguments: + prompt {str} -- The prompt to send to the LLM. + settings {OllamaChatPromptExecutionSettings} -- Settings for the request. + logger {Logger} -- A logger to use for logging (deprecated). + + Returns: + List["TextContent"] -- The completion result(s). 
+ """ + settings.messages = [{"role": "user", "content": prompt}] + settings.stream = False + async with AsyncSession(self.session) as session: + async with session.post(str(self.url), json=settings.prepare_settings_dict()) as response: + response.raise_for_status() + response_object = await response.json() + return [ + TextContent( + inner_content=response_object, + ai_model_id=self.ai_model_id, + text=response_object.get("message", {"content": None}).get("content", None), + ) + ] + + async def complete_stream( + self, + prompt: str, + settings: OllamaChatPromptExecutionSettings, + **kwargs, + ) -> AsyncIterable[List[StreamingTextContent]]: + """ + Streams a text completion using a Ollama model. + Note that this method does not support multiple responses. + + Arguments: + prompt {str} -- Prompt to complete. + settings {OllamaChatPromptExecutionSettings} -- Request settings. + + Yields: + List["StreamingTextContent"] -- The result stream made up of StreamingTextContent objects. + """ + + settings.messages = [{"role": "user", "content": prompt}] + settings.stream = True + async with AsyncSession(self.session) as session: + async with session.post(str(self.url), json=settings.prepare_settings_dict()) as response: + response.raise_for_status() + async for line in response.content: + body = json.loads(line) + if body.get("done") and body.get("message", {}).get("content") is None: + break + yield [ + StreamingTextContent( + choice_index=0, + inner_content=body, + ai_model_id=self.ai_model_id, + text=body.get("message", {"content": None}).get("content", None), + ) + ] + if body.get("done"): + break + + def get_prompt_execution_settings_class(self) -> "OllamaChatPromptExecutionSettings": + """Get the request settings class.""" + return OllamaChatPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_completion.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_completion.py new file mode 100644 index 000000000000..a44540e6aef4 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_completion.py @@ -0,0 +1,99 @@ +# Copyright (c) Microsoft. All rights reserved. + +import json +import logging +from typing import AsyncIterable, List, Optional + +import aiohttp +from pydantic import HttpUrl + +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase +from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import ( + OllamaTextPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.ollama.utils import AsyncSession +from semantic_kernel.connectors.ai.text_completion_client_base import ( + TextCompletionClientBase, +) +from semantic_kernel.models.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.models.contents.text_content import TextContent + +logger: logging.Logger = logging.getLogger(__name__) + + +class OllamaTextCompletion(TextCompletionClientBase, AIServiceClientBase): + """ + Initializes a new instance of the OllamaTextCompletion class. + + Make sure to have the ollama service running either locally or remotely. 
+ + Arguments: + ai_model_id {str} -- Ollama model name, see https://ollama.ai/library + url {Optional[Union[str, HttpUrl]]} -- URL of the Ollama server, defaults to http://localhost:11434/api/generate + """ + + url: HttpUrl = "http://localhost:11434/api/generate" + session: Optional[aiohttp.ClientSession] = None + + async def complete( + self, + prompt: str, + settings: OllamaTextPromptExecutionSettings, + **kwargs, + ) -> List[TextContent]: + """ + This is the method that is called from the kernel to get a response from a text-optimized LLM. + + Arguments: + prompt {str} -- The prompt to send to the LLM. + settings {OllamaTextPromptExecutionSettings} -- Settings for the request. + + Returns: + List[TextContent] -- A list of TextContent objects representing the response(s) from the LLM. + """ + settings.prompt = prompt + settings.stream = False + async with AsyncSession(self.session) as session: + async with session.post(self.url, json=settings.prepare_settings_dict()) as response: + response.raise_for_status() + text = await response.text() + return [TextContent(inner_content=text, ai_model_id=self.ai_model_id, text=text)] + + async def complete_stream( + self, + prompt: str, + settings: OllamaTextPromptExecutionSettings, + **kwargs, + ) -> AsyncIterable[List[StreamingTextContent]]: + """ + Streams a text completion using a Ollama model. + Note that this method does not support multiple responses, + but the result will be a list anyway. + + Arguments: + prompt {str} -- Prompt to complete. + settings {OllamaTextPromptExecutionSettings} -- Request settings. + + Yields: + List[StreamingTextContent] -- Completion result. + """ + settings.prompt = prompt + settings.stream = True + async with AsyncSession(self.session) as session: + async with session.post(self.url, json=settings.prepare_settings_dict()) as response: + response.raise_for_status() + async for line in response.content: + body = json.loads(line) + if body.get("done") and body.get("response") is None: + break + yield [ + StreamingTextContent( + choice_index=0, inner_content=body, ai_model_id=self.ai_model_id, text=body.get("response") + ) + ] + if body.get("done"): + break + + def get_prompt_execution_settings_class(self) -> "OllamaTextPromptExecutionSettings": + """Get the request settings class.""" + return OllamaTextPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py new file mode 100644 index 000000000000..95955b2c8343 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_embedding.py @@ -0,0 +1,49 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +from typing import List, Optional + +import aiohttp +from numpy import array, ndarray +from pydantic import HttpUrl + +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase +from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( + EmbeddingGeneratorBase, +) +from semantic_kernel.connectors.ai.ollama.utils import AsyncSession + +logger: logging.Logger = logging.getLogger(__name__) + + +class OllamaTextEmbedding(EmbeddingGeneratorBase, AIServiceClientBase): + """Ollama embeddings client. + + Make sure to have the ollama service running either locally or remotely. 
+ + Arguments: + ai_model_id {str} -- Ollama model name, see https://ollama.ai/library + url {Optional[Union[str, HttpUrl]]} -- URL of the Ollama server, defaults to http://localhost:11434/api/embeddings + session {Optional[aiohttp.ClientSession]} -- Optional client session to use for requests. + """ + + url: HttpUrl = "http://localhost:11434/api/embeddings" + session: Optional[aiohttp.ClientSession] = None + + async def generate_embeddings(self, texts: List[str], **kwargs) -> ndarray: + """ + Generates embeddings for a list of texts. + + Arguments: + texts {List[str]} -- Texts to generate embeddings for. + + Returns: + ndarray -- Embeddings for the texts. + """ + async with AsyncSession(self.session) as session: + async with session.post( + self.url, + json={"model": self.ai_model_id, "texts": texts, "options": kwargs}, + ) as response: + response.raise_for_status() + return array(await response.json()) diff --git a/python/semantic_kernel/connectors/ai/ollama/utils.py b/python/semantic_kernel/connectors/ai/ollama/utils.py new file mode 100644 index 000000000000..b78c0e7ef886 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/ollama/utils.py @@ -0,0 +1,12 @@ +import aiohttp + + +class AsyncSession: + def __init__(self, session: aiohttp.ClientSession = None): + self._session = session if session else aiohttp.ClientSession() + + async def __aenter__(self): + return await self._session.__aenter__() + + async def __aexit__(self, *args, **kwargs): + await self._session.close() diff --git a/python/semantic_kernel/connectors/ai/open_ai/__init__.py b/python/semantic_kernel/connectors/ai/open_ai/__init__.py index f7a981b188a0..9fd5ecae90b2 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/__init__.py +++ b/python/semantic_kernel/connectors/ai/open_ai/__init__.py @@ -1,5 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureChatPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, + OpenAIPromptExecutionSettings, + OpenAITextPromptExecutionSettings, +) from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import ( AzureChatCompletion, ) @@ -20,6 +28,10 @@ ) __all__ = [ + "OpenAIPromptExecutionSettings", + "OpenAIChatPromptExecutionSettings", + "OpenAITextPromptExecutionSettings", + "AzureChatPromptExecutionSettings", "OpenAITextCompletion", "OpenAIChatCompletion", "OpenAITextEmbedding", diff --git a/python/semantic_kernel/connectors/ai/open_ai/const.py b/python/semantic_kernel/connectors/ai/open_ai/const.py new file mode 100644 index 000000000000..1d9ce6ad89fd --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/const.py @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Final + +DEFAULT_AZURE_API_VERSION: Final[str] = "2023-05-15" +USER_AGENT: Final[str] = "User-Agent" +DEFAULT_CHAT_SYSTEM_PROMPT: Final[str] = "Assistant is a large language model." diff --git a/python/semantic_kernel/connectors/ai/open_ai/contents/__init__.py b/python/semantic_kernel/connectors/ai/open_ai/contents/__init__.py new file mode 100644 index 000000000000..c8fe66798004 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/contents/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) Microsoft. All rights reserved. 
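
The Ollama connectors introduced above are plain pydantic services that POST to an Ollama HTTP endpoint. The following is a minimal usage sketch, assuming an Ollama server is running on the default http://localhost:11434 and that a model tagged "llama2" has been pulled; neither assumption is part of this patch, and the module paths simply mirror the new files added here.

import asyncio

from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import (
    OllamaChatPromptExecutionSettings,
)
from semantic_kernel.connectors.ai.ollama.services.ollama_chat_completion import (
    OllamaChatCompletion,
)


async def main() -> None:
    # The model id is passed through the settings' "model" alias defined in
    # ollama_prompt_execution_settings.py; "llama2" is an assumed example tag.
    service = OllamaChatCompletion(ai_model_id="llama2")
    settings = OllamaChatPromptExecutionSettings(model="llama2")
    results = await service.complete_chat(
        [{"role": "user", "content": "Write a haiku about code reviews."}],
        settings,
    )
    print(results[0].content)


asyncio.run(main())
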
+from semantic_kernel.connectors.ai.open_ai.contents.azure_chat_message_content import ( + AzureChatMessageContent, +) +from semantic_kernel.connectors.ai.open_ai.contents.azure_streaming_chat_message_content import ( + AzureStreamingChatMessageContent, +) +from semantic_kernel.connectors.ai.open_ai.contents.open_ai_chat_message_content import OpenAIChatMessageContent +from semantic_kernel.connectors.ai.open_ai.contents.open_ai_streaming_chat_message_content import ( + OpenAIStreamingChatMessageContent, +) + +__all__ = [ + "OpenAIChatMessageContent", + "OpenAIStreamingChatMessageContent", + "AzureChatMessageContent", + "AzureStreamingChatMessageContent", +] diff --git a/python/semantic_kernel/connectors/ai/open_ai/contents/azure_chat_message_content.py b/python/semantic_kernel/connectors/ai/open_ai/contents/azure_chat_message_content.py new file mode 100644 index 000000000000..7a724a62bda8 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/contents/azure_chat_message_content.py @@ -0,0 +1,27 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import Optional + +from semantic_kernel.connectors.ai.open_ai.contents.open_ai_chat_message_content import OpenAIChatMessageContent + + +class AzureChatMessageContent(OpenAIChatMessageContent): + """This is the class for Azure OpenAI chat message response content. + + Args: + inner_content: ChatCompletion - The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer can leverage the full thing. + ai_model_id: Optional[str] - The id of the AI model that generated this response. + metadata: Dict[str, Any] - Any metadata that should be attached to the response. + role: ChatRole - The role of the chat message. + content: Optional[str] - The text of the response. + encoding: Optional[str] - The encoding of the text. + function_call: Optional[FunctionCall] - The function call that was generated by this response. + tool_calls: Optional[List[ToolCall]] - The tool calls that were generated by this response. + tool_message: Optional[str] - The content of the tool message generated by the extensions API. + + Methods: + __str__: Returns the content of the response. + """ + + tool_message: Optional[str] = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/contents/azure_streaming_chat_message_content.py b/python/semantic_kernel/connectors/ai/open_ai/contents/azure_streaming_chat_message_content.py new file mode 100644 index 000000000000..cecca180fceb --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/contents/azure_streaming_chat_message_content.py @@ -0,0 +1,75 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import Optional + +from semantic_kernel.connectors.ai.open_ai.contents.open_ai_streaming_chat_message_content import ( + OpenAIStreamingChatMessageContent, +) + + +class AzureStreamingChatMessageContent(OpenAIStreamingChatMessageContent): + """This is the class for Azure OpenAI streaming chat message response content. + + The end-user will have to either do something directly or gather them and combine them into a + new instance. + + Args: + choice_index: int - The index of the choice that generated this response. + inner_content: ChatCompletionChunk - The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer can leverage the full thing. + ai_model_id: Optional[str] - The id of the AI model that generated this response. 
+ metadata: Dict[str, Any] - Any metadata that should be attached to the response. + role: Optional[ChatRole] - The role of the chat message, defaults to ASSISTANT. + content: Optional[str] - The text of the response. + encoding: Optional[str] - The encoding of the text. + function_call: Optional[FunctionCall] - The function call that was generated by this response. + tool_calls: Optional[List[ToolCall]] - The tool calls that were generated by this response. + tool_message: Optional[str] - The content of the tool message generated by the extensions API. + + Methods: + __str__: Returns the content of the response. + __bytes__: Returns the content of the response encoded in the encoding. + __add__: Combines two StreamingChatMessageContent instances. + """ + + tool_message: Optional[str] = None + + def __add__(self, other: "AzureStreamingChatMessageContent") -> "AzureStreamingChatMessageContent": + """When combining two AzureOpenAIStreamingChatMessageContent instances, + the content fields are combined, as well as the arguments of the function or tool calls. + + The inner_content of the first one is used, ai_model_id and encoding should be the same, + if role is set, they should be the same. + """ + if self.choice_index != other.choice_index: + raise ValueError("Cannot add StreamingChatMessageContent with different choice_index") + if self.ai_model_id != other.ai_model_id: + raise ValueError("Cannot add StreamingChatMessageContent from different ai_model_id") + if self.encoding != other.encoding: + raise ValueError("Cannot add StreamingChatMessageContent with different encoding") + if self.role and other.role and self.role != other.role: + raise ValueError("Cannot add StreamingChatMessageContent with different role") + fc = (self.function_call + other.function_call) if self.function_call else other.function_call + if self.tool_calls: + tc = [] + for index, tool in self.tool_calls: + if other.tool_calls: + tc.append(tool + other.tool_calls[index]) + else: + tc.append(tool) + else: + tc = other.tool_calls + + return AzureStreamingChatMessageContent( + choice_index=self.choice_index, + inner_content=self.inner_content, + ai_model_id=self.ai_model_id, + metadata=self.metadata, + role=self.role, + content=(self.content or "") + (other.content or ""), + encoding=self.encoding, + finish_reason=self.finish_reason or other.finish_reason, + function_call=fc, + tool_calls=tc, + tool_message=(self.tool_message or "") + (other.tool_message or ""), + ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/contents/open_ai_chat_message_content.py b/python/semantic_kernel/connectors/ai/open_ai/contents/open_ai_chat_message_content.py new file mode 100644 index 000000000000..d725b699a845 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/contents/open_ai_chat_message_content.py @@ -0,0 +1,32 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import List, Optional + +from openai.types.chat import ChatCompletion + +from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import FunctionCall +from semantic_kernel.connectors.ai.open_ai.models.chat.tool_calls import ToolCall +from semantic_kernel.models.contents import ChatMessageContent + + +class OpenAIChatMessageContent(ChatMessageContent): + """This is the class for OpenAI chat message response content. + + Args: + inner_content: ChatCompletion - The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer can leverage the full thing. 
+ ai_model_id: Optional[str] - The id of the AI model that generated this response. + metadata: Dict[str, Any] - Any metadata that should be attached to the response. + role: ChatRole - The role of the chat message. + content: Optional[str] - The text of the response. + encoding: Optional[str] - The encoding of the text. + function_call: Optional[FunctionCall] - The function call that was generated by this response. + tool_calls: Optional[List[ToolCall]] - The tool calls that were generated by this response. + + Methods: + __str__: Returns the content of the response. + """ + + inner_content: ChatCompletion + function_call: Optional[FunctionCall] = None + tool_calls: Optional[List[ToolCall]] = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/contents/open_ai_streaming_chat_message_content.py b/python/semantic_kernel/connectors/ai/open_ai/contents/open_ai_streaming_chat_message_content.py new file mode 100644 index 000000000000..06f5749a64e8 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/contents/open_ai_streaming_chat_message_content.py @@ -0,0 +1,77 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import List, Optional + +from openai.types.chat.chat_completion_chunk import ChatCompletionChunk + +from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import FunctionCall +from semantic_kernel.connectors.ai.open_ai.models.chat.tool_calls import ToolCall +from semantic_kernel.models.contents import StreamingChatMessageContent + + +class OpenAIStreamingChatMessageContent(StreamingChatMessageContent): + """This is the class for OpenAI streaming chat message response content. + + The end-user will have to either do something directly or gather them and combine them into a + new instance. + + Args: + choice_index: int - The index of the choice that generated this response. + inner_content: ChatCompletionChunk - The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer can leverage the full thing. + ai_model_id: Optional[str] - The id of the AI model that generated this response. + metadata: Dict[str, Any] - Any metadata that should be attached to the response. + role: Optional[ChatRole] - The role of the chat message, defaults to ASSISTANT. + content: Optional[str] - The text of the response. + encoding: Optional[str] - The encoding of the text. + function_call: Optional[FunctionCall] - The function call that was generated by this response. + tool_calls: Optional[List[ToolCall]] - The tool calls that were generated by this response. + + Methods: + __str__: Returns the content of the response. + __bytes__: Returns the content of the response encoded in the encoding. + __add__: Combines two StreamingChatMessageContent instances. + """ + + inner_content: ChatCompletionChunk + function_call: Optional[FunctionCall] = None + tool_calls: Optional[List[ToolCall]] = None + + def __add__(self, other: "OpenAIStreamingChatMessageContent") -> "OpenAIStreamingChatMessageContent": + """When combining two OpenAIStreamingChatMessageContent instances, + the content fields are combined, as well as the arguments of the function or tool calls. + + The inner_content of the first one is used, ai_model_id and encoding should be the same, + if role is set, they should be the same. 
+ """ + if self.choice_index != other.choice_index: + raise ValueError("Cannot add StreamingChatMessageContent with different choice_index") + if self.ai_model_id != other.ai_model_id: + raise ValueError("Cannot add StreamingChatMessageContent from different ai_model_id") + if self.encoding != other.encoding: + raise ValueError("Cannot add StreamingChatMessageContent with different encoding") + if self.role and other.role and self.role != other.role: + raise ValueError("Cannot add StreamingChatMessageContent with different role") + fc = (self.function_call + other.function_call) if self.function_call else other.function_call + if self.tool_calls: + tc = [] + for index, tool in self.tool_calls: + if other.tool_calls: + tc.append(tool + other.tool_calls[index]) + else: + tc.append(tool) + else: + tc = other.tool_calls + + return OpenAIStreamingChatMessageContent( + choice_index=self.choice_index, + inner_content=self.inner_content, + ai_model_id=self.ai_model_id, + metadata=self.metadata, + role=self.role, + content=(self.content or "") + (other.content or ""), + encoding=self.encoding, + finish_reason=self.finish_reason or other.finish_reason, + function_call=fc, + tool_calls=tc, + ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/exceptions/__init__.py b/python/semantic_kernel/connectors/ai/open_ai/exceptions/__init__.py new file mode 100644 index 000000000000..2a50eae89411 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/exceptions/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Microsoft. All rights reserved. diff --git a/python/semantic_kernel/connectors/ai/open_ai/exceptions/content_filter_ai_exception.py b/python/semantic_kernel/connectors/ai/open_ai/exceptions/content_filter_ai_exception.py new file mode 100644 index 000000000000..4c20c57b6f9e --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/exceptions/content_filter_ai_exception.py @@ -0,0 +1,114 @@ +# Copyright (c) Microsoft. All rights reserved. + +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict + +from openai import BadRequestError + +from semantic_kernel.connectors.ai.ai_exception import AIException + + +class ContentFilterResultSeverity(Enum): + HIGH = "high" + MEDIUM = "medium" + SAFE = "safe" + + +@dataclass +class ContentFilterResult: + filtered: bool = False + detected: bool = False + severity: ContentFilterResultSeverity = ContentFilterResultSeverity.SAFE + + @classmethod + def from_inner_error_result(cls, inner_error_results: Dict[str, Any]) -> "ContentFilterResult": + """Creates a ContentFilterResult from the inner error results. + + Arguments: + key {str} -- The key to get the inner error result from. + inner_error_results {Dict[str, Any]} -- The inner error results. + + Returns: + ContentFilterResult -- The ContentFilterResult. + """ + return cls( + filtered=inner_error_results.get("filtered", False), + detected=inner_error_results.get("detected", False), + severity=ContentFilterResultSeverity( + inner_error_results.get("severity", ContentFilterResultSeverity.SAFE.value) + ), + ) + + +class ContentFilterCodes(Enum): + RESPONSIBLE_AI_POLICY_VIOLATION = "ResponsibleAIPolicyViolation" + + +class ContentFilterAIException(AIException): + """AI exception for an error from Azure OpenAI's content filter""" + + # The parameter that caused the error. + _param: str + + # The error code specific to the content filter. + _content_filter_code: ContentFilterCodes + + # The results of the different content filter checks. 
+ _content_filter_result: Dict[str, ContentFilterResult] + + def __init__( + self, + error_code: AIException.ErrorCodes, + message: str, + inner_exception: BadRequestError, + ) -> None: + """Initializes a new instance of the ContentFilterAIException class. + + Arguments: + error_code {ErrorCodes} -- The error code. + message {str} -- The error message. + inner_exception {Exception} -- The inner exception. + """ + super().__init__(error_code, message, inner_exception) + + self._param = inner_exception.param + + inner_error = inner_exception.body.get("innererror", {}) + self._content_filter_code = ContentFilterCodes( + inner_error.get("code", ContentFilterCodes.RESPONSIBLE_AI_POLICY_VIOLATION.value) + ) + self._content_filter_result = dict( + [ + key, + ContentFilterResult.from_inner_error_result(values), + ] + for key, values in inner_error.get("content_filter_result", {}).items() + ) + + @property + def param(self) -> str: + """Gets the parameter that caused the error. + + Returns: + str -- The parameter that caused the error. + """ + return self._param + + @property + def content_filter_code(self) -> ContentFilterCodes: + """Gets the error code specific to the content filter. + + Returns: + ContentFilterCode -- The error code specific to the content filter. + """ + return self._content_filter_code + + @property + def content_filter_result(self) -> Dict[str, ContentFilterResult]: + """Gets the result of the content filter checks. + + Returns: + Dict[str, ContentFilterResult] -- The result of the content filter checks. + """ + return self._content_filter_result diff --git a/python/semantic_kernel/connectors/ai/open_ai/models/chat/function_call.py b/python/semantic_kernel/connectors/ai/open_ai/models/chat/function_call.py index 6a645ba89e51..af2b6b609543 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/models/chat/function_call.py +++ b/python/semantic_kernel/connectors/ai/open_ai/models/chat/function_call.py @@ -1,16 +1,28 @@ """Class to hold chat messages.""" import json -from typing import Dict, Tuple +from typing import Dict, Optional, Tuple +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.sk_pydantic import SKBaseModel -class FunctionCall(SKBaseModel): +class FunctionCall(KernelBaseModel): """Class to hold a function call response.""" - name: str - arguments: str + name: Optional[str] = None + arguments: Optional[str] = None + # TODO: check if needed + id: Optional[str] = None + + def __add__(self, other: Optional["FunctionCall"]) -> "FunctionCall": + """Add two function calls together, combines the arguments, ignores the name.""" + if not other: + return self + return FunctionCall( + name=self.name or other.name, + arguments=(self.arguments or "") + (other.arguments or ""), + id=self.id or other.id, + ) def parse_arguments(self) -> Dict[str, str]: """Parse the arguments into a dictionary.""" @@ -22,15 +34,15 @@ def parse_arguments(self) -> Dict[str, str]: def to_context_variables(self) -> ContextVariables: """Return the arguments as a ContextVariables instance.""" args = self.parse_arguments() - return ContextVariables(variables={k.lower(): v for k, v in args.items()}) + return ContextVariables(variables={k.lower(): str(v) for k, v in args.items()}) def split_name(self) -> Tuple[str, str]: - """Split the name into a skill and function name.""" + """Split the name into a plugin and function name.""" if "-" not in self.name: return None, self.name return 
self.name.split("-") def split_name_dict(self) -> dict: - """Split the name into a skill and function name.""" + """Split the name into a plugin and function name.""" parts = self.split_name() - return {"skill_name": parts[0], "function_name": parts[1]} + return {"plugin_name": parts[0], "function_name": parts[1]} diff --git a/python/semantic_kernel/connectors/ai/open_ai/models/chat/open_ai_chat_message.py b/python/semantic_kernel/connectors/ai/open_ai/models/chat/open_ai_chat_message.py index 4e0d90c2088c..13684e3f88aa 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/models/chat/open_ai_chat_message.py +++ b/python/semantic_kernel/connectors/ai/open_ai/models/chat/open_ai_chat_message.py @@ -4,6 +4,7 @@ from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import ( FunctionCall, ) +from semantic_kernel.connectors.ai.open_ai.models.chat.tool_calls import ToolCall from semantic_kernel.models.chat.chat_message import ChatMessage @@ -11,4 +12,7 @@ class OpenAIChatMessage(ChatMessage): """Class to hold openai chat messages, which might include name and function_call fields.""" name: Optional[str] = None + # TODO: handle tool_calls function_call: Optional[FunctionCall] = None + tool_calls: Optional[ToolCall] = None + tool_call_id: Optional[str] = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/models/chat/tool_calls.py b/python/semantic_kernel/connectors/ai/open_ai/models/chat/tool_calls.py new file mode 100644 index 000000000000..e72a2aa655b5 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/models/chat/tool_calls.py @@ -0,0 +1,23 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import Literal, Optional + +from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import FunctionCall +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class ToolCall(KernelBaseModel): + """Class to hold a tool call response.""" + + id: Optional[str] = None + type: Optional[Literal["function"]] = "function" + function: Optional[FunctionCall] = None + + def __add__(self, other: Optional["ToolCall"]) -> "ToolCall": + """Add two tool calls together, combines the function calls, ignores the id.""" + if not other: + return self + return ToolCall( + id=self.id or other.id, + type=self.type or other.type, + function=self.function + other.function if self.function else other.function, + ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py new file mode 100644 index 000000000000..f5d4fd4509f2 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py @@ -0,0 +1,88 @@ +import logging +from typing import Any, Dict, List, Literal, Optional, Union + +from pydantic import Field, SerializeAsAny +from pydantic.dataclasses import dataclass + +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, +) +from semantic_kernel.kernel_pydantic import KernelBaseModel + +logger = logging.getLogger(__name__) + + +@dataclass +class ConnectionStringAuthentication: + type: Literal["ConnectionString"] = "ConnectionString" + connectionString: Optional[str] = None + + +@dataclass +class ApiKeyAuthentication: + type: Literal["APIKey"] = "APIKey" + key: Optional[str] = None + + +@dataclass +class 
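A short sketch (not part of the diff) of how the FunctionCall and ToolCall models above combine and parse; the plugin-function name "math-Add" and the argument payload are illustrative only.

from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import FunctionCall
from semantic_kernel.connectors.ai.open_ai.models.chat.tool_calls import ToolCall

# Partial deltas from a stream merge with "+"; the name and id come from whichever side has them.
first = ToolCall(id="call_0", function=FunctionCall(name="math-Add", arguments='{"amo'))
second = ToolCall(function=FunctionCall(arguments='unt": 3}'))
merged = first + second

print(merged.function.arguments)          # '{"amount": 3}'
print(merged.function.split_name_dict())  # {'plugin_name': 'math', 'function_name': 'Add'}
print(merged.function.parse_arguments())  # {'amount': 3}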
AzureEmbeddingDependency: + type: Literal["DeploymentName"] = "DeploymentName" + deploymentName: Optional[str] = None + + +@dataclass +class AzureDataSourceParameters: + indexName: str + indexLanguage: Optional[str] = None + fieldsMapping: Dict[str, Any] = Field(default_factory=dict) + inScope: Optional[bool] = True + topNDocuments: Optional[int] = 5 + semanticConfiguration: Optional[str] = None + roleInformation: Optional[str] = None + filter: Optional[str] = None + embeddingKey: Optional[str] = None + embeddingEndpoint: Optional[str] = None + embeddingDeploymentName: Optional[str] = None + strictness: int = 3 + embeddingDependency: Optional[AzureEmbeddingDependency] = None + + +@dataclass +class AzureCosmosDBDataSource(AzureDataSourceParameters): + authentication: Optional[ConnectionStringAuthentication] = None + databaseName: Optional[str] = None + containerName: Optional[str] = None + embeddingDependencyType: Optional[AzureEmbeddingDependency] = None + + +@dataclass +class AzureAISearchDataSources(AzureDataSourceParameters): + endpoint: Optional[str] = None + key: Optional[str] = None + queryType: Literal["simple", "semantic", "vector", "vectorSimpleHybrid", "vectorSemanticHybrid"] = "simple" + authentication: Optional[ApiKeyAuthentication] = None + + +@dataclass +class AzureDataSources: + """Class to hold Azure AI data source parameters.""" + + type: Literal["AzureCognitiveSearch", "AzureCosmosDB"] = "AzureCognitiveSearch" + parameters: Optional[SerializeAsAny[AzureDataSourceParameters]] = None + + +# @dataclass +class ExtraBody(KernelBaseModel): + data_sources: Optional[List[AzureDataSources]] = Field(None, alias="dataSources") + input_language: Optional[str] = Field(None, serialization_alias="inputLanguage") + output_language: Optional[str] = Field(None, serialization_alias="outputLanguage") + + def __getitem__(self, item): + return getattr(self, item) + + +class AzureChatPromptExecutionSettings(OpenAIChatPromptExecutionSettings): + """Specific settings for the Azure OpenAI Chat Completion endpoint.""" + + response_format: Optional[str] = None + extra_body: Optional[Union[Dict[str, Any], ExtraBody]] = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py new file mode 100644 index 000000000000..62fff9b3251b --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py @@ -0,0 +1,81 @@ +import logging +from typing import Any, Dict, List, Literal, Optional, Union + +from pydantic import Field, field_validator, model_validator + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + +logger = logging.getLogger(__name__) + + +class OpenAIPromptExecutionSettings(PromptExecutionSettings): + """Common request settings for (Azure) OpenAI services.""" + + ai_model_id: Optional[str] = Field(None, serialization_alias="model") + frequency_penalty: float = Field(0.0, ge=-2.0, le=2.0) + logit_bias: Dict[Union[str, int], float] = Field(default_factory=dict) + max_tokens: int = Field(256, gt=0) + number_of_responses: int = Field(1, ge=1, le=128, serialization_alias="n") + presence_penalty: float = Field(0.0, ge=-2.0, le=2.0) + seed: Optional[int] = None + stop: Optional[Union[str, List[str]]] = None + stream: bool = False + temperature: float = Field(0.0, ge=0.0, le=2.0) + top_p: float = Field(1.0, ge=0.0, le=1.0) + 
user: Optional[str] = None + + +class OpenAITextPromptExecutionSettings(OpenAIPromptExecutionSettings): + """Specific settings for the completions endpoint.""" + + prompt: Optional[str] = None + best_of: Optional[int] = Field(None, ge=1) + echo: bool = False + logprobs: Optional[int] = Field(None, ge=0, le=5) + suffix: Optional[str] = None + + @model_validator(mode="after") + def check_best_of_and_n(self) -> "OpenAITextPromptExecutionSettings": + """Check that the best_of parameter is not greater than the number_of_responses parameter.""" + if self.best_of is not None and self.best_of < self.number_of_responses: + raise ValueError( + "When used with number_of_responses, best_of controls the number of candidate completions and n specifies how many to return, therefore best_of must be greater than number_of_responses." # noqa: E501 + ) + if self.extension_data.get("best_of") is not None and self.extension_data["best_of"] < self.extension_data.get( + "number_of_responses" + ): + raise ValueError( + "When used with number_of_responses, best_of controls the number of candidate completions and n specifies how many to return, therefore best_of must be greater than number_of_responses." # noqa: E501 + ) + return self + + +class OpenAIChatPromptExecutionSettings(OpenAIPromptExecutionSettings): + """Specific settings for the Chat Completion endpoint.""" + + response_format: Optional[Dict[Literal["type"], Literal["text", "json_object"]]] = None + tools: Optional[List[Dict[str, Any]]] = None + tool_choice: Optional[str] = None + function_call: Optional[str] = None + functions: Optional[List[Dict[str, Any]]] = None + messages: Optional[List[Dict[str, Any]]] = None + + @field_validator("functions", "function_call", mode="after") + @classmethod + def validate_function_call(cls, v: Optional[Union[str, List[Dict[str, Any]]]] = None): + if v is not None: + logger.warning( + "The function_call and functions parameters are deprecated. Please use the tool_choice and tools parameters instead." # noqa: E501 + ) + return v + + +class OpenAIEmbeddingPromptExecutionSettings(PromptExecutionSettings): + input: Optional[Union[str, List[str], List[int], List[List[int]]]] = None + ai_model_id: Optional[str] = Field(None, serialization_alias="model") + encoding_format: Optional[Literal["float", "base64"]] = None + user: Optional[str] = None + extra_headers: Optional[Dict] = None + extra_query: Optional[Dict] = None + extra_body: Optional[Dict] = None + timeout: Optional[float] = None diff --git a/python/semantic_kernel/connectors/ai/open_ai/semantic_functions/open_ai_chat_prompt_template.py b/python/semantic_kernel/connectors/ai/open_ai/semantic_functions/open_ai_chat_prompt_template.py index 6b4b4d234f07..6b62593cc24c 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/semantic_functions/open_ai_chat_prompt_template.py +++ b/python/semantic_kernel/connectors/ai/open_ai/semantic_functions/open_ai_chat_prompt_template.py @@ -1,32 +1,29 @@ # Copyright (c) Microsoft. All rights reserved. 
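A hedged configuration sketch for the settings classes above: wiring an Azure AI Search data source into AzureChatPromptExecutionSettings through ExtraBody. The endpoint, key and index name are placeholders, and "dataSources" is the serialization alias declared on ExtraBody.

from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import (
    ApiKeyAuthentication,
    AzureAISearchDataSources,
    AzureChatPromptExecutionSettings,
    AzureDataSources,
    ExtraBody,
)

search_source = AzureAISearchDataSources(
    indexName="contoso-docs",                              # placeholder index name
    endpoint="https://contoso-search.search.windows.net",  # placeholder endpoint
    key="<azure-ai-search-key>",
    queryType="simple",
    authentication=ApiKeyAuthentication(key="<azure-ai-search-key>"),
)
settings = AzureChatPromptExecutionSettings(
    temperature=0.0,
    max_tokens=512,
    extra_body=ExtraBody(dataSources=[AzureDataSources(type="AzureCognitiveSearch", parameters=search_source)]),
)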
-from logging import Logger -from typing import Any, Dict, List, Optional +import logging +from typing import Any, Optional -from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import FunctionCall from semantic_kernel.connectors.ai.open_ai.models.chat.open_ai_chat_message import ( OpenAIChatMessage, ) from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate from semantic_kernel.semantic_functions.prompt_template import PromptTemplate -from semantic_kernel.semantic_functions.prompt_template_config import ( - PromptTemplateConfig, -) -from semantic_kernel.template_engine.protocols.prompt_templating_engine import ( - PromptTemplatingEngine, -) +logger: logging.Logger = logging.getLogger(__name__) -class OpenAIChatPromptTemplate(ChatPromptTemplate): - def add_function_response_message(self, name: str, content: Any) -> None: + +class OpenAIChatPromptTemplate(ChatPromptTemplate[OpenAIChatMessage]): + def add_function_response_message(self, name: str, content: Any, tool_call_id: Optional[str] = None) -> None: """Add a function response message to the chat template.""" - self._messages.append( - OpenAIChatMessage(role="function", name=name, fixed_content=str(content)) + self.messages.append( + OpenAIChatMessage(role="function", name=name, fixed_content=str(content), tool_call_id=tool_call_id) ) - def add_message( - self, role: str, message: Optional[str] = None, **kwargs: Any - ) -> None: + def add_tool_call_response_message(self, tool_call_id: str, content: Any) -> None: + """Add a tool call response message to the chat template.""" + self.messages.append(OpenAIChatMessage(role="tool", tool_call_id=tool_call_id, fixed_content=str(content))) + + def add_message(self, role: str, message: Optional[str] = None, **kwargs: Any) -> None: """Add a message to the chat template. 
Arguments:
@@ -38,66 +35,59 @@ def add_message(
"""
name = kwargs.get("name")
if name is not None and role != "function":
- self._log.warning("name is only used with role: function, ignoring")
+ logger.warning("name is only used with role: function, ignoring")
name = None
function_call = kwargs.get("function_call")
- if function_call is not None and role != "assistant":
- self._log.warning(
- "function_call is only used with role: assistant, ignoring"
- )
+ if function_call is not None:
+ if role == "assistant":
+ self.messages.append(
+ OpenAIChatMessage(
+ role=role,
+ fixed_content=message,
+ name=name,
+ function_call=function_call,
+ )
+ )
+ return
+ logger.warning("function_call is only used with role: assistant, ignoring")
function_call = None
- if function_call and not isinstance(function_call, FunctionCall):
- self._log.warning(
- "function_call is not a FunctionCall, ignoring: %s", function_call
+ tool_calls = kwargs.get("tool_calls")
+ if tool_calls is not None:
+ # TODO: update this when tool_calls is implemented
+ # and allow for multiple tool calls
+ ids = [tool_call.id for tool_call in tool_calls]
+ if role == "assistant":
+ self.messages.append(
+ OpenAIChatMessage(
+ role=role,
+ fixed_content=message,
+ name=name,
+ tool_calls=tool_calls[0],
+ tool_call_id=ids[0],
+ )
+ )
+ return
+ logger.warning("tool_calls is only used with role: assistant, ignoring")
+ tool_calls = None
+ tool_call_id = kwargs.get("tool_call_id")
+ if tool_call_id is not None:
+ if role == "tool":
+ self.messages.append(
+ OpenAIChatMessage(
+ role=role,
+ fixed_content=message,
+ name=name,
+ tool_call_id=tool_call_id,
+ )
)
- function_call = None
- self._messages.append(
+ return
+ logger.warning("tool_call_id is only used with role: tool, ignoring")
+ tool_call_id = None
+ self.messages.append(
OpenAIChatMessage(
role=role,
- content_template=PromptTemplate(
- message, self._template_engine, self._prompt_config
- ),
+ content_template=PromptTemplate(message, self.template_engine, self.prompt_config),
name=name,
function_call=function_call,
)
)
-
- @classmethod
- def restore(
- cls,
- messages: List[Dict[str, str]],
- template: str,
- template_engine: PromptTemplatingEngine,
- prompt_config: PromptTemplateConfig,
- log: Optional[Logger] = None,
- ) -> "OpenAIChatPromptTemplate":
- """Restore a ChatPromptTemplate from a list of role and message pairs.
-
- If there is a chat_system_prompt in the prompt_config.completion settings,
- that takes precedence over the first message in the list of messages,
- if that is a system message.
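A small sketch (not part of the diff) of the tool-aware template methods above; it assumes `chat_template` is an existing OpenAIChatPromptTemplate and `tool_calls` holds the ToolCall objects returned for an assistant turn, and the plugin name and payloads are illustrative.

# chat_template: OpenAIChatPromptTemplate, tool_calls: List[ToolCall] (assumed to exist)
chat_template.add_message("assistant", "Let me look that up.", tool_calls=tool_calls)
chat_template.add_tool_call_response_message(tool_call_id=tool_calls[0].id, content='{"answer": 42}')
chat_template.add_function_response_message(name="math-Add", content="3", tool_call_id=tool_calls[0].id)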
- """ - chat_template = cls(template, template_engine, prompt_config, log) - if ( - prompt_config.completion.chat_system_prompt - and messages[0]["role"] == "system" - ): - existing_system_message = messages.pop(0) - if ( - existing_system_message["message"] - != prompt_config.completion.chat_system_prompt - ): - chat_template._log.info( - "Overriding system prompt with chat_system_prompt, old system message: %s, new system message: %s", - existing_system_message["message"], - prompt_config.completion.chat_system_prompt, - ) - for message in messages: - chat_template.add_message( - message["role"], - message["message"], - name=message["name"], - function_call=message["function_call"], - ) - - return chat_template diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py index b328b2473ffe..d1bdd5998205 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py @@ -1,68 +1,378 @@ # Copyright (c) Microsoft. All rights reserved. +import logging +from typing import ( + Any, + Dict, + List, + Mapping, + Optional, + Union, + overload, +) +from openai import AsyncAzureOpenAI +from openai.lib.azure import AsyncAzureADTokenProvider +from openai.types.chat.chat_completion import ChatCompletion, Choice +from openai.types.chat.chat_completion_chunk import ChatCompletionChunk +from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice -from logging import Logger -from typing import Optional - -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import ( - OpenAIChatCompletion, +from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION +from semantic_kernel.connectors.ai.open_ai.contents import ( + AzureChatMessageContent, + AzureStreamingChatMessageContent, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureChatPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.azure_config_base import ( + AzureOpenAIConfigBase, ) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion_base import OpenAIChatCompletionBase +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIModelTypes, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion_base import ( + OpenAITextCompletionBase, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.kernel_pydantic import HttpsUrl +from semantic_kernel.models.chat.chat_role import ChatRole +from semantic_kernel.models.chat.finish_reason import FinishReason +logger: logging.Logger = logging.getLogger(__name__) -class AzureChatCompletion(OpenAIChatCompletion): - _endpoint: str - _api_version: str - _api_type: str +class AzureChatCompletion(AzureOpenAIConfigBase, OpenAIChatCompletionBase, OpenAITextCompletionBase): + """Azure Chat completion class.""" + + @overload def __init__( self, deployment_name: str, - endpoint: Optional[str] = None, + base_url: Union[HttpsUrl, str], + api_version: str = DEFAULT_AZURE_API_VERSION, api_key: Optional[str] = None, - api_version: str = "2023-03-15-preview", - logger: Optional[Logger] = None, - ad_auth=False, + ad_token: Optional[str] = None, + ad_token_provider: Optional[AsyncAzureADTokenProvider] = None, + 
default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, ) -> None: """ Initialize an AzureChatCompletion service. - You must provide: - - A deployment_name, endpoint, and api_key (plus, optionally: ad_auth) + Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + base_url: The url of the Azure deployment. This value + can be found in the Keys & Endpoint section when examining + your resource from the Azure portal, the base_url consists of the endpoint, + followed by /openai/deployments/{deployment_name}/, + use endpoint if you only want to supply the endpoint. + api_key: The API key for the Azure deployment. This value can be + found in the Keys & Endpoint section when examining your resource in + the Azure portal. You can use either KEY1 or KEY2. + api_version: The API version to use. (Optional) + The default value is "2023-05-15". + ad_auth: Whether to use Azure Active Directory authentication. (Optional) + The default value is False. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log: The logger instance to use. (Optional) (Deprecated) + logger: deprecated, use 'log' instead. + """ + + @overload + def __init__( + self, + deployment_name: str, + endpoint: Union[HttpsUrl, str], + api_version: str = DEFAULT_AZURE_API_VERSION, + api_key: Optional[str] = None, + ad_token: Optional[str] = None, + ad_token_provider: Optional[AsyncAzureADTokenProvider] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an AzureChatCompletion service. - :param deployment_name: The name of the Azure deployment. This value + Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + endpoint: The endpoint of the Azure deployment. This value + can be found in the Keys & Endpoint section when examining + your resource from the Azure portal, the endpoint should end in openai.azure.com. + api_key: The API key for the Azure deployment. This value can be + found in the Keys & Endpoint section when examining your resource in + the Azure portal. You can use either KEY1 or KEY2. + api_version: The API version to use. (Optional) + The default value is "2023-05-15". + ad_auth: Whether to use Azure Active Directory authentication. (Optional) + The default value is False. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log: The logger instance to use. (Optional) (Deprecated) + logger: deprecated, use 'log' instead. + """ + + @overload + def __init__( + self, + deployment_name: str, + async_client: AsyncAzureOpenAI, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an AzureChatCompletion service. + + Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. 
This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + async_client {AsyncAzureOpenAI} -- An existing client to use. + log: The logger instance to use. (Optional) (Deprecated) + """ + + @overload + def __init__( + self, + deployment_name: str, + endpoint: Union[HttpsUrl, str], + api_version: str = DEFAULT_AZURE_API_VERSION, + api_key: Optional[str] = None, + ad_token: Optional[str] = None, + ad_token_provider: Optional[AsyncAzureADTokenProvider] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, + use_extensions: bool = False, + ) -> None: + """ + Initialize an AzureChatCompletion service. + + Arguments: + deployment_name: The name of the Azure deployment. This value will correspond to the custom name you chose for your deployment when you deployed a model. This value can be found under Resource Management > Deployments in the Azure portal or, alternatively, under Management > Deployments in Azure OpenAI Studio. - :param endpoint: The endpoint of the Azure deployment. This value + endpoint: The endpoint of the Azure deployment. This value can be found in the Keys & Endpoint section when examining - your resource from the Azure portal. - :param api_key: The API key for the Azure deployment. This value can be + your resource from the Azure portal, the endpoint should end in openai.azure.com. + api_key: The API key for the Azure deployment. This value can be found in the Keys & Endpoint section when examining your resource in the Azure portal. You can use either KEY1 or KEY2. - :param api_version: The API version to use. (Optional) - The default value is "2022-12-01". - :param logger: The logger instance to use. (Optional) - :param ad_auth: Whether to use Azure Active Directory authentication. - (Optional) The default value is False. + api_version: The API version to use. (Optional) + The default value is "2023-05-15". + ad_auth: Whether to use Azure Active Directory authentication. (Optional) + The default value is False. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log: The logger instance to use. (Optional) + use_extensions: Whether to use extensions, for example when chatting with data. (Optional) + When True, base_url is overwritten to '{endpoint}/openai/deployments/{deployment_name}/extensions'. + The default value is False. """ - if not deployment_name: - raise ValueError("The deployment name cannot be `None` or empty") - if not api_key: - raise ValueError("The Azure API key cannot be `None` or empty`") - if not endpoint: - raise ValueError("The Azure endpoint cannot be `None` or empty") - if not endpoint.startswith("https://"): - raise ValueError("The Azure endpoint must start with https://") - self._api_type = "azure_ad" if ad_auth else "azure" + def __init__( + self, + deployment_name: str, + endpoint: Optional[Union[HttpsUrl, str]] = None, + base_url: Optional[Union[HttpsUrl, str]] = None, + api_version: str = DEFAULT_AZURE_API_VERSION, + api_key: Optional[str] = None, + ad_token: Optional[str] = None, + ad_token_provider: Optional[AsyncAzureADTokenProvider] = None, + default_headers: Optional[Mapping[str, str]] = None, + async_client: Optional[AsyncAzureOpenAI] = None, + use_extensions: bool = False, + log: Optional[Any] = None, + **kwargs, + ) -> None: + """ + Initialize an AzureChatCompletion service. 
+ Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + base_url: The url of the Azure deployment. This value + can be found in the Keys & Endpoint section when examining + your resource from the Azure portal, the base_url consists of the endpoint, + followed by /openai/deployments/{deployment_name}/, + use endpoint if you only want to supply the endpoint. + endpoint: The endpoint of the Azure deployment. This value + can be found in the Keys & Endpoint section when examining + your resource from the Azure portal, the endpoint should end in openai.azure.com. + If both base_url and endpoint are supplied, base_url will be used. + api_key: The API key for the Azure deployment. This value can be + found in the Keys & Endpoint section when examining your resource in + the Azure portal. You can use either KEY1 or KEY2. + api_version: The API version to use. (Optional) + The default value is "2023-05-15". + ad_auth: Whether to use Azure Active Directory authentication. (Optional) + The default value is False. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log: The logger instance to use. (Optional) (Deprecated) + logger: deprecated. + async_client {Optional[AsyncAzureOpenAI]} -- An existing client to use. (Optional) + use_extensions: Whether to use extensions, for example when chatting with data. (Optional) + When True, base_url is overwritten to '{endpoint}/openai/deployments/{deployment_name}/extensions'. + The default value is False. + """ + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + if kwargs.get("logger"): + logger.warning("The 'logger' argument is deprecated. Please use the `logging` module instead.") + + if base_url and isinstance(base_url, str): + base_url = HttpsUrl(base_url) + if use_extensions and endpoint and deployment_name: + base_url = HttpsUrl(f"{str(endpoint).rstrip('/')}/openai/deployments/{deployment_name}/extensions") super().__init__( - deployment_name, - api_key, - api_type=self._api_type, + deployment_name=deployment_name, + endpoint=endpoint if not isinstance(endpoint, str) else HttpsUrl(endpoint), + base_url=base_url, api_version=api_version, - endpoint=endpoint, - org_id=None, - log=logger, + api_key=api_key, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + default_headers=default_headers, + ai_model_type=OpenAIModelTypes.CHAT, + async_client=async_client, + ) + + @classmethod + def from_dict(cls, settings: Dict[str, str]) -> "AzureChatCompletion": + """ + Initialize an Azure OpenAI service from a dictionary of settings. + + Arguments: + settings: A dictionary of settings for the service. 
+ should contains keys: deployment_name, endpoint, api_key + and optionally: api_version, ad_auth, default_headers + """ + return AzureChatCompletion( + deployment_name=settings.get("deployment_name"), + endpoint=settings.get("endpoint"), + base_url=settings.get("base_url"), + api_version=settings.get("api_version", DEFAULT_AZURE_API_VERSION), + api_key=settings.get("api_key"), + ad_token=settings.get("ad_token"), + ad_token_provider=settings.get("ad_token_provider"), + default_headers=settings.get("default_headers"), + ) + + def get_prompt_execution_settings_class(self) -> "PromptExecutionSettings": + """Create a request settings object.""" + return AzureChatPromptExecutionSettings + + def _create_chat_message_content( + self, response: ChatCompletion, choice: Choice, response_metadata: Dict[str, Any] + ) -> AzureChatMessageContent: + """Create a Azure chat message content object from a choice.""" + metadata = self._get_metadata_from_chat_choice(choice) + metadata.update(response_metadata) + return AzureChatMessageContent( + inner_content=response, + ai_model_id=self.ai_model_id, + metadata=metadata, + role=ChatRole(choice.message.role) if choice.message.role is not None else None, + content=choice.message.content, + function_call=self._get_function_call_from_chat_choice(choice), + tool_calls=self._get_tool_calls_from_chat_choice(choice), + tool_message=self._get_tool_message_from_chat_choice(choice), ) + + def _create_streaming_chat_message_content( + self, + chunk: ChatCompletionChunk, + choice: ChunkChoice, + chunk_metadata: Dict[str, Any], + ): + """Create a Azure streaming chat message content object from a choice.""" + metadata = self._get_metadata_from_chat_choice(choice) + metadata.update(chunk_metadata) + return AzureStreamingChatMessageContent( + choice_index=choice.index, + inner_content=chunk, + ai_model_id=self.ai_model_id, + metadata=metadata, + role=ChatRole(choice.delta.role) if choice.delta.role is not None else None, + content=choice.delta.content, + finish_reason=FinishReason(choice.finish_reason) if choice.finish_reason is not None else None, + function_call=self._get_function_call_from_chat_choice(choice), + tool_calls=self._get_tool_calls_from_chat_choice(choice), + tool_message=self._get_tool_message_from_chat_choice(choice), + ) + + def _get_update_storage_fields(self) -> Dict[str, Dict[int, Any]]: + """Get the fields to store the updates.""" + out_messages = {} + tool_messages_by_index = {} + tool_call_ids_by_index = {} + function_call_by_index = {} + return { + "out_messages": out_messages, + "tool_call_ids_by_index": tool_call_ids_by_index, + "function_call_by_index": function_call_by_index, + "tool_messages_by_index": tool_messages_by_index, + } + + def _update_storages( + self, contents: List[AzureStreamingChatMessageContent], update_storage: Dict[str, Dict[int, Any]] + ): + """Handle updates to the messages, tool_calls and function_calls. + + This will be used for auto-invoking tools. 
+ """ + out_messages = update_storage["out_messages"] + tool_call_ids_by_index = update_storage["tool_call_ids_by_index"] + function_call_by_index = update_storage["function_call_by_index"] + tool_messages_by_index = update_storage["tool_messages_by_index"] + + for index, content in enumerate(contents): + if content.content is not None: + if index not in out_messages: + out_messages[index] = content.content + else: + out_messages[index] += content.content + if content.tool_calls is not None: + if index not in tool_call_ids_by_index: + tool_call_ids_by_index[index] = content.tool_calls + else: + for tc_index, tool_call in enumerate(content.tool_calls): + tool_call_ids_by_index[index][tc_index] += tool_call + if content.function_call is not None: + if index not in function_call_by_index: + function_call_by_index[index] = content.function_call + else: + function_call_by_index[index] += content.function_call + if content.tool_message is not None: + if index not in tool_messages_by_index: + tool_messages_by_index[index] = content.tool_message + else: + tool_messages_by_index[index] += content.tool_message + + def _get_tool_message_from_chat_choice(self, choice: Union[Choice, ChunkChoice]) -> Optional[str]: + """Get the tool message from a choice.""" + if isinstance(choice, Choice): + content = choice.message + else: + content = choice.delta + if content.model_extra is not None and "context" in content.model_extra: + if "messages" in content.model_extra["context"]: + for message in content.model_extra["context"]["messages"]: + if "tool" in message["role"]: + return message["content"] + return None diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py new file mode 100644 index 000000000000..1c97504a8331 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_config_base.py @@ -0,0 +1,129 @@ +# Copyright (c) Microsoft. All rights reserved. + +import json +import logging +from typing import Any, Awaitable, Callable, Dict, Mapping, Optional, Union + +from openai import AsyncAzureOpenAI +from pydantic import validate_call + +from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.open_ai.const import ( + DEFAULT_AZURE_API_VERSION, + USER_AGENT, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIHandler, + OpenAIModelTypes, +) +from semantic_kernel.connectors.telemetry import APP_INFO +from semantic_kernel.kernel_pydantic import HttpsUrl + +logger: logging.Logger = logging.getLogger(__name__) + + +class AzureOpenAIConfigBase(OpenAIHandler): + """Internal class for configuring a connection to an Azure OpenAI service.""" + + @validate_call(config=dict(arbitrary_types_allowed=True)) + def __init__( + self, + deployment_name: str, + ai_model_type: OpenAIModelTypes, + endpoint: Optional[HttpsUrl] = None, + base_url: Optional[HttpsUrl] = None, + api_version: str = DEFAULT_AZURE_API_VERSION, + api_key: Optional[str] = None, + ad_token: Optional[str] = None, + ad_token_provider: Optional[Callable[[], Union[str, Awaitable[str]]]] = None, + default_headers: Union[Mapping[str, str], None] = None, + log: Optional[Any] = None, + async_client: Optional[AsyncAzureOpenAI] = None, + ) -> None: + """Internal class for configuring a connection to an Azure OpenAI service. + + Arguments: + deployment_name {str} -- Name of the deployment. 
+ ai_model_type {OpenAIModelTypes} -- The type of OpenAI model to deploy. + endpoint {Optional[HttpsUrl]} -- The specific endpoint URL for the deployment. (Optional) + base_url {Optional[HttpsUrl]} -- The base URL for Azure services. (Optional) + api_version {str} -- Azure API version. Defaults to the defined DEFAULT_AZURE_API_VERSION. + api_key {Optional[str]} -- API key for Azure services. (Optional) + ad_token {Optional[str]} -- Azure AD token for authentication. (Optional) + ad_token_provider {Optional[Callable[[], Union[str, Awaitable[str]]]]} -- A callable + or coroutine function providing Azure AD tokens. (Optional) + default_headers {Union[Mapping[str, str], None]} -- Default headers for HTTP requests. (Optional) + log -- Logger instance for logging purposes. (Optional) (Deprecated) + async_client {Optional[AsyncAzureOpenAI]} -- An existing client to use. (Optional) + + The `validate_call` decorator is used with a configuration that allows arbitrary types. + This is necessary for types like `HttpsUrl` and `OpenAIModelTypes`. + """ + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + # Merge APP_INFO into the headers if it exists + merged_headers = default_headers.copy() if default_headers else {} + if APP_INFO: + merged_headers[USER_AGENT] = json.dumps(APP_INFO) + + if not async_client: + if not api_key and not ad_token and not ad_token_provider: + raise AIException( + AIException.ErrorCodes.InvalidConfiguration, + "Please provide either api_key, ad_token or ad_token_provider", + ) + if base_url: + async_client = AsyncAzureOpenAI( + base_url=str(base_url), + api_version=api_version, + api_key=api_key, + azure_ad_token=ad_token, + azure_ad_token_provider=ad_token_provider, + default_headers=merged_headers, + ) + else: + if not endpoint: + raise AIException( + AIException.ErrorCodes.InvalidConfiguration, + "Please provide either base_url or endpoint", + ) + async_client = AsyncAzureOpenAI( + azure_endpoint=str(endpoint), + azure_deployment=deployment_name, + api_version=api_version, + api_key=api_key, + azure_ad_token=ad_token, + azure_ad_token_provider=ad_token_provider, + default_headers=merged_headers, + ) + + super().__init__( + ai_model_id=deployment_name, + client=async_client, + ai_model_type=ai_model_type, + ) + + def to_dict(self) -> Dict[str, str]: + client_settings = { + "base_url": str(self.client.base_url), + "api_version": self.client._custom_query["api-version"], + "api_key": self.client.api_key, + "ad_token": self.client._azure_ad_token, + "ad_token_provider": self.client._azure_ad_token_provider, + "default_headers": {k: v for k, v in self.client.default_headers.items() if k != USER_AGENT}, + } + base = self.model_dump( + exclude={ + "prompt_tokens", + "completion_tokens", + "total_tokens", + "api_type", + "org_id", + "ai_model_type", + "client", + }, + by_alias=True, + exclude_none=True, + ) + base.update(client_settings) + return base diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py index 0c6b1ee6e57d..534424377647 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py @@ -1,68 +1,195 @@ # Copyright (c) Microsoft. All rights reserved. 
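Two hedged construction sketches (not part of the diff) for the Azure services configured through AzureOpenAIConfigBase above: one from an endpoint plus API key, one reusing an existing AsyncAzureOpenAI client. The deployment, endpoint, key and api_version values are placeholders.

from openai import AsyncAzureOpenAI

from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion

# 1. Endpoint + API key; the endpoint should end in openai.azure.com.
chat_service = AzureChatCompletion(
    deployment_name="gpt-4",
    endpoint="https://contoso.openai.azure.com",
    api_key="<azure-openai-key>",
)

# 2. Reuse an already configured AsyncAzureOpenAI client (for example one set up for AAD auth).
client = AsyncAzureOpenAI(
    azure_endpoint="https://contoso.openai.azure.com",
    azure_deployment="gpt-4",
    api_version="2023-05-15",
    api_key="<azure-openai-key>",
)
chat_service_from_client = AzureChatCompletion(deployment_name="gpt-4", async_client=client)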
+import logging +from typing import Any, Dict, Mapping, Optional, overload -from logging import Logger -from typing import Optional +from openai import AsyncAzureOpenAI +from openai.lib.azure import AsyncAzureADTokenProvider -from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion import ( - OpenAITextCompletion, +from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION +from semantic_kernel.connectors.ai.open_ai.services.azure_config_base import ( + AzureOpenAIConfigBase, ) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIModelTypes, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion_base import ( + OpenAITextCompletionBase, +) + +logger: logging.Logger = logging.getLogger(__name__) -class AzureTextCompletion(OpenAITextCompletion): - _endpoint: str - _api_version: str - _api_type: str +class AzureTextCompletion(AzureOpenAIConfigBase, OpenAITextCompletionBase): + """Azure Text Completion class.""" + @overload + def __init__( + self, + base_url: str, + api_version: str = DEFAULT_AZURE_API_VERSION, + api_key: Optional[str] = None, + ad_token: Optional[str] = None, + ad_token_provider: Optional[AsyncAzureADTokenProvider] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an AzureTextCompletion service. + + Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + endpoint: The endpoint of the Azure deployment. This value + can be found in the Keys & Endpoint section when examining + your resource from the Azure portal. + api_key: The API key for the Azure deployment. This value can be + found in the Keys & Endpoint section when examining your resource in + the Azure portal. You can use either KEY1 or KEY2. + api_version: The API version to use. (Optional) + The default value is "2023-05-15". + ad_auth: Whether to use Azure Active Directory authentication. (Optional) + The default value is False. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log: The logger instance to use. (Optional) (Deprecated) + logger: deprecated. + """ + + @overload def __init__( self, deployment_name: str, - endpoint: Optional[str] = None, + endpoint: str, + api_version: str = DEFAULT_AZURE_API_VERSION, api_key: Optional[str] = None, - api_version: str = "2022-12-01", - logger: Optional[Logger] = None, - ad_auth=False, + ad_token: Optional[str] = None, + ad_token_provider: Optional[AsyncAzureADTokenProvider] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, ) -> None: """ Initialize an AzureTextCompletion service. - You must provide: - - A deployment_name, endpoint, and api_key (plus, optionally: ad_auth) - - :param deployment_name: The name of the Azure deployment. This value - will correspond to the custom name you chose for your deployment - when you deployed a model. This value can be found under - Resource Management > Deployments in the Azure portal or, alternatively, - under Management > Deployments in Azure OpenAI Studio. - :param endpoint: The endpoint of the Azure deployment. 
This value - can be found in the Keys & Endpoint section when examining - your resource from the Azure portal. - :param api_key: The API key for the Azure deployment. This value can be - found in the Keys & Endpoint section when examining your resource in - the Azure portal. You can use either KEY1 or KEY2. - :param api_version: The API version to use. (Optional) - The default value is "2022-12-01". - :param logger: The logger instance to use. (Optional) - :param ad_auth: Whether to use Azure Active Directory authentication. - (Optional) The default value is False. + Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + endpoint: The endpoint of the Azure deployment. This value + can be found in the Keys & Endpoint section when examining + your resource from the Azure portal. + api_key: The API key for the Azure deployment. This value can be + found in the Keys & Endpoint section when examining your resource in + the Azure portal. You can use either KEY1 or KEY2. + api_version: The API version to use. (Optional) + The default value is "2023-05-15". + ad_auth: Whether to use Azure Active Directory authentication. (Optional) + The default value is False. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log: The logger instance to use. (Optional) (Deprecated) + logger: deprecated, use 'log' instead. """ - if not deployment_name: - raise ValueError("The deployment name cannot be `None` or empty") - if not api_key: - raise ValueError("The Azure API key cannot be `None` or empty`") - if not endpoint: - raise ValueError("The Azure endpoint cannot be `None` or empty") - if not endpoint.startswith("https://"): - raise ValueError("The Azure endpoint must start with https://") - self._api_type = "azure_ad" if ad_auth else "azure" + @overload + def __init__( + self, + deployment_name: str, + async_client: AsyncAzureOpenAI, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an AzureChatCompletion service. + Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + async_client {AsyncAzureOpenAI} -- An existing client to use. + log: The logger instance to use. (Optional) (Deprecated) + """ + + def __init__( + self, + deployment_name: Optional[str] = None, + endpoint: Optional[str] = None, + base_url: Optional[str] = None, + api_version: str = DEFAULT_AZURE_API_VERSION, + api_key: Optional[str] = None, + ad_token: Optional[str] = None, + ad_token_provider: Optional[AsyncAzureADTokenProvider] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, + async_client: Optional[AsyncAzureOpenAI] = None, + **kwargs, + ) -> None: + """ + Initialize an AzureTextCompletion service. + + Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. 
This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + endpoint: The endpoint of the Azure deployment. This value + can be found in the Keys & Endpoint section when examining + your resource from the Azure portal. + api_key: The API key for the Azure deployment. This value can be + found in the Keys & Endpoint section when examining your resource in + the Azure portal. You can use either KEY1 or KEY2. + api_version: The API version to use. (Optional) + The default value is "2023-03-15-preview". + ad_auth: Whether to use Azure Active Directory authentication. (Optional) + The default value is False. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log: The logger instance to use. (Optional) (Deprecated) + logger: deprecated, use 'log' instead. + async_client {Optional[AsyncAzureOpenAI]} -- An existing client to use. + """ + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + if kwargs.get("logger"): + logger.warning("The 'logger' argument is deprecated.") super().__init__( - deployment_name, - api_key, - api_type=self._api_type, - api_version=api_version, + deployment_name=deployment_name, endpoint=endpoint, - org_id=None, - log=logger, + base_url=base_url, + api_version=api_version, + api_key=api_key, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + default_headers=default_headers, + ai_model_type=OpenAIModelTypes.TEXT, + async_client=async_client, + ) + + @classmethod + def from_dict(cls, settings: Dict[str, str]) -> "AzureTextCompletion": + """ + Initialize an Azure OpenAI service from a dictionary of settings. + + Arguments: + settings: A dictionary of settings for the service. + should contains keys: deployment_name, endpoint, api_key + and optionally: api_version, ad_auth + """ + + return AzureTextCompletion( + deployment_name=settings.get("deployment_name"), + endpoint=settings.get("endpoint"), + base_url=settings.get("base_url"), + api_version=settings.get("api_version", DEFAULT_AZURE_API_VERSION), + api_key=settings["api_key"], + ad_token=settings.get("ad_token"), + ad_token_provider=settings.get("ad_token_provider"), + default_headers=settings.get("default_headers"), ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py index 816c8db1c5b9..0034e76ab5ad 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py @@ -1,27 +1,61 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger -from typing import Optional +import logging +from typing import Any, Dict, Mapping, Optional, overload -from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import ( - OpenAITextEmbedding, +from openai import AsyncAzureOpenAI +from openai.lib.azure import AsyncAzureADTokenProvider + +from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION +from semantic_kernel.connectors.ai.open_ai.services.azure_config_base import ( + AzureOpenAIConfigBase, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIModelTypes, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding_base import ( + OpenAITextEmbeddingBase, ) +logger: logging.Logger = logging.getLogger(__name__) + + +class AzureTextEmbedding(AzureOpenAIConfigBase, OpenAITextEmbeddingBase): + """Azure Text Embedding class.""" + + @overload + def __init__( + self, + deployment_name: str, + async_client: AsyncAzureOpenAI, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an AzureTextEmbedding service. -class AzureTextEmbedding(OpenAITextEmbedding): - _endpoint: str - _api_version: str - _api_type: str + Arguments: + deployment_name: The name of the Azure deployment. This value + will correspond to the custom name you chose for your deployment + when you deployed a model. This value can be found under + Resource Management > Deployments in the Azure portal or, alternatively, + under Management > Deployments in Azure OpenAI Studio. + async_client {AsyncAzureOpenAI} -- An existing client to use. + log: The logger instance to use. (Optional) (Deprecated) + """ def __init__( self, deployment_name: str, endpoint: Optional[str] = None, + api_version: str = DEFAULT_AZURE_API_VERSION, api_key: Optional[str] = None, - api_version: str = "2022-12-01", - logger: Optional[Logger] = None, - ad_auth=False, + ad_token: Optional[str] = None, + ad_token_provider: Optional[AsyncAzureADTokenProvider] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, + async_client: Optional[AsyncAzureOpenAI] = None, + **kwargs, ) -> None: """ Initialize an AzureTextEmbedding service. @@ -37,32 +71,53 @@ def __init__( :param endpoint: The endpoint of the Azure deployment. This value can be found in the Keys & Endpoint section when examining your resource from the Azure portal. + :param api_version: The API version to use. (Optional) + The default value is "2023-05-15". :param api_key: The API key for the Azure deployment. This value can be found in the Keys & Endpoint section when examining your resource in the Azure portal. You can use either KEY1 or KEY2. - :param api_version: The API version to use. (Optional) - The default value is "2022-12-01". - :param logger: The logger instance to use. (Optional) + :param ad_token: The Azure AD token for authentication. (Optional) :param ad_auth: Whether to use Azure Active Directory authentication. (Optional) The default value is False.
- """ - if not deployment_name: - raise ValueError("The deployment name cannot be `None` or empty") - if not api_key: - raise ValueError("The Azure API key cannot be `None` or empty`") - if not endpoint: - raise ValueError("The Azure endpoint cannot be `None` or empty") - if not endpoint.startswith("https://"): - raise ValueError("The Azure endpoint must start with https://") - - self._api_type = "azure_ad" if ad_auth else "azure" + :param default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + :param log: The logger instance to use. (Optional) (Deprecated) + :param logger: Deprecated, please use log instead. (Optional) + :param async_client: An existing client to use. (Optional) + """ + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + if kwargs.get("logger"): + logger.warning("The 'logger' argument is deprecated.") super().__init__( - deployment_name, - api_key, - api_type=self._api_type, - api_version=api_version, + deployment_name=deployment_name, endpoint=endpoint, - org_id=None, - log=logger, + api_version=api_version, + api_key=api_key, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + default_headers=default_headers, + ai_model_type=OpenAIModelTypes.EMBEDDING, + async_client=async_client, + ) + + @classmethod + def from_dict(cls, settings: Dict[str, str]) -> "AzureTextEmbedding": + """ + Initialize an Azure OpenAI service from a dictionary of settings. + + Arguments: + settings: A dictionary of settings for the service. + should contains keys: deployment_name, endpoint, api_key + and optionally: api_version, ad_auth + """ + return AzureTextEmbedding( + deployment_name=settings["deployment_name"], + endpoint=settings["endpoint"], + api_key=settings["api_key"], + api_version=settings.get("api_version", DEFAULT_AZURE_API_VERSION), + ad_token=settings.get("ad_token"), + ad_token_provider=settings.get("ad_token_provider"), + default_headers=settings.get("default_headers"), ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py index 24adf4896c61..b10068153102 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py @@ -1,333 +1,142 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union - -import openai - -from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import FunctionCall +import logging +from typing import ( + Any, + Dict, + Mapping, + Optional, + overload, +) -if TYPE_CHECKING: - from openai.openai_object import OpenAIObject +from openai import AsyncOpenAI -from semantic_kernel.connectors.ai.ai_exception import AIException -from semantic_kernel.connectors.ai.chat_completion_client_base import ( - ChatCompletionClientBase, -) -from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion_base import OpenAIChatCompletionBase +from semantic_kernel.connectors.ai.open_ai.services.open_ai_config_base import OpenAIConfigBase +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIModelTypes, ) -from semantic_kernel.connectors.ai.text_completion_client_base import ( - TextCompletionClientBase, +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion_base import ( + OpenAITextCompletionBase, ) -from semantic_kernel.utils.null_logger import NullLogger +logger: logging.Logger = logging.getLogger(__name__) + + +class OpenAIChatCompletion(OpenAIConfigBase, OpenAIChatCompletionBase, OpenAITextCompletionBase): + """OpenAI Chat completion class.""" + + @overload + def __init__( + self, + ai_model_id: str, + async_client: AsyncOpenAI, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an OpenAIChatCompletion service. -class OpenAIChatCompletion(ChatCompletionClientBase, TextCompletionClientBase): - _model_id: str - _api_key: str - _org_id: Optional[str] = None - _api_type: Optional[str] = None - _api_version: Optional[str] = None - _endpoint: Optional[str] = None - _log: Logger - _prompt_tokens: int = 0 - _completion_tokens: int = 0 - _total_tokens: int = 0 + Arguments: + ai_model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + async_client {AsyncOpenAI} -- An existing client to use. + log: The logger instance to use. (Optional) (Deprecated) + """ + @overload def __init__( self, - model_id: str, - api_key: str, + ai_model_id: str, + api_key: Optional[str] = None, org_id: Optional[str] = None, - api_type: Optional[str] = None, - api_version: Optional[str] = None, - endpoint: Optional[str] = None, - log: Optional[Logger] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, ) -> None: """ - Initializes a new instance of the OpenAIChatCompletion class. + Initialize an OpenAIChatCompletion service. Arguments: - model_id {str} -- OpenAI model name, see + ai_model_id {str} -- OpenAI model name, see https://platform.openai.com/docs/models - api_key {str} -- OpenAI API key, see + api_key {Optional[str]} -- OpenAI API key, see https://platform.openai.com/account/api-keys org_id {Optional[str]} -- OpenAI organization ID. This is usually optional unless your account belongs to multiple organizations. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log -- The logger instance to use. 
(Optional) (Deprecated) """ - self._model_id = model_id - self._api_key = api_key - self._org_id = org_id - self._api_type = api_type - self._api_version = api_version - self._endpoint = endpoint.rstrip("/") if endpoint is not None else None - self._log = log if log is not None else NullLogger() - self._messages = [] - - async def complete_chat_async( - self, - messages: List[Dict[str, str]], - request_settings: ChatRequestSettings, - logger: Optional[Logger] = None, - ) -> Union[str, List[str]]: - # TODO: tracking on token counts/etc. - response = await self._send_chat_request( - messages, request_settings, False, None - ) - - if len(response.choices) == 1: - return response.choices[0].message.content - return [choice.message.content for choice in response.choices] - - async def complete_chat_with_functions_async( - self, - messages: List[Dict[str, str]], - functions: List[Dict[str, Any]], - request_settings: ChatRequestSettings, - logger: Optional[Logger] = None, - ) -> Union[ - Tuple[Optional[str], Optional[FunctionCall]], - List[Tuple[Optional[str], Optional[FunctionCall]]], - ]: - # TODO: tracking on token counts/etc. - response = await self._send_chat_request( - messages, request_settings, False, functions - ) - - if len(response.choices) == 1: - return _parse_message(response.choices[0].message, self._log) - else: - return [ - _parse_message(choice.message, self._log) for choice in response.choices - ] - - async def complete_chat_stream_async( + @overload + def __init__( self, - messages: List[Dict[str, str]], - request_settings: ChatRequestSettings, - ): - # TODO: enable function calling - response = await self._send_chat_request(messages, request_settings, True, None) + ai_model_id: str, + api_key: Optional[str] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an OpenAIChatCompletion service. - # parse the completion text(s) and yield them - async for chunk in response: - text, index = _parse_choices(chunk) - # if multiple responses are requested, keep track of them - if request_settings.number_of_responses > 1: - completions = [""] * request_settings.number_of_responses - completions[index] = text - yield completions - # if only one response is requested, yield it - else: - yield text + Arguments: + ai_model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + api_key {Optional[str]} -- OpenAI API key, see + https://platform.openai.com/account/api-keys + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + log -- The logger instance to use. (Optional) (Deprecated) + """ - async def complete_async( + def __init__( self, - prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ) -> Union[str, List[str]]: + ai_model_id: str, + api_key: Optional[str] = None, + org_id: Optional[str] = None, + default_headers: Optional[Mapping[str, str]] = None, + async_client: Optional[AsyncOpenAI] = None, + log: Optional[Any] = None, + ) -> None: """ - Completes the given prompt. + Initialize an OpenAIChatCompletion service. Arguments: - prompt {str} -- The prompt to complete. - request_settings {CompleteRequestSettings} -- The request settings. - - Returns: - str -- The completed text. 
+ ai_model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + api_key {Optional[str]} -- OpenAI API key, see + https://platform.openai.com/account/api-keys + org_id {Optional[str]} -- OpenAI organization ID. + This is usually optional unless your + account belongs to multiple organizations. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + async_client {Optional[AsyncOpenAI]} -- An existing client to use. (Optional) + log -- The logger instance to use. (Optional) (Deprecated) """ - prompt_to_message = [{"role": "user", "content": prompt}] - chat_settings = ChatRequestSettings.from_completion_config(request_settings) - response = await self._send_chat_request( - prompt_to_message, chat_settings, False - ) - - if len(response.choices) == 1: - return response.choices[0].message.content - else: - return [choice.message.content for choice in response.choices] - - async def complete_stream_async( - self, - prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ): - prompt_to_message = [{"role": "user", "content": prompt}] - chat_settings = ChatRequestSettings( - temperature=request_settings.temperature, - top_p=request_settings.top_p, - presence_penalty=request_settings.presence_penalty, - frequency_penalty=request_settings.frequency_penalty, - max_tokens=request_settings.max_tokens, - number_of_responses=request_settings.number_of_responses, - token_selection_biases=request_settings.token_selection_biases, - stop_sequences=request_settings.stop_sequences, + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + super().__init__( + ai_model_id=ai_model_id, + api_key=api_key, + org_id=org_id, + ai_model_type=OpenAIModelTypes.CHAT, + default_headers=default_headers, + async_client=async_client, ) - response = await self._send_chat_request(prompt_to_message, chat_settings, True) - - # parse the completion text(s) and yield them - async for chunk in response: - text, index = _parse_choices(chunk) - # if multiple responses are requested, keep track of them - if request_settings.number_of_responses > 1: - completions = [""] * request_settings.number_of_responses - completions[index] = text - yield completions - # if only one response is requested, yield it - else: - yield text - async def _send_chat_request( - self, - messages: List[Tuple[str, str]], - request_settings: ChatRequestSettings, - stream: bool, - functions: Optional[List[Dict[str, Any]]] = None, - ): + @classmethod + def from_dict(cls, settings: Dict[str, str]) -> "OpenAIChatCompletion": """ - Completes the given user message with an asynchronous stream. + Initialize an Open AI service from a dictionary of settings. Arguments: - messages {List[Tuple[str,str]]} -- The messages (from a user) to respond to. - request_settings {ChatRequestSettings} -- The request settings. - stream {bool} -- Whether to stream the response. - functions {List[Dict[str, Any]]} -- The functions available to the api. - - Returns: - str -- The completed text. + settings: A dictionary of settings for the service. 
""" - if request_settings is None: - raise ValueError("The request settings cannot be `None`") - - if request_settings.max_tokens < 1: - raise AIException( - AIException.ErrorCodes.InvalidRequest, - "The max tokens must be greater than 0, " - f"but was {request_settings.max_tokens}", - ) - - if len(messages) <= 0: - raise AIException( - AIException.ErrorCodes.InvalidRequest, - "To complete a chat you need at least one message", - ) - if messages[-1]["role"] in ["assistant", "system"]: - raise AIException( - AIException.ErrorCodes.InvalidRequest, - "The last message must be from the user or a function output", - ) - - model_args = { - "api_key": self._api_key, - "api_type": self._api_type, - "api_base": self._endpoint, - "api_version": self._api_version, - "organization": self._org_id, - "engine" - if self._api_type in ["azure", "azure_ad"] - else "model": self._model_id, - "messages": messages, - "temperature": request_settings.temperature, - "top_p": request_settings.top_p, - "n": request_settings.number_of_responses, - "stream": stream, - "stop": ( - request_settings.stop_sequences - if request_settings.stop_sequences is not None - and len(request_settings.stop_sequences) > 0 - else None - ), - "max_tokens": request_settings.max_tokens, - "presence_penalty": request_settings.presence_penalty, - "frequency_penalty": request_settings.frequency_penalty, - "logit_bias": ( - request_settings.token_selection_biases - if request_settings.token_selection_biases is not None - and len(request_settings.token_selection_biases) > 0 - else {} - ), - } - - if functions and request_settings.function_call is not None: - model_args["function_call"] = request_settings.function_call - if request_settings.function_call != "auto": - model_args["functions"] = [ - func - for func in functions - if func["name"] == request_settings.function_call - ] - else: - model_args["functions"] = functions - - try: - response: Any = await openai.ChatCompletion.acreate(**model_args) - except Exception as ex: - raise AIException( - AIException.ErrorCodes.ServiceError, - f"{self.__class__.__name__} failed to complete the chat", - ex, - ) from ex - - # streaming does not have usage info, therefore checking the type of the response - if not stream and "usage" in response: - self._log.info(f"OpenAI usage: {response.usage}") - self._prompt_tokens += response.usage.prompt_tokens - self._completion_tokens += response.usage.completion_tokens - self._total_tokens += response.usage.total_tokens - - return response - - @property - def prompt_tokens(self) -> int: - return self._prompt_tokens - - @property - def completion_tokens(self) -> int: - return self._completion_tokens - - @property - def total_tokens(self) -> int: - return self._total_tokens - - -def _parse_choices(chunk): - message = "" - if "role" in chunk.choices[0].delta: - message += chunk.choices[0].delta.role + ": " - if "content" in chunk.choices[0].delta: - message += chunk.choices[0].delta.content - if "function_call" in chunk.choices[0].delta: - message += chunk.choices[0].delta.function_call - - index = chunk.choices[0].index - return message, index - - -def _parse_message( - message: "OpenAIObject", logger: Optional[Logger] = None -) -> Tuple[Optional[str], Optional[FunctionCall]]: - """ - Parses the message. - - Arguments: - message {OpenAIObject} -- The message to parse. - - Returns: - Tuple[Optional[str], Optional[Dict]] -- The parsed message. 
- """ - content = message.content if hasattr(message, "content") else None - function_call = message.function_call if hasattr(message, "function_call") else None - if function_call: - function_call = FunctionCall( - name=function_call.name, - arguments=function_call.arguments, + return OpenAIChatCompletion( + ai_model_id=settings["ai_model_id"], + api_key=settings["api_key"], + org_id=settings.get("org_id"), + default_headers=settings.get("default_headers"), ) - - return (content, function_call) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py new file mode 100644 index 000000000000..a46714426d61 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py @@ -0,0 +1,229 @@ +# Copyright (c) Microsoft. All rights reserved. +import logging +from typing import ( + Any, + AsyncIterable, + Dict, + List, + Optional, + Union, +) + +from openai import AsyncStream +from openai.types.chat.chat_completion import ChatCompletion, Choice +from openai.types.chat.chat_completion_chunk import ChatCompletionChunk +from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice + +from semantic_kernel.connectors.ai.chat_completion_client_base import ( + ChatCompletionClientBase, +) +from semantic_kernel.connectors.ai.open_ai.contents import OpenAIChatMessageContent, OpenAIStreamingChatMessageContent +from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import FunctionCall +from semantic_kernel.connectors.ai.open_ai.models.chat.tool_calls import ToolCall +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, + OpenAIPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIHandler +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.models.chat.chat_role import ChatRole +from semantic_kernel.models.chat.finish_reason import FinishReason + +logger: logging.Logger = logging.getLogger(__name__) + + +class OpenAIChatCompletionBase(OpenAIHandler, ChatCompletionClientBase): + """OpenAI Chat completion class.""" + + def get_prompt_execution_settings_class(self) -> "PromptExecutionSettings": + """Create a request settings object.""" + return OpenAIChatPromptExecutionSettings + + async def complete_chat( + self, + messages: List[Dict[str, str]], + settings: OpenAIPromptExecutionSettings, + **kwargs, + ) -> List[OpenAIChatMessageContent]: + """Executes a chat completion request and returns the result. + + Arguments: + messages {List[Dict[str,str]]} -- The messages to use for the chat completion. + settings {OpenAIChatPromptExecutionSettings | AzureChatPromptExecutionSettings} -- The settings to use + for the chat completion request. + + Returns: + List[OpenAIChatMessageContent | AzureChatMessageContent] -- The completion result(s). 
+ """ + # TODO: replace messages with ChatHistory object with ChatMessageContent objects + settings.messages = messages + settings.stream = False + if not settings.ai_model_id: + settings.ai_model_id = self.ai_model_id + response = await self._send_request(request_settings=settings) + response_metadata = self._get_metadata_from_chat_response(response) + return [self._create_chat_message_content(response, choice, response_metadata) for choice in response.choices] + + async def complete_chat_stream( + self, + messages: List[Dict[str, str]], + settings: OpenAIPromptExecutionSettings, + **kwargs, + ) -> AsyncIterable[List[OpenAIStreamingChatMessageContent]]: + """Executes a streaming chat completion request and returns the result. + + Arguments: + messages {List[Tuple[str,str]]} -- The messages to use for the chat completion. + settings {OpenAIChatPromptExecutionSettings | AzureChatPromptExecutionSettings} -- The settings to use + for the chat completion request. + + Yields: + List[OpenAIStreamingChatMessageContent | AzureStreamingChatMessageContent] -- A stream of + OpenAIStreamingChatMessages or AzureStreamingChatMessageContent when using Azure. + """ + settings.messages = messages + settings.stream = True + if not settings.ai_model_id: + settings.ai_model_id = self.ai_model_id + response = await self._send_request(request_settings=settings) + if not isinstance(response, AsyncStream): + raise ValueError("Expected an AsyncStream[ChatCompletionChunk] response.") + + update_storage = self._get_update_storage_fields() + + async for chunk in response: + if len(chunk.choices) == 0: + continue + chunk_metadata = self._get_metadata_from_streaming_chat_response(chunk) + contents = [ + self._create_streaming_chat_message_content(chunk, choice, chunk_metadata) for choice in chunk.choices + ] + self._update_storages(contents, update_storage) + yield contents + + def _create_chat_message_content( + self, response: ChatCompletion, choice: Choice, response_metadata: Dict[str, Any] + ) -> OpenAIChatMessageContent: + """Create a chat message content object from a choice.""" + metadata = self._get_metadata_from_chat_choice(choice) + metadata.update(response_metadata) + return OpenAIChatMessageContent( + inner_content=response, + ai_model_id=self.ai_model_id, + metadata=metadata, + role=ChatRole(choice.message.role), + content=choice.message.content, + function_call=self._get_function_call_from_chat_choice(choice), + tool_calls=self._get_tool_calls_from_chat_choice(choice), + ) + + def _create_streaming_chat_message_content( + self, + chunk: ChatCompletionChunk, + choice: ChunkChoice, + chunk_metadata: Dict[str, Any], + ): + """Create a streaming chat message content object from a choice.""" + metadata = self._get_metadata_from_chat_choice(choice) + metadata.update(chunk_metadata) + return OpenAIStreamingChatMessageContent( + choice_index=choice.index, + inner_content=chunk, + ai_model_id=self.ai_model_id, + metadata=metadata, + role=ChatRole(choice.delta.role), + content=choice.delta.content, + finish_reason=FinishReason(choice.finish_reason), + function_call=self._get_function_call_from_chat_choice(choice), + tool_calls=self._get_tool_calls_from_chat_choice(choice), + ) + + def _get_update_storage_fields(self) -> Dict[str, Dict[int, Any]]: + """Get the fields to use for storing updates to the messages, tool_calls and function_calls.""" + out_messages = {} + tool_call_ids_by_index = {} + function_call_by_index = {} + return { + "out_messages": out_messages, + "tool_call_ids_by_index": tool_call_ids_by_index, 
+ "function_call_by_index": function_call_by_index, + } + + def _update_storages( + self, contents: List[OpenAIStreamingChatMessageContent], update_storage: Dict[str, Dict[int, Any]] + ): + """Handle updates to the messages, tool_calls and function_calls. + + This will be used for auto-invoking tools. + """ + out_messages = update_storage["out_messages"] + tool_call_ids_by_index = update_storage["tool_call_ids_by_index"] + function_call_by_index = update_storage["function_call_by_index"] + + for index, content in enumerate(contents): + if content.content is not None: + if index not in out_messages: + out_messages[index] = str(content) + else: + out_messages[index] += str(content) + if content.tool_calls is not None: + if index not in tool_call_ids_by_index: + tool_call_ids_by_index[index] = content.tool_calls + else: + for tc_index, tool_call in enumerate(content.tool_calls): + tool_call_ids_by_index[index][tc_index].update(tool_call) + if content.function_call is not None: + if index not in function_call_by_index: + function_call_by_index[index] = content.function_call + else: + function_call_by_index[index].update(content.function_call) + + def _get_metadata_from_chat_response(self, response: ChatCompletion) -> Dict[str, Any]: + """Get metadata from a chat response.""" + return { + "id": response.id, + "created": response.created, + "system_fingerprint": response.system_fingerprint, + "usage": response.usage, + } + + def _get_metadata_from_streaming_chat_response(self, response: ChatCompletionChunk) -> Dict[str, Any]: + """Get metadata from a streaming chat response.""" + return { + "id": response.id, + "created": response.created, + "system_fingerprint": response.system_fingerprint, + } + + def _get_metadata_from_chat_choice(self, choice: Union[Choice, ChunkChoice]) -> Dict[str, Any]: + """Get metadata from a chat choice.""" + return { + "logprobs": getattr(choice, "logprobs", None), + } + + def _get_tool_calls_from_chat_choice(self, choice: Union[Choice, ChunkChoice]) -> Optional[List[ToolCall]]: + """Get tool calls from a chat choice.""" + if isinstance(choice, Choice): + content = choice.message + else: + content = choice.delta + if content.tool_calls is None: + return None + return [ + ToolCall( + id=tool.id, + type=tool.type, + function=FunctionCall(name=tool.function.name, arguments=tool.function.arguments), + ) + for tool in content.tool_calls + ] + + def _get_function_call_from_chat_choice(self, choice: Union[Choice, ChunkChoice]) -> Optional[FunctionCall]: + """Get a function call from a chat choice.""" + if isinstance(choice, Choice): + content = choice.message + else: + content = choice.delta + if content.function_call is None: + return None + return FunctionCall(name=content.function_call.name, arguments=content.function_call.arguments) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py new file mode 100644 index 000000000000..d965a8ef299d --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_config_base.py @@ -0,0 +1,103 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import json +import logging +from typing import Any, Dict, Mapping, Optional + +from openai import AsyncOpenAI +from pydantic import Field, validate_call + +from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.open_ai.const import ( + USER_AGENT, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIHandler, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_model_types import ( + OpenAIModelTypes, +) +from semantic_kernel.connectors.telemetry import APP_INFO + +logger: logging.Logger = logging.getLogger(__name__) + + +class OpenAIConfigBase(OpenAIHandler): + @validate_call(config=dict(arbitrary_types_allowed=True)) + def __init__( + self, + ai_model_id: str = Field(min_length=1), + api_key: Optional[str] = Field(min_length=1), + ai_model_type: Optional[OpenAIModelTypes] = OpenAIModelTypes.CHAT, + org_id: Optional[str] = None, + default_headers: Optional[Mapping[str, str]] = None, + async_client: Optional[AsyncOpenAI] = None, + log: Optional[Any] = None, + ) -> None: + """Initialize a client for OpenAI services. + + This constructor sets up a client to interact with OpenAI's API, allowing for + different types of AI model interactions, like chat or text completion. + + Arguments: + ai_model_id {str} -- OpenAI model identifier. Must be non-empty. + Default to a preset value. + api_key {Optional[str]} -- OpenAI API key for authentication. + Must be non-empty. (Optional) + ai_model_type {Optional[OpenAIModelTypes]} -- The type of OpenAI + model to interact with. Defaults to CHAT. + org_id {Optional[str]} -- OpenAI organization ID. This is optional + unless the account belongs to multiple organizations. + default_headers {Optional[Mapping[str, str]]} -- Default headers + for HTTP requests. (Optional) + + """ + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + # Merge APP_INFO into the headers if it exists + merged_headers = default_headers.copy() if default_headers else {} + if APP_INFO: + merged_headers[USER_AGENT] = json.dumps(APP_INFO) + + if not async_client: + if not api_key: + raise AIException( + AIException.ErrorCodes.InvalidConfiguration, + "Please provide an api_key", + ) + async_client = AsyncOpenAI( + api_key=api_key, + organization=org_id, + default_headers=merged_headers, + ) + + super().__init__( + ai_model_id=ai_model_id, + client=async_client, + ai_model_type=ai_model_type, + ) + + def to_dict(self) -> Dict[str, str]: + """ + Create a dict of the service settings. + """ + client_settings = { + "api_key": self.client.api_key, + "default_headers": {k: v for k, v in self.client.default_headers.items() if k != USER_AGENT}, + } + if self.client.organization: + client_settings["org_id"] = self.client.organization + base = self.model_dump( + exclude={ + "prompt_tokens", + "completion_tokens", + "total_tokens", + "api_type", + "ai_model_type", + "client", + }, + by_alias=True, + exclude_none=True, + ) + base.update(client_settings) + return base diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_handler.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_handler.py new file mode 100644 index 000000000000..3041b0933b26 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_handler.py @@ -0,0 +1,106 @@ +# Copyright (c) Microsoft. All rights reserved. 
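# Reviewer sketch (not part of the patch): the to_dict method added to
# OpenAIConfigBase above serializes a configured service back to plain settings,
# dropping the injected user-agent header and the token counters. The concrete
# service class and values below are placeholders; the round trip assumes
# ai_model_id survives model_dump, as the exclusion list above suggests.
from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion

service = OpenAIChatCompletion(ai_model_id="gpt-3.5-turbo", api_key="<api-key>")
settings = service.to_dict()
clone = OpenAIChatCompletion.from_dict(settings)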
+ +import logging +from abc import ABC +from typing import List, Union + +from numpy import array, ndarray +from openai import AsyncOpenAI, AsyncStream, BadRequestError +from openai.types import Completion +from openai.types.chat import ChatCompletion, ChatCompletionChunk +from pydantic import Field + +from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.ai_service_client_base import AIServiceClientBase +from semantic_kernel.connectors.ai.open_ai.exceptions.content_filter_ai_exception import ( + ContentFilterAIException, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIEmbeddingPromptExecutionSettings, + OpenAIPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_model_types import ( + OpenAIModelTypes, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + +logger: logging.Logger = logging.getLogger(__name__) + + +class OpenAIHandler(AIServiceClientBase, ABC): + """Internal class for calls to OpenAI APIs.""" + + client: AsyncOpenAI + ai_model_type: OpenAIModelTypes = OpenAIModelTypes.CHAT + prompt_tokens: int = Field(0, init_var=False) + completion_tokens: int = Field(0, init_var=False) + total_tokens: int = Field(0, init_var=False) + + async def _send_request( + self, + request_settings: OpenAIPromptExecutionSettings, + ) -> Union[ChatCompletion, Completion, AsyncStream[ChatCompletionChunk], AsyncStream[Completion]]: + """ + Sends a chat or text completion request to the OpenAI API, + depending on the configured model type. + + Arguments: + request_settings {OpenAIPromptExecutionSettings} -- The prepared request settings, + including the prompt or messages and the stream flag. + + Returns: + ChatCompletion, Completion, AsyncStream[Completion | ChatCompletionChunk] -- The completion response.
+ """ + try: + if self.ai_model_type == OpenAIModelTypes.CHAT: + response = await self.client.chat.completions.create(**request_settings.prepare_settings_dict()) + else: + response = await self.client.completions.create(**request_settings.prepare_settings_dict()) + self.store_usage(response) + return response + except BadRequestError as ex: + if ex.code == "content_filter": + raise ContentFilterAIException( + AIException.ErrorCodes.BadContentError, + f"{type(self)} service encountered a content error", + ex, + ) + raise AIException( + AIException.ErrorCodes.ServiceError, + f"{type(self)} service failed to complete the prompt", + ex, + ) from ex + except Exception as ex: + raise AIException( + AIException.ErrorCodes.ServiceError, + f"{type(self)} service failed to complete the prompt", + ex, + ) from ex + + async def _send_embedding_request(self, settings: OpenAIEmbeddingPromptExecutionSettings) -> List[ndarray]: + try: + response = await self.client.embeddings.create(**settings.prepare_settings_dict()) + self.store_usage(response) + # make numpy arrays from the response + # TODO: the openai response is cast to a list[float], could be used instead of ndarray + return [array(x.embedding) for x in response.data] + except Exception as ex: + raise AIException( + AIException.ErrorCodes.ServiceError, + f"{type(self)} service failed to generate embeddings", + ex, + ) from ex + + def store_usage(self, response): + if not isinstance(response, AsyncStream): + logger.info(f"OpenAI usage: {response.usage}") + self.prompt_tokens += response.usage.prompt_tokens + self.total_tokens += response.usage.total_tokens + if hasattr(response.usage, "completion_tokens"): + self.completion_tokens += response.usage.completion_tokens + + def get_prompt_execution_settings_class(self) -> "PromptExecutionSettings": + """Return the class with the applicable request settings.""" + return OpenAIPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py new file mode 100644 index 000000000000..95e9b4521596 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_model_types.py @@ -0,0 +1,11 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum + + +class OpenAIModelTypes(Enum): + """OpenAI model types, can be text, chat or embedding.""" + + TEXT = "text" + CHAT = "chat" + EMBEDDING = "embedding" diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py index cd7ccc5c49d6..8e07ea41ef43 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py @@ -1,186 +1,135 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger -from typing import Any, List, Optional, Union +import json +import logging +from typing import Any, Dict, Mapping, Optional, overload -import openai +from openai import AsyncOpenAI -from semantic_kernel.connectors.ai.ai_exception import AIException -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from semantic_kernel.connectors.ai.open_ai.services.open_ai_config_base import ( + OpenAIConfigBase, ) -from semantic_kernel.connectors.ai.text_completion_client_base import ( - TextCompletionClientBase, +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIModelTypes, ) -from semantic_kernel.utils.null_logger import NullLogger +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion_base import ( + OpenAITextCompletionBase, +) + +logger: logging.Logger = logging.getLogger(__name__) -class OpenAITextCompletion(TextCompletionClientBase): - _model_id: str - _api_key: str - _api_type: Optional[str] = None - _api_version: Optional[str] = None - _endpoint: Optional[str] = None - _org_id: Optional[str] = None - _log: Logger - _prompt_tokens: int - _completion_tokens: int - _total_tokens: int +class OpenAITextCompletion(OpenAITextCompletionBase, OpenAIConfigBase): + """OpenAI Text Completion class.""" + @overload def __init__( self, - model_id: str, - api_key: str, + ai_model_id: str, + async_client: AsyncOpenAI, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an OpenAITextCompletion service. + + Arguments: + ai_model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + async_client {AsyncOpenAI} -- An existing client to use. + """ + + @overload + def __init__( + self, + ai_model_id: str, + api_key: Optional[str] = None, org_id: Optional[str] = None, - api_type: Optional[str] = None, - api_version: Optional[str] = None, - endpoint: Optional[str] = None, - log: Optional[Logger] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, ) -> None: """ - Initializes a new instance of the OpenAITextCompletion class. + Initialize an OpenAITextCompletion service. Arguments: - model_id {str} -- OpenAI model name, see + ai_model_id {str} -- OpenAI model name, see https://platform.openai.com/docs/models - api_key {str} -- OpenAI API key, see - https://platform.openai.com/account/api-keys + api_key {Optional[str]} -- OpenAI API key, see + https://platform.openai.com/account/api-keys (Optional) org_id {Optional[str]} -- OpenAI organization ID. This is usually optional unless your account belongs to multiple organizations. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) """ - self._model_id = model_id - self._api_key = api_key - self._api_type = api_type - self._api_version = api_version - self._endpoint = endpoint.rstrip("/") if endpoint is not None else None - self._org_id = org_id - self._log = log if log is not None else NullLogger() - - async def complete_async( - self, - prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ) -> Union[str, List[str]]: - # TODO: tracking on token counts/etc. 
- response = await self._send_completion_request(prompt, request_settings, False) - if len(response.choices) == 1: - return response.choices[0].text - else: - return [choice.text for choice in response.choices] + @overload + def __init__( + self, + ai_model_id: str, + api_key: Optional[str] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an OpenAITextCompletion service. - # TODO: complete w/ multiple... + Arguments: + ai_model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + api_key {Optional[str]} -- OpenAI API key, see + https://platform.openai.com/account/api-keys (Optional) + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + """ - async def complete_stream_async( + def __init__( self, - prompt: str, - request_settings: CompleteRequestSettings, - logger: Optional[Logger] = None, - ): - response = await self._send_completion_request(prompt, request_settings, True) - - async for chunk in response: - if request_settings.number_of_responses > 1: - for choice in chunk.choices: - completions = [""] * request_settings.number_of_responses - completions[choice.index] = choice.text - yield completions - else: - yield chunk.choices[0].text - - async def _send_completion_request( - self, prompt: str, request_settings: CompleteRequestSettings, stream: bool - ): + ai_model_id: str, + api_key: Optional[str] = None, + org_id: Optional[str] = None, + default_headers: Optional[Mapping[str, str]] = None, + log: Optional[Any] = None, + async_client: Optional[AsyncOpenAI] = None, + ) -> None: """ - Completes the given prompt. Returns a single string completion. - Cannot return multiple completions. Cannot return logprobs. + Initialize an OpenAITextCompletion service. Arguments: - prompt {str} -- The prompt to complete. - request_settings {CompleteRequestSettings} -- The request settings. + ai_model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + api_key {Optional[str]} -- OpenAI API key, see + https://platform.openai.com/account/api-keys (Optional) + org_id {Optional[str]} -- OpenAI organization ID. + This is usually optional unless your + account belongs to multiple organizations. + default_headers: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + async_client {Optional[AsyncOpenAI]} -- An existing client to use. (Optional) + """ + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + super().__init__( + ai_model_id=ai_model_id, + api_key=api_key, + org_id=org_id, + ai_model_type=OpenAIModelTypes.TEXT, + default_headers=default_headers, + async_client=async_client, + ) + + @classmethod + def from_dict(cls, settings: Dict[str, str]) -> "OpenAITextCompletion": + """ + Initialize an Open AI service from a dictionary of settings. - Returns: - str -- The completed text. + Arguments: + settings: A dictionary of settings for the service. 
""" - if not prompt: - raise ValueError("The prompt cannot be `None` or empty") - if request_settings is None: - raise ValueError("The request settings cannot be `None`") - - if request_settings.max_tokens < 1: - raise AIException( - AIException.ErrorCodes.InvalidRequest, - "The max tokens must be greater than 0, " - f"but was {request_settings.max_tokens}", - ) - - if request_settings.logprobs != 0: - raise AIException( - AIException.ErrorCodes.InvalidRequest, - "complete_async does not support logprobs, " - f"but logprobs={request_settings.logprobs} was requested", - ) - - model_args = {} - if self._api_type in ["azure", "azure_ad"]: - model_args["engine"] = self._model_id - else: - model_args["model"] = self._model_id - - try: - response: Any = await openai.Completion.acreate( - **model_args, - api_key=self._api_key, - api_type=self._api_type, - api_base=self._endpoint, - api_version=self._api_version, - organization=self._org_id, - prompt=prompt, - temperature=request_settings.temperature, - top_p=request_settings.top_p, - presence_penalty=request_settings.presence_penalty, - frequency_penalty=request_settings.frequency_penalty, - max_tokens=request_settings.max_tokens, - stream=stream, - n=request_settings.number_of_responses, - stop=( - request_settings.stop_sequences - if request_settings.stop_sequences is not None - and len(request_settings.stop_sequences) > 0 - else None - ), - logit_bias=( - request_settings.token_selection_biases - if request_settings.token_selection_biases is not None - and len(request_settings.token_selection_biases) > 0 - else {} - ), - ) - except Exception as ex: - raise AIException( - AIException.ErrorCodes.ServiceError, - f"{self.__class__.__name__} failed to complete the prompt", - ex, - ) - - if "usage" in response: - self._log.info(f"OpenAI usage: {response.usage}") - self._prompt_tokens += response.usage.prompt_tokens - self._completion_tokens += response.usage.completion_tokens - self._total_tokens += response.usage.total_tokens - - return response - - @property - def prompt_tokens(self) -> int: - return self._prompt_tokens - - @property - def completion_tokens(self) -> int: - return self._completion_tokens - - @property - def total_tokens(self) -> int: - return self._total_tokens + if "default_headers" in settings and isinstance(settings["default_headers"], str): + settings["default_headers"] = json.loads(settings["default_headers"]) + return OpenAITextCompletion( + ai_model_id=settings["ai_model_id"], + api_key=settings["api_key"], + org_id=settings.get("org_id"), + default_headers=settings.get("default_headers"), + ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion_base.py new file mode 100644 index 000000000000..cb83180e71f7 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion_base.py @@ -0,0 +1,148 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import logging +from typing import TYPE_CHECKING, Any, AsyncIterable, Dict, List, Union + +from openai import AsyncStream +from openai.types import Completion, CompletionChoice +from openai.types.chat.chat_completion import Choice as ChatCompletionChoice +from openai.types.chat.chat_completion_chunk import ChatCompletionChunk + +from semantic_kernel.connectors.ai import TextCompletionClientBase +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAITextPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIHandler, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.models.contents import StreamingTextContent, TextContent + +if TYPE_CHECKING: + from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIPromptExecutionSettings, + ) + +logger: logging.Logger = logging.getLogger(__name__) + + +class OpenAITextCompletionBase(OpenAIHandler, TextCompletionClientBase): + def get_prompt_execution_settings_class(self) -> "PromptExecutionSettings": + """Create a request settings object.""" + return OpenAITextPromptExecutionSettings + + async def complete( + self, + prompt: str, + settings: "OpenAIPromptExecutionSettings", + **kwargs, + ) -> List["TextContent"]: + """Executes a completion request and returns the result. + + Arguments: + prompt {str} -- The prompt to use for the completion request. + settings {OpenAITextPromptExecutionSettings} -- The settings to use for the completion request. + + Returns: + List["TextContent"] -- The completion result(s). + """ + if isinstance(settings, OpenAITextPromptExecutionSettings): + settings.prompt = prompt + else: + settings.messages = [{"role": "user", "content": prompt}] + if settings.ai_model_id is None: + settings.ai_model_id = self.ai_model_id + response = await self._send_request(request_settings=settings) + metadata = self._get_metadata_from_text_response(response) + return [self._create_text_content(response, choice, metadata) for choice in response.choices] + + def _create_text_content( + self, + response: Completion, + choice: Union[CompletionChoice, ChatCompletionChoice], + response_metadata: Dict[str, Any], + ) -> "TextContent": + """Create a text content object from a choice.""" + choice_metadata = self._get_metadata_from_text_choice(choice) + choice_metadata.update(response_metadata) + text = choice.text if isinstance(choice, CompletionChoice) else choice.message.content + return TextContent( + inner_content=response, + ai_model_id=self.ai_model_id, + text=text, + metadata=choice_metadata, + ) + + async def complete_stream( + self, + prompt: str, + settings: "OpenAIPromptExecutionSettings", + **kwargs, + ) -> AsyncIterable[List["StreamingTextContent"]]: + """ + Executes a completion request and streams the result. + Supports both chat completion and text completion. + + Arguments: + prompt {str} -- The prompt to use for the completion request. + settings {OpenAITextPromptExecutionSettings} -- The settings to use for the completion request. + + Yields: + List["StreamingTextContent"] -- The result stream made up of StreamingTextContent objects. 
+ """ + if "prompt" in settings.model_fields: + settings.prompt = prompt + if "messages" in settings.model_fields: + if not settings.messages: + settings.messages = [{"role": "user", "content": prompt}] + else: + settings.messages.append({"role": "user", "content": prompt}) + settings.ai_model_id = self.ai_model_id + settings.stream = True + response = await self._send_request(request_settings=settings) + if not isinstance(response, AsyncStream): + raise ValueError("Expected an AsyncStream[Completion] response.") + + async for chunk in response: + if len(chunk.choices) == 0: + continue + chunk_metadata = self._get_metadata_from_text_response(chunk) + yield [self._create_streaming_text_content(chunk, choice, chunk_metadata) for choice in chunk.choices] + + def _create_streaming_text_content( + self, chunk: Completion, choice: Union[CompletionChoice, ChatCompletionChunk], response_metadata: Dict[str, Any] + ) -> "StreamingTextContent": + """Create a streaming text content object from a choice.""" + choice_metadata = self._get_metadata_from_text_choice(choice) + choice_metadata.update(response_metadata) + text = choice.text if isinstance(choice, CompletionChoice) else choice.delta.content + return StreamingTextContent( + choice_index=choice.index, + inner_content=chunk, + ai_model_id=self.ai_model_id, + metadata=choice_metadata, + text=text, + ) + + def _get_metadata_from_text_response(self, response: Completion) -> Dict[str, Any]: + """Get metadata from a completion response.""" + return { + "id": response.id, + "created": response.created, + "system_fingerprint": response.system_fingerprint, + "usage": response.usage, + } + + def _get_metadata_from_streaming_text_response(self, response: Completion) -> Dict[str, Any]: + """Get metadata from a streaming completion response.""" + return { + "id": response.id, + "created": response.created, + "system_fingerprint": response.system_fingerprint, + } + + def _get_metadata_from_text_choice(self, choice: CompletionChoice) -> Dict[str, Any]: + """Get metadata from a completion choice.""" + return { + "logprobs": choice.logprobs, + } diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py index 04fca023c96e..a56580453842 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py @@ -1,86 +1,89 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger -from typing import Any, List, Optional +import logging +from typing import Any, Dict, Mapping, Optional, overload -import openai -from numpy import array, ndarray +from openai import AsyncOpenAI -from semantic_kernel.connectors.ai.ai_exception import AIException -from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( - EmbeddingGeneratorBase, +from semantic_kernel.connectors.ai.open_ai.services.open_ai_config_base import ( + OpenAIConfigBase, ) -from semantic_kernel.utils.null_logger import NullLogger +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIModelTypes, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding_base import ( + OpenAITextEmbeddingBase, +) + +logger: logging.Logger = logging.getLogger(__name__) + +class OpenAITextEmbedding(OpenAIConfigBase, OpenAITextEmbeddingBase): + """OpenAI Text Embedding class.""" + + @overload + def __init__( + self, + ai_model_id: str, + async_client: AsyncOpenAI, + log: Optional[Any] = None, + ) -> None: + """ + Initialize an OpenAITextEmbedding service. -class OpenAITextEmbedding(EmbeddingGeneratorBase): - _model_id: str - _api_key: str - _api_type: Optional[str] = None - _api_version: Optional[str] = None - _endpoint: Optional[str] = None - _org_id: Optional[str] = None - _log: Logger + Arguments: + ai_model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + async_client {AsyncOpenAI} -- An existing client to use. + """ def __init__( self, - model_id: str, - api_key: str, + ai_model_id: str, + api_key: Optional[str] = None, org_id: Optional[str] = None, - api_type: Optional[str] = None, - api_version: Optional[str] = None, - endpoint: Optional[str] = None, - log: Optional[Logger] = None, + default_headers: Optional[Mapping[str, str]] = None, + async_client: Optional[AsyncOpenAI] = None, + log: Optional[Any] = None, ) -> None: """ Initializes a new instance of the OpenAITextCompletion class. Arguments: - model_id {str} -- OpenAI model name, see + ai_model_id {str} -- OpenAI model name, see https://platform.openai.com/docs/models api_key {str} -- OpenAI API key, see https://platform.openai.com/account/api-keys org_id {Optional[str]} -- OpenAI organization ID. This is usually optional unless your account belongs to multiple organizations. + default_headers {Optional[Mapping[str,str]]}: The default headers mapping of string keys to + string values for HTTP requests. (Optional) + async_client {Optional[AsyncOpenAI]} -- An existing client to use. (Optional) """ - self._model_id = model_id - self._api_key = api_key - self._api_type = api_type - self._api_version = api_version - self._endpoint = endpoint.rstrip("/") if endpoint is not None else None - self._org_id = org_id - self._log = log if log is not None else NullLogger() + super().__init__( + ai_model_id=ai_model_id, + api_key=api_key, + ai_model_type=OpenAIModelTypes.EMBEDDING, + org_id=org_id, + default_headers=default_headers, + async_client=async_client, + ) + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") - async def generate_embeddings_async( - self, texts: List[str], batch_size: Optional[int] = None - ) -> ndarray: - model_args = {} - if self._api_type in ["azure", "azure_ad"]: - model_args["engine"] = self._model_id - else: - model_args["model"] = self._model_id + @classmethod + def from_dict(cls, settings: Dict[str, str]) -> "OpenAITextEmbedding": + """ + Initialize an Open AI service from a dictionary of settings. + + Arguments: + settings: A dictionary of settings for the service. + """ - try: - raw_embeddings = [] - batch_size = batch_size or len(texts) - for i in range(0, len(texts), batch_size): - batch = texts[i : i + batch_size] - response: Any = await openai.Embedding.acreate( - **model_args, - api_key=self._api_key, - api_type=self._api_type, - api_base=self._endpoint, - api_version=self._api_version, - organization=self._org_id, - input=batch, - ) - # make numpy arrays from the response - raw_embeddings.extend([array(x["embedding"]) for x in response["data"]]) - return array(raw_embeddings) - except Exception as ex: - raise AIException( - AIException.ErrorCodes.ServiceError, - f"{self.__class__.__name__} failed to generate embeddings", - ex, - ) + return OpenAITextEmbedding( + ai_model_id=settings["ai_model_id"], + api_key=settings["api_key"], + org_id=settings.get("org_id"), + default_headers=settings.get("default_headers"), + ) diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py new file mode 100644 index 000000000000..531cf51c1994 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding_base.py @@ -0,0 +1,51 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Any, Dict, List, Optional + +from numpy import array, ndarray + +from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( + EmbeddingGeneratorBase, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIEmbeddingPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import ( + OpenAIHandler, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + + +class OpenAITextEmbeddingBase(OpenAIHandler, EmbeddingGeneratorBase): + async def generate_embeddings( + self, texts: List[str], batch_size: Optional[int] = None, **kwargs: Dict[str, Any] + ) -> ndarray: + """Generates embeddings for the given texts. + + Arguments: + texts {List[str]} -- The texts to generate embeddings for. + batch_size {Optional[int]} -- The batch size to use for the request. + kwargs {Dict[str, Any]} -- Additional arguments to pass to the request, + see OpenAIEmbeddingPromptExecutionSettings for the details. + + Returns: + ndarray -- The embeddings for the text. 
+ + """ + settings = OpenAIEmbeddingPromptExecutionSettings( + ai_model_id=self.ai_model_id, + **kwargs, + ) + raw_embeddings = [] + batch_size = batch_size or len(texts) + for i in range(0, len(texts), batch_size): + batch = texts[i : i + batch_size] # noqa: E203 + settings.input = batch + raw_embedding = await self._send_embedding_request( + settings=settings, + ) + raw_embeddings.extend(raw_embedding) + return array(raw_embeddings) + + def get_prompt_execution_settings_class(self) -> PromptExecutionSettings: + return OpenAIEmbeddingPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/open_ai/utils.py b/python/semantic_kernel/connectors/ai/open_ai/utils.py index 9b6da59fc6e6..632e8eb63eee 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/utils.py +++ b/python/semantic_kernel/connectors/ai/open_ai/utils.py @@ -1,124 +1,197 @@ -from logging import Logger -from typing import Any, Dict, List, Optional +# Copyright (c) Microsoft. All rights reserved. -from semantic_kernel import Kernel, SKContext +import json +import logging +from typing import Any, Dict, List, Optional, Tuple + +from openai.types.chat import ChatCompletion + +from semantic_kernel import Kernel, KernelContext from semantic_kernel.connectors.ai.open_ai.models.chat.function_call import FunctionCall +from semantic_kernel.connectors.ai.open_ai.models.chat.tool_calls import ToolCall from semantic_kernel.connectors.ai.open_ai.semantic_functions.open_ai_chat_prompt_template import ( OpenAIChatPromptTemplate, ) -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.orchestration.kernel_function import KernelFunction +logger: logging.Logger = logging.getLogger(__name__) -def _describe_function(function: SKFunctionBase) -> Dict[str, str]: - """Create the object used for function_calling. +def _describe_tool_call(function: KernelFunction) -> Dict[str, str]: + """Create the object used for the tool call. + + Assumes that arguments for semantic functions are optional, for native functions required. + """ + func_view = function.describe() + return { + "type": "function", + "function": { + "name": f"{func_view.plugin_name}-{func_view.name}", + "description": func_view.description, + "parameters": { + "type": "object", + "properties": { + param.name: { + "description": param.description, + "type": param.type_ if param.type_ else "string", + **({"enum": param.enum} if hasattr(param, "enum") else {}), # Added support for enum + } + for param in func_view.parameters + }, + "required": [p.name for p in func_view.parameters if p.required], + }, + }, + } + + +def _describe_function(function: KernelFunction) -> Dict[str, str]: + """Create the object used for function_calling. Assumes that arguments for semantic functions are optional, for native functions required. 
""" func_view = function.describe() return { - "name": f"{func_view.skill_name}-{func_view.name}", + "name": f"{func_view.plugin_name}-{func_view.name}", "description": func_view.description, "parameters": { "type": "object", "properties": { - param.name: {"description": param.description, "type": param.type_} - for param in func_view.parameters + param.name: {"description": param.description, "type": param.type_} for param in func_view.parameters }, "required": [p.name for p in func_view.parameters if p.required], }, } +def get_tool_call_object(kernel: Kernel, filter: Dict[str, List[str]]) -> List[Dict[str, str]]: + """Create the object used for a tool call. + + This is the preferred method to create the tool call object. + + args: + kernel: the kernel. + filter: a dictionary with keys + exclude_plugin, include_plugin, exclude_function, include_function + and lists of the required filter. + The function name should be in the format "plugin_name-function_name". + Using exclude_plugin and include_plugin at the same time will raise an error. + Using exclude_function and include_function at the same time will raise an error. + If using include_* implies that all other function will be excluded. + Example: + filter = { + "exclude_plugin": ["plugin1", "plugin2"], + "include_function": ["plugin3-function1", "plugin4-function2"], + } + will return only plugin3-function1 and plugin4-function2. + filter = { + "exclude_function": ["plugin1-function1", "plugin2-function2"], + } + will return all functions except plugin1-function1 and plugin2-function2. + returns: + a filtered list of dictionaries of the functions in the kernel that can be passed to the function calling api. + """ + return get_function_calling_object(kernel, filter, is_tool_call=True) + + def get_function_calling_object( - kernel: Kernel, filter: Dict[str, List[str]] + kernel: Kernel, filter: Dict[str, List[str]], is_tool_call: Optional[bool] = False ) -> List[Dict[str, str]]: - """Create the object used for function_calling. + """Create the object used for a function call. + + Note: although Azure has deprecated function calling, SK still supports it for the time being. args: kernel: the kernel. filter: a dictionary with keys - exclude_skill, include_skill, exclude_function, include_function + exclude_plugin, include_plugin, exclude_function, include_function and lists of the required filter. - The function name should be in the format "skill_name-function_name". - Using exclude_skill and include_skill at the same time will raise an error. + The function name should be in the format "plugin_name-function_name". + Using exclude_plugin and include_plugin at the same time will raise an error. Using exclude_function and include_function at the same time will raise an error. If using include_* implies that all other function will be excluded. Example: filter = { - "exclude_skill": ["skill1", "skill2"], - "include_function": ["skill3-function1", "skill4-function2"], + "exclude_plugin": ["plugin1", "plugin2"], + "include_function": ["plugin3-function1", "plugin4-function2"], } - will return only skill3-function1 and skill4-function2. + will return only plugin3-function1 and plugin4-function2. filter = { - "exclude_function": ["skill1-function1", "skill2-function2"], + "exclude_function": ["plugin1-function1", "plugin2-function2"], } - will return all functions except skill1-function1 and skill2-function2. - caller_function_name: the name of the function that is calling the other functions. 
+ will return all functions except plugin1-function1 and plugin2-function2. + is_tool_call: if True, the function will return a list of tool calls, otherwise a list of functions. returns: a filtered list of dictionaries of the functions in the kernel that can be passed to the function calling api. """ - include_skill = filter.get("include_skill", None) - exclude_skill = filter.get("exclude_skill", []) + include_plugin = filter.get("include_plugin", None) + exclude_plugin = filter.get("exclude_plugin", []) include_function = filter.get("include_function", None) exclude_function = filter.get("exclude_function", []) - if include_skill and exclude_skill: - raise ValueError( - "Cannot use both include_skill and exclude_skill at the same time." - ) + if include_plugin and exclude_plugin: + raise ValueError("Cannot use both include_plugin and exclude_plugin at the same time.") if include_function and exclude_function: - raise ValueError( - "Cannot use both include_function and exclude_function at the same time." - ) - if include_skill: - include_skill = [skill.lower() for skill in include_skill] - if exclude_skill: - exclude_skill = [skill.lower() for skill in exclude_skill] + raise ValueError("Cannot use both include_function and exclude_function at the same time.") + if include_plugin: + include_plugin = [plugin.lower() for plugin in include_plugin] + if exclude_plugin: + exclude_plugin = [plugin.lower() for plugin in exclude_plugin] if include_function: include_function = [function.lower() for function in include_function] if exclude_function: exclude_function = [function.lower() for function in exclude_function] result = [] for ( - skill_name, - skill, - ) in kernel.skills.data.items(): - if skill_name in exclude_skill or ( - include_skill and skill_name not in include_skill - ): + plugin_name, + plugin, + ) in kernel.plugins.plugins.items(): + if plugin_name in exclude_plugin or (include_plugin and plugin_name not in include_plugin): continue - for function_name, function in skill.items(): - current_name = f"{skill_name}-{function_name}" - if current_name in exclude_function or ( - include_function and current_name not in include_function - ): + for function_name, function in plugin.functions.items(): + current_name = f"{plugin_name}-{function_name}" + if current_name in exclude_function or (include_function and current_name not in include_function): continue - result.append(_describe_function(function)) + result.append(_describe_tool_call(function) if is_tool_call else _describe_function(function)) return result -async def execute_function_call( - kernel: Kernel, function_call: FunctionCall, log: Optional[Logger] = None -) -> str: - result = await kernel.run_async( - kernel.func(**function_call.split_name_dict()), - input_vars=function_call.to_context_variables(), - ) - if log: - log.info(f"Function call result: {result}") +async def execute(kernel: Kernel, func: KernelFunction, input_vars: ContextVariables) -> str: + """Execute a function and return the result. + + Args: + kernel (Kernel): the kernel to use. + func (KernelFunction): the function to execute. + input_vars (ContextVariables): the input variables. + + Returns: + str: the result of the execution. 
+ """ + result = await kernel.run(func, input_vars=input_vars) + logger.info(f"Execution result: {result}") return str(result) +async def execute_function_call(kernel: Kernel, function_call: FunctionCall) -> str: + """Execute a function call and return the result.""" + func = kernel.func(**function_call.split_name_dict()) + input_vars = function_call.to_context_variables() + return await execute(kernel, func, input_vars) + + +async def execute_tool_call(kernel: Kernel, tool_call: ToolCall) -> str: + """Execute a tool call and return the result.""" + func = kernel.func(**tool_call.function.split_name_dict()) + input_vars = tool_call.function.to_context_variables() + return await execute(kernel, func, input_vars) + + async def chat_completion_with_function_call( kernel: Kernel, - context: SKContext, - functions: List[Dict[str, str]] = [], - chat_skill_name: Optional[str] = None, + context: KernelContext, + chat_plugin_name: Optional[str] = None, chat_function_name: Optional[str] = None, - chat_function: Optional[SKFunctionBase] = None, - *, - log: Optional[Logger] = None, + chat_function: Optional[KernelFunction] = None, **kwargs: Dict[str, Any], -) -> SKContext: +) -> KernelContext: """Perform a chat completion with auto-executing function calling. This is a recursive function that will execute the chat function multiple times, @@ -133,52 +206,107 @@ async def chat_completion_with_function_call( kernel: the kernel to use. context: the context to use. functions: the function calling object, - make sure to use get_function_calling_object method to create it. + make sure to use get_tool_call_object method to create it. Optional arguments: - chat_skill_name: the skill name of the chat function. + chat_plugin_name: the plugin name of the chat function. chat_function_name: the function name of the chat function. chat_function: the chat function, if not provided, it will be retrieved from the kernel. - make sure to provide either the chat_function or the chat_skill_name and chat_function_name. + make sure to provide either the chat_function or the chat_plugin_name and chat_function_name. - log: the logger to use. max_function_calls: the maximum number of function calls to execute, defaults to 5. current_call_count: the current number of function calls executed. returns: - the context with the result of the chat completion, just like a regular invoke_async/run_async. + the context with the result of the chat completion, just like a regular invoke/run_async. """ # check the number of function calls max_function_calls = kwargs.get("max_function_calls", 5) current_call_count = kwargs.get("current_call_count", 0) # get the chat function if chat_function is None: - chat_function = kernel.func( - skill_name=chat_skill_name, function_name=chat_function_name - ) + chat_function = kernel.func(plugin_name=chat_plugin_name, function_name=chat_function_name) assert isinstance( - chat_function._chat_prompt_template, OpenAIChatPromptTemplate + chat_function.chat_prompt_template, OpenAIChatPromptTemplate ), "Please make sure to initialize your chat function with the OpenAIChatPromptTemplate class." - context = await chat_function.invoke_async( + settings = chat_function.chat_prompt_template.prompt_config.execution_settings + if current_call_count >= max_function_calls: + settings.functions = [] + context = await chat_function.invoke( context=context, # when the maximum number of function calls is reached, execute the chat function without Functions. 
- functions=[] if current_call_count >= max_function_calls else functions, + settings=settings, ) - function_call = context.objects.pop("function_call", None) - # if there is no function_call or if the content is not a FunctionCall object, return the context - if function_call is None or not isinstance(function_call, FunctionCall): + if not (results := context.objects.pop("results", None)): + return context + function_call = next( + ( + fc + for fc in (results[0].function_call or results[0].tool_calls or [None]) + if isinstance(fc, (FunctionCall, ToolCall)) + ), + None, + ) + if function_call: + execute_call = execute_tool_call if isinstance(function_call, ToolCall) else execute_function_call + result = await execute_call(kernel, function_call) + tool_call_id = function_call.id + else: return context - result = await execute_function_call(kernel, function_call, log=log) # add the result to the chat prompt template - chat_function._chat_prompt_template.add_function_response_message( - name=function_call.name, content=str(result) + chat_function.chat_prompt_template.add_function_response_message( + name=function_call.function.name, content=str(result), tool_call_id=tool_call_id ) # request another completion return await chat_completion_with_function_call( kernel, chat_function=chat_function, - functions=functions, context=context, - log=log, max_function_calls=max_function_calls, current_call_count=current_call_count + 1, ) + + +def _parse_message( + message: ChatCompletion, with_data: bool = False +) -> Tuple[Optional[str], Optional[str], Optional[FunctionCall]]: + """ + Parses the message. + + Arguments: + message {OpenAIObject} -- The message to parse. + + Returns: + Tuple[Optional[str], Optional[Dict]] -- The parsed message. + """ + content = message.content if hasattr(message, "content") else None + tool_calls = message.tool_calls if hasattr(message, "tool_calls") else None + function_calls = ( + [FunctionCall(id=call.id, name=call.function.name, arguments=call.function.arguments) for call in tool_calls] + if tool_calls + else None + ) + + # todo: support multiple function calls + function_call = function_calls[0] if function_calls else None + + if not with_data: + return (content, None, function_call) + else: + tool_content = None + if message.model_extra and "context" in message.model_extra: + if "messages" in message.model_extra["context"]: + for m in message.model_extra["context"]["messages"]: + if m.get("role") == "tool": + tool_content = m.get("content", None) + break + else: + tool_content = json.dumps(message.model_extra["context"]) + return (content, tool_content, function_call) + + +def _parse_choices(choice) -> Tuple[str, int]: + message = "" + if choice.delta.content: + message += choice.delta.content + + return message, choice.index diff --git a/python/semantic_kernel/connectors/ai/prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/prompt_execution_settings.py new file mode 100644 index 000000000000..769a880f63c0 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/prompt_execution_settings.py @@ -0,0 +1,80 @@ +from typing import Any, Dict, Optional + +from pydantic import Field + +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class PromptExecutionSettings(KernelBaseModel): + """Base class for prompt execution settings. + + Can be used by itself or as a base class for other prompt execution settings. 
The methods are used to create + specific prompt execution settings objects based on the keys in the extension_data field, this way you can + create a generic PromptExecutionSettings object in your application, which get's mapped into the keys of the + prompt execution settings that each services returns by using the service.get_prompt_execution_settings() method. + + Parameters: + service_id (str, optional): The service ID to use for the request. Defaults to None. + extension_data (Dict[str, Any], optional): Any additional data to send with the request. Defaults to None. + kwargs (Any): Additional keyword arguments, + these are attempted to parse into the keys of the specific prompt execution settings. + Methods: + prepare_settings_dict: Prepares the settings as a dictionary for sending to the AI service. + update_from_prompt_execution_settings: Update the keys from another prompt execution settings object. + from_prompt_execution_settings: Create a prompt execution settings from another prompt execution settings object. + """ # noqa: E501 + + service_id: Optional[str] = Field(None, min_length=1) + extension_data: Dict[str, Any] = Field(default_factory=dict) + + def __init__(self, service_id: Optional[str] = None, **kwargs: Any): + extension_data = kwargs.pop("extension_data", {}) + extension_data.update(kwargs) + super().__init__(service_id=service_id, extension_data=extension_data) + self.unpack_extension_data() + + @property + def keys(self): + """Get the keys of the prompt execution settings.""" + return self.model_fields.keys() + + def prepare_settings_dict(self, **kwargs) -> Dict[str, Any]: + return self.model_dump( + exclude={"service_id", "extension_data"}, + exclude_none=True, + by_alias=True, + ) + + def update_from_prompt_execution_settings(self, config: "PromptExecutionSettings") -> None: + """Update the prompt execution settings from a completion config.""" + if config.service_id is not None: + self.service_id = config.service_id + config.pack_extension_data() + self.extension_data.update(config.extension_data) + self.unpack_extension_data() + + @classmethod + def from_prompt_execution_settings(cls, config: "PromptExecutionSettings") -> "PromptExecutionSettings": + """Create a prompt execution settings from a completion config.""" + config.pack_extension_data() + return cls( + service_id=config.service_id, + extension_data=config.extension_data, + ) + + def unpack_extension_data(self) -> None: + """Update the prompt execution settings from extension data. + + Does not overwrite existing values with None. + """ + for key, value in self.extension_data.items(): + if value is None: + continue + if key in self.keys: + setattr(self, key, value) + + def pack_extension_data(self) -> None: + """Update the extension data from the prompt execution settings.""" + for key in self.model_fields_set: + if key not in ["service_id", "extension_data"] and getattr(self, key) is not None: + self.extension_data[key] = getattr(self, key) diff --git a/python/semantic_kernel/connectors/ai/text_completion_client_base.py b/python/semantic_kernel/connectors/ai/text_completion_client_base.py index c01653b9aabb..e2651c5c5c5d 100644 --- a/python/semantic_kernel/connectors/ai/text_completion_client_base.py +++ b/python/semantic_kernel/connectors/ai/text_completion_client_base.py @@ -1,52 +1,51 @@ # Copyright (c) Microsoft. All rights reserved. 
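# --- Illustrative sketch (not part of the diff): how the generic PromptExecutionSettings
# --- base defined just above is intended to be used. `_MyServiceSettings` and its keys
# --- are hypothetical; real connectors expose their own subclass via
# --- service.get_prompt_execution_settings_class(), as the embedding base does earlier
# --- in this diff.
from typing import Optional

from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings


class _MyServiceSettings(PromptExecutionSettings):
    # Hypothetical service-specific keys for the sake of the sketch.
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None


# Generic settings created in application code; unknown keyword arguments land in extension_data.
generic = PromptExecutionSettings(service_id="my-service", temperature=0.2, max_tokens=256)

# Map the generic object onto the service-specific class; matching keys are unpacked onto fields.
specific = _MyServiceSettings.from_prompt_execution_settings(generic)
print(specific.temperature)              # 0.2
print(specific.prepare_settings_dict())  # {"temperature": 0.2, "max_tokens": 256}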
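# --- Illustrative sketch (not part of the diff): the filter dictionary accepted by the
# --- get_tool_call_object helper introduced earlier in this diff. The plugin and function
# --- names below are hypothetical; only the filter keys and the "plugin-function" naming
# --- convention come from the docstring above.
from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.open_ai.utils import get_tool_call_object

kernel = Kernel()  # assumes plugins have already been registered on the kernel

# Expose every function except those of one plugin ...
tools = get_tool_call_object(kernel, {"exclude_plugin": ["math"]})

# ... or expose only two specific functions (any include_* filter excludes everything else).
tools = get_tool_call_object(kernel, {"include_function": ["writer-summarize", "writer-translate"]})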
+ from abc import ABC, abstractmethod -from logging import Logger -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, Any, AsyncIterable, List, Optional if TYPE_CHECKING: - from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, - ) + from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + from semantic_kernel.models.contents import StreamingTextContent, TextContent class TextCompletionClientBase(ABC): + """Base class for text completion AI services.""" + @abstractmethod - async def complete_async( + async def complete( self, prompt: str, - settings: "CompleteRequestSettings", - logger: Optional[Logger] = None, - ) -> Union[str, List[str]]: + settings: "PromptExecutionSettings", + logger: Optional[Any] = None, + ) -> List["TextContent"]: """ This is the method that is called from the kernel to get a response from a text-optimized LLM. Arguments: prompt {str} -- The prompt to send to the LLM. - settings {CompleteRequestSettings} -- Settings for the request. - logger {Logger} -- A logger to use for logging. + settings {PromptExecutionSettings} -- Settings for the request. + logger {Logger} -- A logger to use for logging (deprecated). Returns: Union[str, List[str]] -- A string or list of strings representing the response(s) from the LLM. """ - pass @abstractmethod - async def complete_stream_async( + async def complete_stream( self, prompt: str, - settings: "CompleteRequestSettings", - logger: Optional[Logger] = None, - ): + settings: "PromptExecutionSettings", + logger: Optional[Any] = None, + ) -> AsyncIterable[List["StreamingTextContent"]]: """ This is the method that is called from the kernel to get a stream response from a text-optimized LLM. Arguments: prompt {str} -- The prompt to send to the LLM. - settings {CompleteRequestSettings} -- Settings for the request. - logger {Logger} -- A logger to use for logging. + settings {PromptExecutionSettings} -- Settings for the request. + logger {Logger} -- A logger to use for logging (deprecated). Yields: A stream representing the response(s) from the LLM. """ - pass diff --git a/python/semantic_kernel/connectors/memory/astradb/__init__.py b/python/semantic_kernel/connectors/memory/astradb/__init__.py new file mode 100644 index 000000000000..b8907d83882b --- /dev/null +++ b/python/semantic_kernel/connectors/memory/astradb/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft. All rights reserved. 
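# --- Illustrative sketch (not part of the diff): using the refactored OpenAITextEmbedding
# --- service defined near the top of this section. The model name and API key are
# --- placeholders; generate_embeddings comes from OpenAITextEmbeddingBase and batches
# --- the requests as shown in that file.
import asyncio

from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding


async def main() -> None:
    service = OpenAITextEmbedding(ai_model_id="text-embedding-ada-002", api_key="sk-...")
    # batch_size=1 forces one request per text; omit it to send everything in one call.
    embeddings = await service.generate_embeddings(["first text", "second text"], batch_size=1)
    print(embeddings.shape)  # (2, <embedding dimension>)


asyncio.run(main())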
+ +from semantic_kernel.connectors.memory.astradb.astradb_memory_store import ( + AstraDBMemoryStore, +) + +__all__ = ["AstraDBMemoryStore"] diff --git a/python/semantic_kernel/connectors/memory/astradb/astra_client.py b/python/semantic_kernel/connectors/memory/astradb/astra_client.py new file mode 100644 index 000000000000..d0aa30870b92 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/astradb/astra_client.py @@ -0,0 +1,157 @@ +import json +from typing import Dict, List, Optional + +import aiohttp + +from semantic_kernel.connectors.memory.astradb.utils import AsyncSession + + +class AstraClient: + def __init__( + self, + astra_id: str, + astra_region: str, + astra_application_token: str, + keyspace_name: str, + embedding_dim: int, + similarity_function: str, + session: Optional[aiohttp.ClientSession] = None, + ): + self.astra_id = astra_id + self.astra_application_token = astra_application_token + self.astra_region = astra_region + self.keyspace_name = keyspace_name + self.embedding_dim = embedding_dim + self.similarity_function = similarity_function + + self.request_base_url = ( + f"https://{self.astra_id}-{self.astra_region}.apps.astra.datastax.com/api/json/v1/{self.keyspace_name}" + ) + self.request_header = { + "x-cassandra-token": self.astra_application_token, + "Content-Type": "application/json", + } + self._session = session + + async def _run_query(self, request_url: str, query: Dict): + async with AsyncSession(self._session) as session: + async with session.post(request_url, data=json.dumps(query), headers=self.request_header) as response: + if response.status == 200: + response_dict = await response.json() + if "errors" in response_dict: + raise Exception(f"Astra DB request error - {response_dict['errors']}") + else: + return response_dict + else: + raise Exception(f"Astra DB not available. 
Status : {response}") + + async def find_collections(self, include_detail: bool = True): + query = {"findCollections": {"options": {"explain": include_detail}}} + result = await self._run_query(self.request_base_url, query) + return result["status"]["collections"] + + async def find_collection(self, collection_name: str): + collections = await self.find_collections(False) + found = False + for collection in collections: + if collection == collection_name: + found = True + break + return found + + async def create_collection( + self, + collection_name: str, + embedding_dim: Optional[int] = None, + similarity_function: Optional[str] = None, + ): + query = { + "createCollection": { + "name": collection_name, + "options": { + "vector": { + "dimension": embedding_dim if embedding_dim is not None else self.embedding_dim, + "metric": similarity_function if similarity_function is not None else self.similarity_function, + } + }, + } + } + result = await self._run_query(self.request_base_url, query) + return True if result["status"]["ok"] == 1 else False + + async def delete_collection(self, collection_name: str): + query = {"deleteCollection": {"name": collection_name}} + result = await self._run_query(self.request_base_url, query) + return True if result["status"]["ok"] == 1 else False + + def _build_request_collection_url(self, collection_name: str): + return f"{self.request_base_url}/{collection_name}" + + async def find_documents( + self, + collection_name: str, + filter: Optional[Dict] = None, + vector: Optional[List[float]] = None, + limit: Optional[int] = None, + include_vector: Optional[bool] = None, + include_similarity: Optional[bool] = None, + ) -> List[Dict]: + find_query = {} + + if filter is not None: + find_query["filter"] = filter + + if vector is not None: + find_query["sort"] = {"$vector": vector} + + if include_vector is not None and include_vector is False: + find_query["projection"] = {"$vector": 0} + + if limit is not None: + find_query["options"] = {"limit": limit} + + if include_similarity is not None: + if "options" in find_query: + find_query["options"]["includeSimilarity"] = int(include_similarity) + else: + find_query["options"] = {"includeSimilarity": int(include_similarity)} + + query = {"find": find_query} + result = await self._run_query(self._build_request_collection_url(collection_name), query) + return result["data"]["documents"] + + async def insert_document(self, collection_name: str, document: Dict) -> str: + query = {"insertOne": {"document": document}} + result = await self._run_query(self._build_request_collection_url(collection_name), query) + return result["status"]["insertedIds"][0] + + async def insert_documents(self, collection_name: str, documents: List[Dict]) -> List[str]: + query = {"insertMany": {"documents": documents}} + result = await self._run_query(self._build_request_collection_url(collection_name), query) + return result["status"]["insertedIds"] + + async def update_document(self, collection_name: str, filter: Dict, update: Dict, upsert: bool = True) -> Dict: + query = { + "findOneAndUpdate": { + "filter": filter, + "update": update, + "options": {"returnDocument": "after", "upsert": upsert}, + } + } + result = await self._run_query(self._build_request_collection_url(collection_name), query) + return result["status"] + + async def update_documents(self, collection_name: str, filter: Dict, update: Dict): + query = { + "updateMany": { + "filter": filter, + "update": update, + } + } + result = await 
self._run_query(self._build_request_collection_url(collection_name), query) + return result["status"] + + async def delete_documents(self, collection_name: str, filter: Dict) -> int: + query = {"deleteMany": {"filter": filter}} + result = await self._run_query(self._build_request_collection_url(collection_name), query) + return result["status"]["deletedCount"] diff --git a/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py b/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py new file mode 100644 index 000000000000..68db6789b2dc --- /dev/null +++ b/python/semantic_kernel/connectors/memory/astradb/astradb_memory_store.py @@ -0,0 +1,303 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +from typing import List, Optional, Tuple + +import aiohttp +from numpy import ndarray + +from semantic_kernel.connectors.memory.astradb.astra_client import AstraClient +from semantic_kernel.connectors.memory.astradb.utils import ( + build_payload, + parse_payload, +) +from semantic_kernel.memory.memory_record import MemoryRecord +from semantic_kernel.memory.memory_store_base import MemoryStoreBase + +MAX_DIMENSIONALITY = 20000 +MAX_UPSERT_BATCH_SIZE = 100 +MAX_QUERY_WITHOUT_METADATA_BATCH_SIZE = 10000 +MAX_QUERY_WITH_METADATA_BATCH_SIZE = 1000 +MAX_FETCH_BATCH_SIZE = 1000 +MAX_DELETE_BATCH_SIZE = 1000 + +logger: logging.Logger = logging.getLogger(__name__) + + +class AstraDBMemoryStore(MemoryStoreBase): + """A memory store that uses Astra database as the backend.""" + + def __init__( + self, + astra_application_token: str, + astra_id: str, + astra_region: str, + keyspace_name: str, + embedding_dim: int, + similarity: str, + session: Optional[aiohttp.ClientSession] = None, + ) -> None: + """Initializes a new instance of the AstraDBMemoryStore class. + + Arguments: + astra_application_token {str} -- The Astra application token. + astra_id {str} -- The Astra id of database. + astra_region {str} -- The Astra region + keyspace_name {str} -- The Astra keyspace + embedding_dim {int} -- The dimensionality to use for new collections. + similarity {str} -- TODO + session -- Optional session parameter + """ + self._embedding_dim = embedding_dim + self._similarity = similarity + self._session = session + + if self._embedding_dim > MAX_DIMENSIONALITY: + raise ValueError( + f"Dimensionality of {self._embedding_dim} exceeds " + + f"the maximum allowed value of {MAX_DIMENSIONALITY}." + ) + + self._client = AstraClient( + astra_id=astra_id, + astra_region=astra_region, + astra_application_token=astra_application_token, + keyspace_name=keyspace_name, + embedding_dim=embedding_dim, + similarity_function=similarity, + session=self._session, + ) + + async def get_collections_async(self) -> List[str]: + """Gets the list of collections. + + Returns: + List[str] -- The list of collections. + """ + return await self._client.find_collections(False) + + async def create_collection_async( + self, + collection_name: str, + dimension_num: Optional[int] = None, + distance_type: Optional[str] = "cosine", + ) -> None: + """Creates a new collection in Astra if it does not exist. + + Arguments: + collection_name {str} -- The name of the collection to create. + dimension_num {int} -- The dimension of the vectors to be stored in this collection. + distance_type {str} -- Specifies the similarity metric to be used when querying or comparing vectors within + this collection. The available options are dot_product, euclidean, and cosine. 
+ Returns: + None + """ + dimension_num = dimension_num if dimension_num is not None else self._embedding_dim + distance_type = distance_type if distance_type is not None else self._similarity + + if dimension_num > MAX_DIMENSIONALITY: + raise ValueError( + f"Dimensionality of {dimension_num} exceeds " + f"the maximum allowed value of {MAX_DIMENSIONALITY}." + ) + + result = await self._client.create_collection(collection_name, dimension_num, distance_type) + if result is True: + logger.info(f"Collection {collection_name} created.") + + async def delete_collection_async(self, collection_name: str) -> None: + """Deletes a collection. + + Arguments: + collection_name {str} -- The name of the collection to delete. + + Returns: + None + """ + result = await self._client.delete_collection(collection_name) + logger.log( + logging.INFO if result is True else logging.WARNING, + f"Collection {collection_name} {'deleted.' if result is True else 'does not exist.'}", + ) + + async def does_collection_exist_async(self, collection_name: str) -> bool: + """Checks if a collection exists. + + Arguments: + collection_name {str} -- The name of the collection to check. + + Returns: + bool -- True if the collection exists; otherwise, False. + """ + return await self._client.find_collection(collection_name) + + async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + """Upserts a memory record into the data store. Does not guarantee that the collection exists. + If the record already exists, it will be updated. + If the record does not exist, it will be created. + + Arguments: + collection_name {str} -- The name associated with a collection of embeddings. + record {MemoryRecord} -- The memory record to upsert. + + Returns: + str -- The unique identifier for the memory record. + """ + filter = {"_id": record._id} + update = {"$set": build_payload(record)} + status = await self._client.update_document(collection_name, filter, update, True) + + return status["upsertedId"] if "upsertedId" in status else record._id + + async def upsert_batch_async(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: + """Upserts a batch of memory records into the data store. Does not guarantee that the collection exists. + If the record already exists, it will be updated. + If the record does not exist, it will be created. + + Arguments: + collection_name {str} -- The name associated with a collection of embeddings. + records {List[MemoryRecord]} -- The memory records to upsert. + + Returns: + List[str] -- The unique identifiers for the memory record. + """ + return await asyncio.gather(*[self.upsert_async(collection_name, record) for record in records]) + + async def get_async(self, collection_name: str, key: str, with_embedding: bool = False) -> MemoryRecord: + """Gets a record. Does not guarantee that the collection exists. + + Arguments: + collection_name {str} -- The name of the collection to get the record from. + key {str} -- The unique database key of the record. + with_embedding {bool} -- Whether to include the embedding in the result. (default: {False}) + + Returns: + MemoryRecord -- The record. 
+ """ + filter = {"_id": key} + documents = await self._client.find_documents( + collection_name=collection_name, + filter=filter, + include_vector=with_embedding, + ) + + if len(documents) == 0: + raise KeyError(f"Record with key '{key}' does not exist") + + return parse_payload(documents[0]) + + async def get_batch_async( + self, collection_name: str, keys: List[str], with_embeddings: bool = False + ) -> List[MemoryRecord]: + """Gets a batch of records. Does not guarantee that the collection exists. + + Arguments: + collection_name {str} -- The name of the collection to get the records from. + keys {List[str]} -- The unique database keys of the records. + with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False}) + + Returns: + List[MemoryRecord] -- The records. + """ + + filter = {"_id": {"$in": keys}} + documents = await self._client.find_documents( + collection_name=collection_name, + filter=filter, + include_vector=with_embeddings, + ) + return [parse_payload(document) for document in documents] + + async def remove_async(self, collection_name: str, key: str) -> None: + """Removes a memory record from the data store. Does not guarantee that the collection exists. + + Arguments: + collection_name {str} -- The name of the collection to remove the record from. + key {str} -- The unique id associated with the memory record to remove. + + Returns: + None + """ + filter = {"_id": key} + await self._client.delete_documents(collection_name, filter) + + async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + """Removes a batch of records. Does not guarantee that the collection exists. + + Arguments: + collection_name {str} -- The name of the collection to remove the records from. + keys {List[str]} -- The unique ids associated with the memory records to remove. + + Returns: + None + """ + filter = {"_id": {"$in": keys}} + await self._client.delete_documents(collection_name, filter) + + async def get_nearest_match_async( + self, + collection_name: str, + embedding: ndarray, + min_relevance_score: float = 0.0, + with_embedding: bool = False, + ) -> Tuple[MemoryRecord, float]: + """Gets the nearest match to an embedding using cosine similarity. + Arguments: + collection_name {str} -- The name of the collection to get the nearest matches from. + embedding {ndarray} -- The embedding to find the nearest matches to. + min_relevance_score {float} -- The minimum relevance score of the matches. (default: {0.0}) + with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False}) + + Returns: + Tuple[MemoryRecord, float] -- The record and the relevance score. + """ + matches = await self.get_nearest_matches_async( + collection_name=collection_name, + embedding=embedding, + limit=1, + min_relevance_score=min_relevance_score, + with_embeddings=with_embedding, + ) + return matches[0] + + async def get_nearest_matches_async( + self, + collection_name: str, + embedding: ndarray, + limit: int, + min_relevance_score: float = 0.0, + with_embeddings: bool = False, + ) -> List[Tuple[MemoryRecord, float]]: + """Gets the nearest matches to an embedding using cosine similarity. + Arguments: + collection_name {str} -- The name of the collection to get the nearest matches from. + embedding {ndarray} -- The embedding to find the nearest matches to. + limit {int} -- The maximum number of matches to return. + min_relevance_score {float} -- The minimum relevance score of the matches. 
(default: {0.0}) + with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False}) + + Returns: + List[Tuple[MemoryRecord, float]] -- The records and their relevance scores. + """ + matches = await self._client.find_documents( + collection_name=collection_name, + vector=embedding.tolist(), + limit=limit, + include_similarity=True, + include_vector=with_embeddings, + ) + + if min_relevance_score: + matches = [match for match in matches if match["$similarity"] >= min_relevance_score] + + return ( + [ + ( + parse_payload(match), + match["$similarity"], + ) + for match in matches + ] + if len(matches) > 0 + else [] + ) diff --git a/python/semantic_kernel/connectors/memory/astradb/utils.py b/python/semantic_kernel/connectors/memory/astradb/utils.py new file mode 100644 index 000000000000..a5a69a0595b4 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/astradb/utils.py @@ -0,0 +1,50 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import Any, Dict + +import aiohttp +import numpy + +from semantic_kernel.memory.memory_record import MemoryRecord + + +class AsyncSession: + def __init__(self, session: aiohttp.ClientSession = None): + self._session = session if session else aiohttp.ClientSession() + + async def __aenter__(self): + return await self._session.__aenter__() + + async def __aexit__(self, *args, **kwargs): + await self._session.close() + + +def build_payload(record: MemoryRecord) -> Dict[str, Any]: + """ + Builds a metadata payload to be sent to AstraDb from a MemoryRecord. + """ + payload: Dict[str, Any] = {} + payload["$vector"] = record.embedding.tolist() + if record._text: + payload["text"] = record._text + if record._description: + payload["description"] = record._description + if record._additional_metadata: + payload["additional_metadata"] = record._additional_metadata + return payload + + +def parse_payload(document: Dict[str, Any]) -> MemoryRecord: + """ + Parses a record from AstraDb into a MemoryRecord. + """ + text = document.get("text", None) + description = document["description"] if "description" in document else None + additional_metadata = document["additional_metadata"] if "additional_metadata" in document else None + + return MemoryRecord.local_record( + id=document["_id"], + description=description, + text=text, + additional_metadata=additional_metadata, + embedding=document["$vector"] if "$vector" in document else numpy.array([]), + ) diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py index d5add2b89158..b824bee4d739 100644 --- a/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_cognitive_search_memory_store.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
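# --- Illustrative sketch (not part of the diff): an end-to-end use of the new
# --- AstraDBMemoryStore defined above. Credentials, names and vectors are placeholders;
# --- the constructor and method signatures follow the code earlier in this diff.
import asyncio

import numpy as np

from semantic_kernel.connectors.memory.astradb import AstraDBMemoryStore
from semantic_kernel.memory.memory_record import MemoryRecord


async def main() -> None:
    store = AstraDBMemoryStore(
        astra_application_token="AstraCS:...",
        astra_id="<database-id>",
        astra_region="us-east1",
        keyspace_name="default_keyspace",
        embedding_dim=3,        # toy dimensionality for the sketch
        similarity="cosine",
    )
    await store.create_collection_async("notes")

    record = MemoryRecord.local_record(
        id="doc-1",
        text="hello astra",
        description="example record",
        additional_metadata=None,
        embedding=np.array([0.1, 0.2, 0.3]),
    )
    await store.upsert_async("notes", record)

    matches = await store.get_nearest_matches_async(
        collection_name="notes",
        embedding=np.array([0.1, 0.2, 0.3]),
        limit=1,
        min_relevance_score=0.7,
    )
    for rec, score in matches:
        print(rec._id, score)


asyncio.run(main())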
+import logging import uuid -from logging import Logger from typing import List, Optional, Tuple from azure.core.credentials import AzureKeyCredential, TokenCredential @@ -10,6 +10,7 @@ from azure.search.documents.indexes.models import ( HnswVectorSearchAlgorithmConfiguration, SearchIndex, + SearchResourceEncryptionKey, VectorSearch, ) from azure.search.documents.models import Vector @@ -27,13 +28,13 @@ ) from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) class AzureCognitiveSearchMemoryStore(MemoryStoreBase): _search_index_client: SearchIndexClient = None _vector_size: int = None - _logger: Logger = None def __init__( self, @@ -42,7 +43,7 @@ def __init__( admin_key: Optional[str] = None, azure_credentials: Optional[AzureKeyCredential] = None, token_credentials: Optional[TokenCredential] = None, - logger: Optional[Logger] = None, + **kwargs, ) -> None: """Initializes a new instance of the AzureCognitiveSearchMemoryStore class. @@ -54,12 +55,13 @@ def __init__( azure_credentials {Optional[AzureKeyCredential]} -- Azure Cognitive Search credentials (default: {None}). token_credentials {Optional[TokenCredential]} -- Azure Cognitive Search token credentials (default: {None}). - logger {Optional[Logger]} -- The logger to use (default: {None}). Instantiate using Async Context Manager: async with AzureCognitiveSearchMemoryStore(<...>) as memory: await memory.<...> """ + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") try: pass except ImportError: @@ -68,21 +70,21 @@ def __init__( "Please install Azure Cognitive Search client" ) - self._logger = logger or NullLogger() self._vector_size = vector_size self._search_index_client = get_search_index_async_client( search_endpoint, admin_key, azure_credentials, token_credentials ) - async def close_async(self): + async def close(self): """Async close connection, invoked by MemoryStoreBase.__aexit__()""" if self._search_index_client is not None: await self._search_index_client.close() - async def create_collection_async( + async def create_collection( self, collection_name: str, vector_config: Optional[HnswVectorSearchAlgorithmConfiguration] = None, + search_resource_encryption_key: Optional[SearchResourceEncryptionKey] = None, ) -> None: """Creates a new collection if it does not exist. @@ -91,6 +93,9 @@ async def create_collection_async( vector_config {HnswVectorSearchAlgorithmConfiguration} -- Optional search algorithm configuration (default: {None}). semantic_config {SemanticConfiguration} -- Optional search index configuration (default: {None}). + search_resource_encryption_key {SearchResourceEncryptionKey} -- Optional Search Encryption Key + (default: {None}). 
+ Returns: None """ @@ -126,9 +131,7 @@ async def create_collection_async( # Check to see if collection exists collection_index = None try: - collection_index = await self._search_index_client.get_index( - collection_name.lower() - ) + collection_index = await self._search_index_client.get_index(collection_name.lower()) except ResourceNotFoundError: pass @@ -138,11 +141,12 @@ async def create_collection_async( name=collection_name.lower(), fields=get_index_schema(self._vector_size), vector_search=vector_search, + encryption_key=search_resource_encryption_key, ) await self._search_index_client.create_index(index) - async def get_collections_async(self) -> List[str]: + async def get_collections(self) -> List[str]: """Gets the list of collections. Returns: @@ -161,7 +165,7 @@ async def get_collections_async(self) -> List[str]: return results_list - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: """Deletes a collection. Arguments: @@ -172,7 +176,7 @@ async def delete_collection_async(self, collection_name: str) -> None: """ await self._search_index_client.delete_index(index=collection_name.lower()) - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Checks if a collection exists. Arguments: @@ -183,9 +187,7 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: """ try: - collection_result = await self._search_index_client.get_index( - name=collection_name.lower() - ) + collection_result = await self._search_index_client.get_index(name=collection_name.lower()) if collection_result: return True @@ -194,7 +196,7 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: except ResourceNotFoundError: return False - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upsert a record. Arguments: @@ -205,14 +207,12 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: str -- The unique record id of the record. """ - result = await self.upsert_batch_async(collection_name, [record]) + result = await self.upsert_batch(collection_name, [record]) if result: return result[0] return None - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: """Upsert a batch of records. Arguments: @@ -225,9 +225,7 @@ async def upsert_batch_async( # Initialize search client here # Look up Search client class to see if exists or create - search_client = self._search_index_client.get_search_client( - collection_name.lower() - ) + search_client = self._search_index_client.get_search_client(collection_name.lower()) search_records = [] search_ids = [] @@ -251,9 +249,7 @@ async def upsert_batch_async( else: return None - async def get_async( - self, collection_name: str, key: str, with_embedding: bool = False - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool = False) -> MemoryRecord: """Gets a record. 
Arguments: @@ -266,9 +262,7 @@ async def get_async( """ # Look up Search client class to see if exists or create - search_client = self._search_index_client.get_search_client( - collection_name.lower() - ) + search_client = self._search_index_client.get_search_client(collection_name.lower()) try: search_result = await search_client.get_document( @@ -283,7 +277,7 @@ async def get_async( # Create Memory record from document return dict_to_memory_record(search_result, with_embedding) - async def get_batch_async( + async def get_batch( self, collection_name: str, keys: List[str], with_embeddings: bool = False ) -> List[MemoryRecord]: """Gets a batch of records. @@ -300,7 +294,7 @@ async def get_batch_async( search_results = [] for key in keys: - search_result = await self.get_async( + search_result = await self.get( collection_name=collection_name.lower(), key=key, with_embedding=with_embeddings, @@ -309,7 +303,7 @@ async def get_batch_async( return search_results - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Removes a batch of records. Arguments: @@ -321,11 +315,9 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non """ for record_id in keys: - await self.remove_async( - collection_name=collection_name.lower(), key=encode_id(record_id) - ) + await self.remove(collection_name=collection_name.lower(), key=encode_id(record_id)) - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Removes a record. Arguments: @@ -337,15 +329,13 @@ async def remove_async(self, collection_name: str, key: str) -> None: """ # Look up Search client class to see if exists or create - search_client = self._search_index_client.get_search_client( - collection_name.lower() - ) + search_client = self._search_index_client.get_search_client(collection_name.lower()) docs_to_delete = {SEARCH_FIELD_ID: encode_id(key)} await search_client.delete_documents(documents=[docs_to_delete]) await search_client.close() - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -364,7 +354,7 @@ async def get_nearest_match_async( Tuple[MemoryRecord, float] -- The record and the relevance score. 
""" - memory_records = await self.get_nearest_matches_async( + memory_records = await self.get_nearest_matches( collection_name=collection_name, embedding=embedding, min_relevance_score=min_relevance_score, @@ -377,7 +367,7 @@ async def get_nearest_match_async( else: return None - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -399,13 +389,9 @@ async def get_nearest_matches_async( """ # Look up Search client class to see if exists or create - search_client = self._search_index_client.get_search_client( - collection_name.lower() - ) + search_client = self._search_index_client.get_search_client(collection_name.lower()) - vector = Vector( - value=embedding.flatten(), k=limit, fields=SEARCH_FIELD_EMBEDDING - ) + vector = Vector(value=embedding.flatten(), k=limit, fields=SEARCH_FIELD_EMBEDDING) search_results = await search_client.search( search_text="*", diff --git a/python/semantic_kernel/connectors/memory/azure_cognitive_search/utils.py b/python/semantic_kernel/connectors/memory/azure_cognitive_search/utils.py index b502ffbcc497..a84958db96c7 100644 --- a/python/semantic_kernel/connectors/memory/azure_cognitive_search/utils.py +++ b/python/semantic_kernel/connectors/memory/azure_cognitive_search/utils.py @@ -13,6 +13,9 @@ ) from dotenv import load_dotenv +from semantic_kernel.connectors.ai.open_ai.const import ( + USER_AGENT, +) from semantic_kernel.memory.memory_record import MemoryRecord SEARCH_FIELD_ID = "Id" @@ -83,17 +86,13 @@ def get_search_index_async_client( if azure_credential is None and token_credential is None: raise ValueError("Error: Azure Cognitive Search credentials not set.") - sk_headers = {"User-Agent": "Semantic-Kernel"} + sk_headers = {USER_AGENT: "Semantic-Kernel"} if azure_credential: - return SearchIndexClient( - endpoint=service_endpoint, credential=azure_credential, headers=sk_headers - ) + return SearchIndexClient(endpoint=service_endpoint, credential=azure_credential, headers=sk_headers) if token_credential: - return SearchIndexClient( - endpoint=service_endpoint, credential=token_credential, headers=sk_headers - ) + return SearchIndexClient(endpoint=service_endpoint, credential=token_credential, headers=sk_headers) raise ValueError("Error: unable to create Azure Cognitive Search client.") diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/__init__.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/__init__.py new file mode 100644 index 000000000000..ca310d9b0964 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.connectors.memory.azure_cosmosdb.azure_cosmos_db_memory_store import ( + AzureCosmosDBMemoryStore, +) + +__all__ = ["AzureCosmosDBMemoryStore"] diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py new file mode 100644 index 000000000000..65d44beb0a89 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_memory_store.py @@ -0,0 +1,253 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import List, Tuple + +from numpy import ndarray + +from semantic_kernel.connectors.memory.azure_cosmosdb.azure_cosmos_db_store_api import ( + AzureCosmosDBStoreApi, +) +from semantic_kernel.connectors.memory.azure_cosmosdb.cosmosdb_utils import ( + get_mongodb_resources, +) +from semantic_kernel.connectors.memory.azure_cosmosdb.mongo_vcore_store_api import ( + MongoStoreApi, +) +from semantic_kernel.memory.memory_record import MemoryRecord +from semantic_kernel.memory.memory_store_base import MemoryStoreBase +from semantic_kernel.utils.settings import azure_cosmos_db_settings_from_dot_env + +# Load environment variables +(cosmos_api, cosmos_connstr) = azure_cosmos_db_settings_from_dot_env() + + +class AzureCosmosDBMemoryStore(MemoryStoreBase): + """A memory store that uses AzureCosmosDB for MongoDB vCore, to perform vector similarity search on a fully + managed MongoDB compatible database service. + https://learn.microsoft.com/en-us/azure/cosmos-db/mongodb/vcore/vector-search""" + + # Right now this only supports Mongo, but set up to support more later. + apiStore: AzureCosmosDBStoreApi = None + mongodb_client = None + database = None + index_name = None + vector_dimensions = None + num_lists = None + similarity = None + collection_name = None + + def __init__( + self, + cosmosStore: AzureCosmosDBStoreApi, + database_name: str, + index_name: str, + vector_dimensions: int, + num_lists: int, + similarity: str, + ): + if vector_dimensions <= 0: + raise ValueError("Vector dimensions must be a positive number.") + # if connection_string is None: + # raise ValueError("Connection String cannot be empty.") + if database_name is None: + raise ValueError("Database Name cannot be empty.") + if index_name is None: + raise ValueError("Index Name cannot be empty.") + + self.cosmosStore = cosmosStore + self.index_name = index_name + self.num_lists = num_lists + self.similarity = similarity + + @staticmethod + async def create( + database_name, + collection_name, + index_name, + vector_dimensions, + num_lists, + similarity, + ) -> MemoryStoreBase: + """Creates the underlying data store based on the API definition""" + # Right now this only supports Mongo, but set up to support more later. + apiStore: AzureCosmosDBStoreApi = None + if cosmos_api == "mongo-vcore": + mongodb_client, database = get_mongodb_resources(cosmos_connstr, database_name) + apiStore = MongoStoreApi( + collection_name, + index_name, + vector_dimensions, + num_lists, + similarity, + database, + ) + else: + raise NotImplementedError + + store = AzureCosmosDBMemoryStore( + apiStore, + database_name, + index_name, + vector_dimensions, + num_lists, + similarity, + ) + await store.create_collection(collection_name) + return store + + async def create_collection(self, collection_name: str) -> None: + """Creates a new collection in the data store. + + Arguments: + collection_name {str} -- The name associated with a collection of embeddings. + + Returns: + None + """ + return await self.cosmosStore.create_collection(collection_name) + + async def get_collections(self) -> List[str]: + """Gets the list of collections. + + Returns: + List[str] -- The list of collections. + """ + return await self.cosmosStore.get_collections() + + async def delete_collection(self, collection_name: str) -> None: + """Deletes a collection. + + Arguments: + collection_name {str} -- The name of the collection to delete. 
+
+        Returns:
+            None
+        """
+        return await self.cosmosStore.delete_collection(collection_name)
+
+    async def does_collection_exist(self, collection_name: str) -> bool:
+        """Checks if a collection exists.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to check.
+
+        Returns:
+            bool -- True if the collection exists; otherwise, False.
+        """
+        return await self.cosmosStore.does_collection_exist(collection_name)
+
+    async def upsert(self, collection_name: str, record: MemoryRecord) -> str:
+        """Upserts a record.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to upsert the record into.
+            record {MemoryRecord} -- The record to upsert.
+
+        Returns:
+            str -- The unique record id of the record.
+        """
+        return await self.cosmosStore.upsert(collection_name, record)
+
+    async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]:
+        """Upserts a batch of records.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to upsert the records into.
+            records {List[MemoryRecord]} -- The records to upsert.
+
+        Returns:
+            List[str] -- The unique database keys of the records.
+        """
+        return await self.cosmosStore.upsert_batch(collection_name, records)
+
+    async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord:
+        """Gets a record.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to get the record from.
+            key {str} -- The unique database key of the record.
+            with_embedding {bool} -- Whether to include the embedding in the result. (default: {False})
+
+        Returns:
+            MemoryRecord -- The record.
+        """
+        return await self.cosmosStore.get(collection_name, key, with_embedding)
+
+    async def get_batch(self, collection_name: str, keys: List[str], with_embeddings: bool) -> List[MemoryRecord]:
+        """Gets a batch of records.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to get the records from.
+            keys {List[str]} -- The unique database keys of the records.
+            with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False})
+
+        Returns:
+            List[MemoryRecord] -- The records.
+        """
+        return await self.cosmosStore.get_batch(collection_name, keys, with_embeddings)
+
+    async def remove(self, collection_name: str, key: str) -> None:
+        """Removes a record.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to remove the record from.
+            key {str} -- The unique database key of the record to remove.
+
+        Returns:
+            None
+        """
+        return await self.cosmosStore.remove(collection_name, key)
+
+    async def remove_batch(self, collection_name: str, keys: List[str]) -> None:
+        """Removes a batch of records.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to remove the records from.
+            keys {List[str]} -- The unique database keys of the records to remove.
+
+        Returns:
+            None
+        """
+        return await self.cosmosStore.remove_batch(collection_name, keys)
+
+    async def get_nearest_matches(
+        self,
+        collection_name: str,
+        embedding: ndarray,
+        limit: int,
+        min_relevance_score: float,
+        with_embeddings: bool,
+    ) -> List[Tuple[MemoryRecord, float]]:
+        """Gets the nearest matches to an embedding using the vector configuration.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to get the nearest matches from.
+            embedding {ndarray} -- The embedding to find the nearest matches to.
+            limit {int} -- The maximum number of matches to return.
+            min_relevance_score {float} -- The minimum relevance score of the matches. (default: {0.0})
+            with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False})
+
+        Returns:
+            List[Tuple[MemoryRecord, float]] -- The records and their relevance scores.
+        """
+        return await self.cosmosStore.get_nearest_matches(
+            collection_name, embedding, limit, min_relevance_score, with_embeddings
+        )
+
+    async def get_nearest_match(
+        self,
+        collection_name: str,
+        embedding: ndarray,
+        min_relevance_score: float,
+        with_embedding: bool,
+    ) -> Tuple[MemoryRecord, float]:
+        """Gets the nearest match to an embedding using the vector configuration parameters.
+
+        Arguments:
+            collection_name {str} -- The name of the collection to get the nearest match from.
+            embedding {ndarray} -- The embedding to find the nearest match to.
+            min_relevance_score {float} -- The minimum relevance score of the match. (default: {0.0})
+            with_embedding {bool} -- Whether to include the embedding in the result. (default: {False})
+
+        Returns:
+            Tuple[MemoryRecord, float] -- The record and the relevance score.
+        """
+        return await self.cosmosStore.get_nearest_match(
+            collection_name, embedding, min_relevance_score, with_embedding
+        )
diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py
new file mode 100644
index 000000000000..3498fed1c987
--- /dev/null
+++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/azure_cosmos_db_store_api.py
@@ -0,0 +1,73 @@
+# Copyright (c) Microsoft. All rights reserved.
+
+
+from abc import ABC, abstractmethod
+from typing import List, Tuple
+
+from numpy import ndarray
+
+from semantic_kernel.memory.memory_record import MemoryRecord
+
+
+# Abstract class that mirrors the memory store interface and allows API-level abstraction
+class AzureCosmosDBStoreApi(ABC):
+    @abstractmethod
+    async def create_collection(self, collection_name: str) -> None:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def get_collections(self) -> List[str]:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def delete_collection(self, collection_name: str) -> None:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def does_collection_exist(self, collection_name: str) -> bool:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def upsert(self, collection_name: str, record: MemoryRecord) -> str:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def get_batch(self, collection_name: str, keys: List[str], with_embeddings: bool) -> List[MemoryRecord]:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def remove(self, collection_name: str, key: str) -> None:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def remove_batch(self, collection_name: str, keys: List[str]) -> None:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def get_nearest_matches(
+        self,
+        collection_name: str,
+        embedding: ndarray,
+        limit: int,
+        min_relevance_score: float,
+        with_embeddings: bool,
+    ) -> List[Tuple[MemoryRecord, float]]:
+        raise NotImplementedError
+
+    @abstractmethod
+    async def get_nearest_match(
+        self,
+        collection_name: str,
+        embedding: ndarray,
+        min_relevance_score: float,
+        with_embedding: bool,
+    ) -> Tuple[MemoryRecord, float]:
+        raise NotImplementedError
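A minimal usage sketch of the new AzureCosmosDBMemoryStore, for reference only (not part of the diff). It assumes the .env file read by azure_cosmos_db_settings_from_dot_env() selects the "mongo-vcore" API and supplies a valid vCore connection string, that the store module is importable under the path shown, and that "COS" is an accepted similarity value; the database, collection, and index names are illustrative.

# Illustrative sketch only: module path, similarity value, and names below are assumptions.
import asyncio

import numpy as np

from semantic_kernel.connectors.memory.azure_cosmosdb.azure_cosmosdb_memory_store import (
    AzureCosmosDBMemoryStore,
)
from semantic_kernel.memory.memory_record import MemoryRecord


async def main() -> None:
    # create() builds the MongoStoreApi, ensures the collection and vector index exist,
    # and returns a ready-to-use memory store.
    store = await AzureCosmosDBMemoryStore.create(
        database_name="demo_db",
        collection_name="demo_collection",
        index_name="demo_index",
        vector_dimensions=3,
        num_lists=1,
        similarity="COS",
    )
    record = MemoryRecord.local_record(
        id="doc-1",
        text="sample text",
        description="sample description",
        additional_metadata=None,
        timestamp=None,
        embedding=np.array([0.1, 0.2, 0.3]),
    )
    await store.upsert("demo_collection", record)
    matches = await store.get_nearest_matches(
        collection_name="demo_collection",
        embedding=np.array([0.1, 0.2, 0.3]),
        limit=1,
        min_relevance_score=0.0,
        with_embeddings=False,
    )
    print(matches)


asyncio.run(main())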
diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/cosmosdb_utils.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/cosmosdb_utils.py
new file mode 100644
index 000000000000..2c96a2ae3b13
--- /dev/null
+++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/cosmosdb_utils.py
@@ -0,0 +1,12 @@
+# Copyright (c) Microsoft. All rights reserved.
+
+from pymongo import MongoClient
+
+
+def get_mongodb_resources(connection_string: str, database_name: str):
+    try:
+        client = MongoClient(connection_string)
+        database = client[database_name]
+    except Exception as ex:
+        raise Exception(f"Error while connecting to Azure Cosmos DB for MongoDB vCore: {ex}") from ex
+    return client, database
diff --git a/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py b/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py
new file mode 100644
index 000000000000..1c9c5c1b0604
--- /dev/null
+++ b/python/semantic_kernel/connectors/memory/azure_cosmosdb/mongo_vcore_store_api.py
@@ -0,0 +1,202 @@
+# Copyright (c) Microsoft. All rights reserved.
+
+import json
+from typing import List, Tuple
+
+import numpy as np
+from pymongo import ReplaceOne
+
+from semantic_kernel.connectors.memory.azure_cosmosdb.azure_cosmos_db_store_api import (
+    AzureCosmosDBStoreApi,
+)
+from semantic_kernel.memory.memory_record import MemoryRecord
+
+
+class MongoStoreApi(AzureCosmosDBStoreApi):
+    database = None
+    collection_name: str
+    index_name = None
+    vector_dimensions = None
+    num_lists = None
+    similarity = None
+    collection = None
+
+    def __init__(
+        self,
+        collection_name: str,
+        index_name: str,
+        vector_dimensions: int,
+        num_lists: int,
+        similarity: str,
+        database=None,
+    ):
+        self.database = database
+        self.collection_name = collection_name
+        self.index_name = index_name
+        self.num_lists = num_lists
+        self.similarity = similarity
+        self.vector_dimensions = vector_dimensions
+
+    async def create_collection(self, collection_name: str) -> None:
+        if not await self.does_collection_exist(collection_name):
+            # list_indexes() yields index documents, so compare against their names.
+            existing_index_names = [index["name"] for index in self.database[collection_name].list_indexes()]
+            if self.index_name not in existing_index_names:
+                self.database.command(
+                    {
+                        "createIndexes": collection_name,
+                        "indexes": [
+                            {
+                                "name": self.index_name,
+                                "key": {"embedding": "cosmosSearch"},
+                                "cosmosSearchOptions": {
+                                    "kind": "vector-ivf",
+                                    "numLists": self.num_lists,
+                                    "similarity": self.similarity,
+                                    "dimensions": self.vector_dimensions,
+                                },
+                            }
+                        ],
+                    }
+                )
+        self.collection = self.database[collection_name]
+
+    async def get_collections(self) -> List[str]:
+        return self.database.list_collection_names()
+
+    async def delete_collection(self, collection_name: str) -> None:
+        return self.collection.drop()
+
+    async def does_collection_exist(self, collection_name: str) -> bool:
+        return collection_name in self.database.list_collection_names()
+
+    async def upsert(self, collection_name: str, record: MemoryRecord) -> str:
+        result = await self.upsert_batch(collection_name, [record])
+        return result[0]
+
+    async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]:
+        doc_ids: List[str] = []
+        cosmosRecords: List[dict] = []
+        for record in records:
+            cosmosRecord: dict = {
+                "_id": record.id,
+                "embedding": record.embedding.tolist(),
+                "text": record.text,
+                "description": record.description,
+                "metadata": self.__serialize_metadata(record),
+            }
+            if record.timestamp is not None:
+                cosmosRecord["timestamp"] = record.timestamp
+
+            doc_ids.append(cosmosRecord["_id"])
+            cosmosRecords.append(cosmosRecord)
+        # Replace existing documents (matched on _id) or insert new ones so that re-upserting does not fail.
+        requests = [ReplaceOne({"_id": doc["_id"]}, doc, upsert=True) for doc in cosmosRecords]
+        self.collection.bulk_write(requests)
+        return doc_ids
+
+    async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord:
+        if not with_embedding:
+            result = self.collection.find_one({"_id": key}, {"embedding": 0})
+        else:
+            result = self.collection.find_one({"_id": key})
+        return MemoryRecord.local_record(
+            id=result["_id"],
+            embedding=np.array(result["embedding"]) if with_embedding else np.array([]),
+            text=result["text"],
+            description=result["description"],
+            additional_metadata=result["metadata"],
+            timestamp=result.get("timestamp", None),
+        )
+
+    async def get_batch(self, collection_name: str, keys: List[str], with_embeddings: bool) -> List[MemoryRecord]:
+        if not with_embeddings:
+            results = self.collection.find({"_id": {"$in": keys}}, {"embedding": 0})
+        else:
+            results = self.collection.find({"_id": {"$in": keys}})
+
+        return [
+            MemoryRecord.local_record(
+                id=result["_id"],
+                embedding=np.array(result["embedding"]) if with_embeddings else np.array([]),
+                text=result["text"],
+                description=result["description"],
+                additional_metadata=result["metadata"],
+                timestamp=result.get("timestamp", None),
+            )
+            for result in results
+        ]
+
+    async def remove(self, collection_name: str, key: str) -> None:
+        self.collection.delete_one({"_id": key})
+
+    async def remove_batch(self, collection_name: str, keys: List[str]) -> None:
+        self.collection.delete_many({"_id": {"$in": keys}})
+
+    async def get_nearest_matches(
+        self,
+        collection_name: str,
+        embedding: np.ndarray,
+        limit: int,
+        min_relevance_score: float,
+        with_embeddings: bool,
+    ) -> List[Tuple[MemoryRecord, float]]:
+        pipeline = [
+            {
+                "$search": {
+                    "cosmosSearch": {
+                        "vector": embedding.tolist(),
+                        "path": "embedding",
+                        "k": limit,
+                    },
+                    "returnStoredSource": True,
+                }
+            },
+            {
+                "$project": {
+                    "similarityScore": {"$meta": "searchScore"},
+                    "document": "$$ROOT",
+                }
+            },
+        ]
+        nearest_results = []
+        # Perform vector search
+        for aggResult in self.collection.aggregate(pipeline):
+            if aggResult["similarityScore"] < min_relevance_score:
+                continue
+            result = MemoryRecord.local_record(
+                id=aggResult["_id"],
+                embedding=np.array(aggResult["document"]["embedding"]) if with_embeddings else np.array([]),
+                text=aggResult["document"]["text"],
+                description=aggResult["document"]["description"],
+                additional_metadata=aggResult["document"]["metadata"],
+                timestamp=aggResult["document"].get("timestamp", None),
+            )
+            nearest_results.append((result, aggResult["similarityScore"]))
+        return nearest_results
+
+    async def get_nearest_match(
+        self,
+        collection_name: str,
+        embedding: np.ndarray,
+        min_relevance_score: float,
+        with_embedding: bool,
+    ) -> Tuple[MemoryRecord, float]:
+        nearest_results = await self.get_nearest_matches(
+            collection_name=collection_name,
+            embedding=embedding,
+            min_relevance_score=min_relevance_score,
+            with_embeddings=with_embedding,
+            limit=1,
+        )
+
+        if len(nearest_results) > 0:
+            return nearest_results[0]
+        else:
+            return None
+
+    @staticmethod
+    def __serialize_metadata(record: MemoryRecord) -> str:
+        return json.dumps(
+            {
+                "text": record.text,
+                "description": record.description,
+                "additional_metadata": record.additional_metadata,
+            }
+        )
diff --git a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py
index 031a70d536c0..3a1de50c7bb6 100644
--- a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py
+++ 
b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py @@ -1,6 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger +import logging from typing import TYPE_CHECKING, List, Optional, Tuple from numpy import array, ndarray @@ -12,23 +12,23 @@ ) from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger if TYPE_CHECKING: import chromadb import chromadb.config from chromadb.api.models.Collection import Collection +logger: logging.Logger = logging.getLogger(__name__) + class ChromaMemoryStore(MemoryStoreBase): _client: "chromadb.Client" - _logger: Logger def __init__( self, persist_directory: Optional[str] = None, client_settings: Optional["chromadb.config.Settings"] = None, - logger: Optional[Logger] = None, + **kwargs, ) -> None: """ ChromaMemoryStore provides an interface to store and retrieve data using ChromaDB. @@ -58,10 +58,11 @@ def __init__( except ImportError: raise ValueError( - "Could not import chromadb python package. " - "Please install it with `pip install chromadb`." + "Could not import chromadb python package. " "Please install it with `pip install chromadb`." ) + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") if client_settings: self._client_settings = client_settings else: @@ -74,10 +75,9 @@ def __init__( self._persist_directory = persist_directory self._default_query_includes = ["embeddings", "metadatas", "documents"] - self._logger = logger or NullLogger() self._default_embedding_function = "DisableChromaEmbeddingFunction" - async def create_collection_async(self, collection_name: str) -> None: + async def create_collection(self, collection_name: str) -> None: """Creates a new collection in Chroma if it does not exist. To prevent downloading model file from embedding_function, embedding_function is set to "DoNotUseChromaEmbeddingFunction". @@ -96,9 +96,7 @@ async def create_collection_async(self, collection_name: str) -> None: embedding_function=self._default_embedding_function, ) - async def get_collection_async( - self, collection_name: str - ) -> Optional["Collection"]: + async def get_collection(self, collection_name: str) -> Optional["Collection"]: try: # Current version of ChromeDB rejects camel case collection names. return self._client.get_collection( @@ -108,7 +106,7 @@ async def get_collection_async( except ValueError: return None - async def get_collections_async(self) -> List[str]: + async def get_collections(self) -> List[str]: """Gets the list of collections. Returns: @@ -116,7 +114,7 @@ async def get_collections_async(self) -> List[str]: """ return [collection.name for collection in self._client.list_collections()] - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: """Deletes a collection. Arguments: @@ -128,7 +126,7 @@ async def delete_collection_async(self, collection_name: str) -> None: # Current version of ChromeDB reject camel case collection names. self._client.delete_collection(name=camel_to_snake(collection_name)) - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Checks if a collection exists. 
Arguments: @@ -137,12 +135,12 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: Returns: bool -- True if the collection exists; otherwise, False. """ - if await self.get_collection_async(collection_name) is None: + if await self.get_collection(collection_name) is None: return False else: return True - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upserts a single MemoryRecord. Arguments: @@ -152,7 +150,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: Returns: List[str] -- The unique database key of the record. """ - collection = await self.get_collection_async(collection_name) + collection = await self.get_collection(collection_name) if collection is None: raise Exception(f"Collection '{collection_name}' does not exist") @@ -175,9 +173,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: ) return record._key - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: """Upserts a batch of records. Arguments: @@ -187,12 +183,10 @@ async def upsert_batch_async( Returns: List[str] -- The unique database keys of the records. In Pinecone, these are the record IDs. """ - # upsert_async is checking collection existence - return [await self.upsert_async(collection_name, record) for record in records] + # upsert is checking collection existence + return [await self.upsert(collection_name, record) for record in records] - async def get_async( - self, collection_name: str, key: str, with_embedding: bool - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord: """Gets a record. Arguments: @@ -203,17 +197,13 @@ async def get_async( Returns: MemoryRecord -- The record. """ - records = await self.get_batch_async(collection_name, [key], with_embedding) + records = await self.get_batch(collection_name, [key], with_embedding) try: return records[0] except IndexError: - raise Exception( - f"Record with key '{key}' does not exist in collection '{collection_name}'" - ) + raise Exception(f"Record with key '{key}' does not exist in collection '{collection_name}'") - async def get_batch_async( - self, collection_name: str, keys: List[str], with_embeddings: bool - ) -> List[MemoryRecord]: + async def get_batch(self, collection_name: str, keys: List[str], with_embeddings: bool) -> List[MemoryRecord]: """Gets a batch of records. Arguments: @@ -224,21 +214,17 @@ async def get_batch_async( Returns: List[MemoryRecord] -- The records. """ - collection = await self.get_collection_async(collection_name) + collection = await self.get_collection(collection_name) if collection is None: raise Exception(f"Collection '{collection_name}' does not exist") - query_includes = ( - ["embeddings", "metadatas", "documents"] - if with_embeddings - else ["metadatas", "documents"] - ) + query_includes = ["embeddings", "metadatas", "documents"] if with_embeddings else ["metadatas", "documents"] value = collection.get(ids=keys, include=query_includes) record = query_results_to_records(value, with_embeddings) return record - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Removes a record. 
Arguments: @@ -248,9 +234,9 @@ async def remove_async(self, collection_name: str, key: str) -> None: Returns: None """ - await self.remove_batch_async(collection_name, [key]) + await self.remove_batch(collection_name, [key]) - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Removes a batch of records. Arguments: @@ -260,11 +246,11 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non Returns: None """ - collection = await self.get_collection_async(collection_name=collection_name) + collection = await self.get_collection(collection_name=collection_name) if collection is not None: collection.delete(ids=keys) - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -285,11 +271,11 @@ async def get_nearest_matches_async( List[Tuple[MemoryRecord, float]] -- The records and their relevance scores. """ if with_embeddings is False: - self._logger.warning( + logger.warning( "Chroma returns distance score not cosine similarity score.\ So embeddings are automatically queried from database for calculation." ) - collection = await self.get_collection_async(collection_name) + collection = await self.get_collection(collection_name) if collection is None: return [] @@ -332,7 +318,7 @@ async def get_nearest_matches_async( return top_results - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -350,7 +336,7 @@ async def get_nearest_match_async( Returns: Tuple[MemoryRecord, float] -- The record and the relevance score. """ - results = await self.get_nearest_matches_async( + results = await self.get_nearest_matches( collection_name=collection_name, embedding=embedding, limit=1, @@ -385,15 +371,11 @@ async def get_nearest_match_async( timestamp="timestamp", ) - asyncio.run(memory.create_collection_async("test_collection")) - collection = asyncio.run(memory.get_collection_async("test_collection")) + asyncio.run(memory.create_collection("test_collection")) + collection = asyncio.run(memory.get_collection("test_collection")) - asyncio.run( - memory.upsert_batch_async(collection.name, [memory_record1, memory_record2]) - ) + asyncio.run(memory.upsert_batch(collection.name, [memory_record1, memory_record2])) - result = asyncio.run(memory.get_async(collection.name, "test_id1", True)) - results = asyncio.run( - memory.get_nearest_match_async("test_collection", np.array([0.5, 0.5])) - ) + result = asyncio.run(memory.get(collection.name, "test_id1", True)) + results = asyncio.run(memory.get_nearest_match("test_collection", np.array([0.5, 0.5]))) print(results) diff --git a/python/semantic_kernel/connectors/memory/chroma/utils.py b/python/semantic_kernel/connectors/memory/chroma/utils.py index fa45441569ed..07643b5fec74 100644 --- a/python/semantic_kernel/connectors/memory/chroma/utils.py +++ b/python/semantic_kernel/connectors/memory/chroma/utils.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. 
+import logging from typing import TYPE_CHECKING, List from numpy import array, linalg, ndarray @@ -9,6 +10,8 @@ if TYPE_CHECKING: from chromadb.api.types import QueryResult +logger: logging.Logger = logging.getLogger(__name__) + def camel_to_snake(camel_str): snake_str = "" @@ -22,9 +25,7 @@ def camel_to_snake(camel_str): return snake_str -def query_results_to_records( - results: "QueryResult", with_embedding: bool -) -> List[MemoryRecord]: +def query_results_to_records(results: "QueryResult", with_embedding: bool) -> List[MemoryRecord]: # if results has only one record, it will be a list instead of a nested list # this is to make sure that results is always a nested list # {'ids': ['test_id1'], 'embeddings': [[...]], 'documents': ['sample text1'], 'metadatas': [{...}]} @@ -82,9 +83,7 @@ def query_results_to_records( return memory_records -def chroma_compute_similarity_scores( - embedding: ndarray, embedding_array: ndarray, logger=None -) -> ndarray: +def chroma_compute_similarity_scores(embedding: ndarray, embedding_array: ndarray, **kwargs) -> ndarray: """Computes the cosine similarity scores between a query embedding and a group of embeddings. Arguments: embedding {ndarray} -- The query embedding. @@ -92,6 +91,8 @@ def chroma_compute_similarity_scores( Returns: ndarray -- The cosine similarity scores. """ + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") query_norm = linalg.norm(embedding) collection_norm = linalg.norm(embedding_array, axis=1) @@ -103,10 +104,10 @@ def chroma_compute_similarity_scores( similarity_scores = array([-1.0] * embedding_array.shape[0]) if valid_indices.any(): - similarity_scores[valid_indices] = embedding.dot( - embedding_array[valid_indices].T - ) / (query_norm * collection_norm[valid_indices]) - if not valid_indices.all() and logger: + similarity_scores[valid_indices] = embedding.dot(embedding_array[valid_indices].T) / ( + query_norm * collection_norm[valid_indices] + ) + if not valid_indices.all(): logger.warning( "Some vectors in the embedding collection are zero vectors." "Ignoring cosine similarity score computation for those vectors." diff --git a/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py b/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py index 709fd14cedac..eefb9573d06f 100644 --- a/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py +++ b/python/semantic_kernel/connectors/memory/milvus/milvus_memory_store.py @@ -1,6 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger +import logging from typing import List, Optional, Tuple from numpy import array, expand_dims, ndarray @@ -8,7 +8,8 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) def memoryrecord_to_milvus_dict(mem: MemoryRecord) -> dict: @@ -65,7 +66,7 @@ def __init__( self, uri: str = "http://localhost:19530", token: Optional[str] = None, - logger: Optional[Logger] = None, + **kwargs, ) -> None: """MilvusMemoryStore allows for searching for records using Milvus/Zilliz Cloud. @@ -79,18 +80,18 @@ def __init__( "http://localhost:19530". token (Optional[str], optional): The token to connect to the cluster if authentication is required. Defaults to None. - logger (Optional[Logger], optional): Logger to use. Defaults to None. 
""" + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") self._uri = uri self._token = (token,) - self._logger = logger or NullLogger() self._client = milvus_client.MilvusClient( uri=uri, token=token, ) self._metric_cache = {} - async def create_collection_async( + async def create_collection( self, collection_name: str, dimension_num: Optional[int] = 1536, @@ -138,7 +139,7 @@ async def create_collection_async( consistency_level=consistency, ) - async def get_collections_async( + async def get_collections( self, ) -> List[str]: """Return a list of present collections. @@ -148,9 +149,7 @@ async def get_collections_async( """ return self._client.list_collections() - async def delete_collection_async( - self, collection_name: str = "", all: bool = False - ) -> None: + async def delete_collection(self, collection_name: str = "", all: bool = False) -> None: """Delete the specified collection. If all is True, all collections in the cluster will be removed. @@ -166,7 +165,7 @@ async def delete_collection_async( elif collection_name in cols: self._client.drop_collection(collection_name) - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Return if the collection exists in the cluster. Args: @@ -177,7 +176,7 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: """ return True if collection_name in self._client.list_collections() else False - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upsert a single MemoryRecord into the collection. Args: @@ -188,16 +187,14 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: str: The ID of the inserted record. """ # Use the batch insert with a total batch - res = await self.upsert_batch_async( + res = await self.upsert_batch( collection_name=collection_name, records=[record], batch_size=0, ) return res[0] - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord], batch_size=100 - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord], batch_size=100) -> List[str]: """_summary_ Args: @@ -215,12 +212,8 @@ async def upsert_batch_async( """ # Check if the collection exists. if collection_name not in self._client.list_collections(): - self._logger.debug( - f"Collection {collection_name} does not exist, cannot insert." - ) - raise Exception( - f"Collection {collection_name} does not exist, cannot insert." 
- ) + logger.debug(f"Collection {collection_name} does not exist, cannot insert.") + raise Exception(f"Collection {collection_name} does not exist, cannot insert.") # Convert the records to dicts insert_list = [memoryrecord_to_milvus_dict(record) for record in records] # The ids to remove @@ -228,16 +221,12 @@ async def upsert_batch_async( try: # First delete then insert to have upsert self._client.delete(collection_name=collection_name, pks=delete_ids) - return self._client.insert( - collection_name=collection_name, data=insert_list, batch_size=batch_size - ) + return self._client.insert(collection_name=collection_name, data=insert_list, batch_size=batch_size) except Exception as e: - self._logger.debug(f"Upsert failed due to: {e}") + logger.debug(f"Upsert failed due to: {e}") raise e - async def get_async( - self, collection_name: str, key: str, with_embedding: bool - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord: """Get the MemoryRecord corresponding to the key. Args: @@ -248,14 +237,10 @@ async def get_async( Returns: MemoryRecord: The MemoryRecord for the key. """ - res = await self.get_batch_async( - collection_name=collection_name, keys=[key], with_embeddings=with_embedding - ) + res = await self.get_batch(collection_name=collection_name, keys=[key], with_embeddings=with_embedding) return res[0] - async def get_batch_async( - self, collection_name: str, keys: List[str], with_embeddings: bool - ) -> List[MemoryRecord]: + async def get_batch(self, collection_name: str, keys: List[str], with_embeddings: bool) -> List[MemoryRecord]: """Get the MemoryRecords corresponding to the keys Args: @@ -272,9 +257,7 @@ async def get_batch_async( """ # Check if the collection exists if collection_name not in self._client.list_collections(): - self._logger.debug( - f"Collection {collection_name} does not exist, cannot get." - ) + logger.debug(f"Collection {collection_name} does not exist, cannot get.") raise Exception("Collection {collection_name} does not exist, cannot get.") try: gets = self._client.get( @@ -284,19 +267,19 @@ async def get_batch_async( ) return [milvus_dict_to_memoryrecord(get) for get in gets] except Exception as e: - self._logger.debug(f"Get failed due to: {e}") + logger.debug(f"Get failed due to: {e}") raise e - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Remove the specified record based on key. Args: collection_name (str): Collection to remove from. key (str): The key to remove. """ - await self.remove_batch_async(collection_name=collection_name, keys=[key]) + await self.remove_batch(collection_name=collection_name, keys=[key]) - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Remove multiple records based on keys. Args: @@ -308,19 +291,15 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non e: Failure to remove key. """ if collection_name not in self._client.list_collections(): - self._logger.debug( - f"Collection {collection_name} does not exist, cannot remove." - ) - raise Exception( - f"Collection {collection_name} does not exist, cannot remove." 
- ) + logger.debug(f"Collection {collection_name} does not exist, cannot remove.") + raise Exception(f"Collection {collection_name} does not exist, cannot remove.") try: self._client.delete( collection_name=collection_name, pks=keys, ) except Exception as e: - self._logger.debug(f"Remove failed due to: {e}") + logger.debug(f"Remove failed due to: {e}") raise e def _search(self, collection_name, data, limit, distance_metric): @@ -342,7 +321,7 @@ def _search(self, collection_name, data, limit, distance_metric): )[0] return results, distance_metric except Exception as e: - self._logger.debug(f"Search failed with IP, testing L2: {e}") + logger.debug(f"Search failed with IP, testing L2: {e}") try: distance_metric = distance_pairs[distance_metric.lower()] results = self._client.search( @@ -354,10 +333,10 @@ def _search(self, collection_name, data, limit, distance_metric): )[0] return results, distance_metric except Exception as e: - self._logger.debug(f"Search failed with L2: {e}") + logger.debug(f"Search failed with L2: {e}") raise e - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -383,12 +362,8 @@ async def get_nearest_matches_async( """ # Check if collection exists if collection_name not in self._client.list_collections(): - self._logger.debug( - f"Collection {collection_name} does not exist, cannot search." - ) - raise Exception( - f"Collection {collection_name} does not exist, cannot search." - ) + logger.debug(f"Collection {collection_name} does not exist, cannot search.") + raise Exception(f"Collection {collection_name} does not exist, cannot search.") # Search requests takes a list of requests. if len(embedding.shape) == 1: embedding = expand_dims(embedding, axis=0) @@ -425,21 +400,18 @@ async def get_nearest_matches_async( output_fields=[EMBEDDING_FIELD], ) except Exception as e: - self._logger.debug(f"Get embeddings in search failed due to: {e}.") + logger.debug(f"Get embeddings in search failed due to: {e}.") raise e vectors = {res[ID_FIELD]: res[EMBEDDING_FIELD] for res in vectors} for res in results: res["entity"][EMBEDDING_FIELD] = vectors[res[ID_FIELD]] - results = [ - (milvus_dict_to_memoryrecord(result["entity"]), result["distance"]) - for result in results - ] + results = [(milvus_dict_to_memoryrecord(result["entity"]), result["distance"]) for result in results] return results - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -457,7 +429,7 @@ async def get_nearest_match_async( Returns: Tuple[MemoryRecord, float]: A tuple of record and distance. """ - m = await self.get_nearest_matches_async( + m = await self.get_nearest_matches( collection_name, embedding, 1, diff --git a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py index b760e2609260..bb491f31e1fd 100644 --- a/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py +++ b/python/semantic_kernel/connectors/memory/mongodb_atlas/mongodb_atlas_memory_store.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
from __future__ import annotations -from logging import Logger +import logging from typing import Any, List, Mapping, Optional, Tuple from motor import MotorCommandCursor, core, motor_asyncio @@ -19,17 +19,17 @@ ) from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger from semantic_kernel.utils.settings import mongodb_atlas_settings_from_dot_env +logger: logging.Logger = logging.getLogger(__name__) + class MongoDBAtlasMemoryStore(MemoryStoreBase): """Memory Store for MongoDB Atlas Vector Search Connections""" - __slots__ = ("_mongo_client", "_logger", "__database_name") + __slots__ = ("_mongo_client", "__database_name") _mongo_client: motor_asyncio.AsyncIOMotorClient - _logger: Logger __database_name: str __index_name: str @@ -38,14 +38,15 @@ def __init__( index_name: Optional[str] = None, connection_string: Optional[str] = None, database_name: Optional[str] = None, - logger: Optional[Logger] = None, read_preference: Optional[ReadPreference] = ReadPreference.PRIMARY, + **kwargs, ): + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") self._mongo_client = motor_asyncio.AsyncIOMotorClient( connection_string or mongodb_atlas_settings_from_dot_env(), read_preference=read_preference, ) - self._logger = logger or NullLogger() self.__database_name = database_name or DEFAULT_DB_NAME self.__index_name = index_name or DEFAULT_SEARCH_INDEX_NAME @@ -65,13 +66,13 @@ def index_name(self) -> str: def num_candidates(self) -> int: return self.__num_candidates - async def close_async(self): + async def close(self): """Async close connection, invoked by MemoryStoreBase.__aexit__()""" if self._mongo_client: self._mongo_client.close() self._mongo_client = None - async def create_collection_async(self, collection_name: str) -> None: + async def create_collection(self, collection_name: str) -> None: """Creates a new collection in the data store. Arguments: @@ -80,10 +81,10 @@ async def create_collection_async(self, collection_name: str) -> None: Returns: None """ - if not await self.does_collection_exist_async(collection_name): + if not await self.does_collection_exist(collection_name): await self.database.create_collection(collection_name) - async def get_collections_async( + async def get_collections( self, ) -> List[str]: """Gets all collection names in the data store. @@ -93,7 +94,7 @@ async def get_collections_async( """ return await self.database.list_collection_names() - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: """Deletes a collection from the data store. Arguments: @@ -104,7 +105,7 @@ async def delete_collection_async(self, collection_name: str) -> None: """ await self.database[collection_name].drop() - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Determines if a collection exists in the data store. Arguments: @@ -113,9 +114,9 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: Returns: bool -- True if given collection exists, False if not. 
""" - return collection_name in (await self.get_collections_async()) + return collection_name in (await self.get_collections()) - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upserts a memory record into the data store. Does not guarantee that the collection exists. If the record already exists, it will be updated. If the record does not exist, it will be created. @@ -130,16 +131,14 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: document: Mapping[str, Any] = memory_record_to_mongo_document(record) - update_result: results.UpdateResult = await self.database[ - collection_name - ].update_one(document, {"$set": document}, upsert=True) + update_result: results.UpdateResult = await self.database[collection_name].update_one( + document, {"$set": document}, upsert=True + ) assert update_result.acknowledged return record._id - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: """Upserts a group of memory records into the data store. Does not guarantee that the collection exists. If the record already exists, it will be updated. If the record does not exist, it will be created. @@ -156,25 +155,20 @@ async def upsert_batch_async( for record in records: document = memory_record_to_mongo_document(record) upserts.append(UpdateOne(document, {"$set": document}, upsert=True)) - bulk_update_result: results.BulkWriteResult = await self.database[ - collection_name - ].bulk_write(upserts, ordered=False) + bulk_update_result: results.BulkWriteResult = await self.database[collection_name].bulk_write( + upserts, ordered=False + ) # Assert the number matched and the number upserted equal the total batch updated - self._logger.debug( + logger.debug( "matched_count=%s, upserted_count=%s", bulk_update_result.matched_count, bulk_update_result.upserted_count, ) - assert ( - bulk_update_result.matched_count + bulk_update_result.upserted_count - == len(records) - ) + assert bulk_update_result.matched_count + bulk_update_result.upserted_count == len(records) return [record._id for record in records] - async def get_async( - self, collection_name: str, key: str, with_embedding: bool - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord: """Gets a memory record from the data store. Does not guarantee that the collection exists. Arguments: @@ -185,15 +179,11 @@ async def get_async( Returns: MemoryRecord -- The memory record if found """ - document = await self.database[collection_name].find_one( - {MONGODB_FIELD_ID: key} - ) + document = await self.database[collection_name].find_one({MONGODB_FIELD_ID: key}) return document_to_memory_record(document, with_embedding) if document else None - async def get_batch_async( - self, collection_name: str, keys: List[str], with_embeddings: bool - ) -> List[MemoryRecord]: + async def get_batch(self, collection_name: str, keys: List[str], with_embeddings: bool) -> List[MemoryRecord]: """Gets a batch of memory records from the data store. Does not guarantee that the collection exists. 
Arguments: @@ -207,11 +197,10 @@ async def get_batch_async( results = self.database[collection_name].find({MONGODB_FIELD_ID: {"$in": keys}}) return [ - document_to_memory_record(result, with_embeddings) - for result in await results.to_list(length=len(keys)) + document_to_memory_record(result, with_embeddings) for result in await results.to_list(length=len(keys)) ] - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Removes a memory record from the data store. Does not guarantee that the collection exists. Arguments: @@ -221,11 +210,11 @@ async def remove_async(self, collection_name: str, key: str) -> None: Returns: None """ - if not await self.does_collection_exist_async(collection_name): + if not await self.does_collection_exist(collection_name): raise Exception(f"collection {collection_name} not found") await self.database[collection_name].delete_one({MONGODB_FIELD_ID: key}) - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Removes a batch of memory records from the data store. Does not guarantee that the collection exists. Arguments: @@ -235,15 +224,13 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non Returns: None """ - if not await self.does_collection_exist_async(collection_name): + if not await self.does_collection_exist(collection_name): raise Exception(f"collection {collection_name} not found") deletes: List[DeleteOne] = [DeleteOne({MONGODB_FIELD_ID: key}) for key in keys] - bulk_write_result = await self.database[collection_name].bulk_write( - deletes, ordered=False - ) - self._logger.debug("%s entries deleted", bulk_write_result.deleted_count) + bulk_write_result = await self.database[collection_name].bulk_write(deletes, ordered=False) + logger.debug("%s entries deleted", bulk_write_result.deleted_count) - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -291,7 +278,7 @@ async def get_nearest_matches_async( for doc in await cursor.to_list(length=limit) ] - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -309,9 +296,7 @@ async def get_nearest_match_async( Returns: Tuple[MemoryRecord, float] -- A tuple consisting of the MemoryRecord and the similarity score as a float. """ - matches: List[ - Tuple[MemoryRecord, float] - ] = await self.get_nearest_matches_async( + matches: List[Tuple[MemoryRecord, float]] = await self.get_nearest_matches( collection_name=collection_name, embedding=embedding, limit=1, diff --git a/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py b/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py index aeae07244b6e..7e4ff91d32c3 100644 --- a/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py +++ b/python/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py @@ -1,6 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger +import logging from typing import List, Optional, Tuple import pinecone @@ -13,7 +13,6 @@ ) from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger # Limitations set by Pinecone at https://docs.pinecone.io/docs/limits MAX_DIMENSIONALITY = 20000 @@ -23,11 +22,12 @@ MAX_FETCH_BATCH_SIZE = 1000 MAX_DELETE_BATCH_SIZE = 1000 +logger: logging.Logger = logging.getLogger(__name__) + class PineconeMemoryStore(MemoryStoreBase): """A memory store that uses Pinecone as the backend.""" - _logger: Logger _pinecone_api_key: str _pinecone_environment: str _default_dimensionality: int @@ -37,7 +37,7 @@ def __init__( api_key: str, environment: str, default_dimensionality: int, - logger: Optional[Logger] = None, + **kwargs, ) -> None: """Initializes a new instance of the PineconeMemoryStore class. @@ -45,8 +45,9 @@ def __init__( pinecone_api_key {str} -- The Pinecone API key. pinecone_environment {str} -- The Pinecone environment. default_dimensionality {int} -- The default dimensionality to use for new collections. - logger {Optional[Logger]} -- The logger to use. (default: {None}) """ + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") if default_dimensionality > MAX_DIMENSIONALITY: raise ValueError( f"Dimensionality of {default_dimensionality} exceeds " @@ -55,16 +56,10 @@ def __init__( self._pinecone_api_key = api_key self._pinecone_environment = environment self._default_dimensionality = default_dimensionality - self._logger = logger or NullLogger() - - pinecone.init( - api_key=self._pinecone_api_key, environment=self._pinecone_environment - ) - def get_collections(self) -> List[str]: - return pinecone.list_indexes() + pinecone.init(api_key=self._pinecone_api_key, environment=self._pinecone_environment) - async def create_collection_async( + async def create_collection( self, collection_name: str, dimension_num: Optional[int] = None, @@ -91,8 +86,7 @@ async def create_collection_async( dimension_num = self._default_dimensionality if dimension_num > MAX_DIMENSIONALITY: raise ValueError( - f"Dimensionality of {dimension_num} exceeds " - + f"the maximum allowed value of {MAX_DIMENSIONALITY}." + f"Dimensionality of {dimension_num} exceeds " + f"the maximum allowed value of {MAX_DIMENSIONALITY}." ) if collection_name not in pinecone.list_indexes(): @@ -106,9 +100,7 @@ async def create_collection_async( metadata_config=metadata_config, ) - async def describe_collection_async( - self, collection_name: str - ) -> Optional[IndexDescription]: + async def describe_collection(self, collection_name: str) -> Optional[IndexDescription]: """Gets the description of the index. Arguments: collection_name {str} -- The name of the index to get. @@ -119,7 +111,7 @@ async def describe_collection_async( return pinecone.describe_index(collection_name) return None - async def get_collections_async( + async def get_collections( self, ) -> List[str]: """Gets the list of collections. @@ -129,7 +121,7 @@ async def get_collections_async( """ return list(pinecone.list_indexes()) - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: """Deletes a collection. 
Arguments: @@ -141,7 +133,7 @@ async def delete_collection_async(self, collection_name: str) -> None: if collection_name in pinecone.list_indexes(): pinecone.delete_index(collection_name) - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Checks if a collection exists. Arguments: @@ -152,7 +144,7 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: """ return collection_name in pinecone.list_indexes() - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upserts a record. Arguments: @@ -177,9 +169,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: return record._id - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: """Upserts a batch of records. Arguments: @@ -203,18 +193,14 @@ async def upsert_batch_async( for record in records ] - upsert_response = collection.upsert( - vectors, namespace="", batch_size=MAX_UPSERT_BATCH_SIZE - ) + upsert_response = collection.upsert(vectors, namespace="", batch_size=MAX_UPSERT_BATCH_SIZE) if upsert_response.upserted_count is None: raise Exception(f"Error upserting record: {upsert_response.message}") else: return [record._id for record in records] - async def get_async( - self, collection_name: str, key: str, with_embedding: bool = False - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool = False) -> MemoryRecord: """Gets a record. Arguments: @@ -236,7 +222,7 @@ async def get_async( return parse_payload(fetch_response.vectors[key], with_embedding) - async def get_batch_async( + async def get_batch( self, collection_name: str, keys: List[str], with_embeddings: bool = False ) -> List[MemoryRecord]: """Gets a batch of records. @@ -252,15 +238,10 @@ async def get_batch_async( if collection_name not in pinecone.list_indexes(): raise Exception(f"Collection '{collection_name}' does not exist") - fetch_response = await self.__get_batch_async( - collection_name, keys, with_embeddings - ) - return [ - parse_payload(fetch_response.vectors[key], with_embeddings) - for key in fetch_response.vectors.keys() - ] + fetch_response = await self.__get_batch(collection_name, keys, with_embeddings) + return [parse_payload(fetch_response.vectors[key], with_embeddings) for key in fetch_response.vectors.keys()] - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Removes a record. Arguments: @@ -276,7 +257,7 @@ async def remove_async(self, collection_name: str, key: str) -> None: collection = pinecone.Index(collection_name) collection.delete([key]) - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Removes a batch of records. 
Arguments: @@ -294,7 +275,7 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non collection.delete(keys[i : i + MAX_DELETE_BATCH_SIZE]) collection.delete(keys) - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -312,7 +293,7 @@ async def get_nearest_match_async( Returns: Tuple[MemoryRecord, float] -- The record and the relevance score. """ - matches = await self.get_nearest_matches_async( + matches = await self.get_nearest_matches( collection_name=collection_name, embedding=embedding, limit=1, @@ -321,7 +302,7 @@ async def get_nearest_match_async( ) return matches[0] - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -347,10 +328,7 @@ async def get_nearest_matches_async( collection = pinecone.Index(collection_name) if limit > MAX_QUERY_WITHOUT_METADATA_BATCH_SIZE: - raise Exception( - "Limit must be less than or equal to " - + f"{MAX_QUERY_WITHOUT_METADATA_BATCH_SIZE}" - ) + raise Exception("Limit must be less than or equal to " + f"{MAX_QUERY_WITHOUT_METADATA_BATCH_SIZE}") elif limit > MAX_QUERY_WITH_METADATA_BATCH_SIZE: query_response = collection.query( vector=embedding.tolist(), @@ -359,9 +337,7 @@ async def get_nearest_matches_async( include_metadata=False, ) keys = [match.id for match in query_response.matches] - fetch_response = await self.__get_batch_async( - collection_name, keys, with_embeddings - ) + fetch_response = await self.__get_batch(collection_name, keys, with_embeddings) vectors = fetch_response.vectors for match in query_response.matches: vectors[match.id].update(match) @@ -388,16 +364,14 @@ async def get_nearest_matches_async( else [] ) - async def __get_batch_async( + async def __get_batch( self, collection_name: str, keys: List[str], with_embeddings: bool = False ) -> "FetchResponse": index = pinecone.Index(collection_name) if len(keys) > MAX_FETCH_BATCH_SIZE: fetch_response = index.fetch(keys[0:MAX_FETCH_BATCH_SIZE]) for i in range(MAX_FETCH_BATCH_SIZE, len(keys), MAX_FETCH_BATCH_SIZE): - fetch_response.vectors.update( - index.fetch(keys[i : i + MAX_FETCH_BATCH_SIZE]).vectors - ) + fetch_response.vectors.update(index.fetch(keys[i : i + MAX_FETCH_BATCH_SIZE]).vectors) else: fetch_response = index.fetch(keys) return fetch_response diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py b/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py index de030c557072..20273288c223 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py @@ -2,7 +2,7 @@ import atexit import json -from logging import Logger +import logging from typing import List, Optional, Tuple import numpy as np @@ -13,12 +13,13 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger # Limitation based on pgvector documentation https://github.com/pgvector/pgvector#what-if-i-want-to-index-vectors-with-more-than-2000-dimensions MAX_DIMENSIONALITY = 2000 DEFAULT_SCHEMA = "public" +logger: logging.Logger = logging.getLogger(__name__) + class PostgresMemoryStore(MemoryStoreBase): """A memory store that uses Postgres with pgvector as the backend.""" @@ -27,7 +28,6 @@ class PostgresMemoryStore(MemoryStoreBase): _connection_pool: ConnectionPool 
_default_dimensionality: int _schema: str - _logger: Logger def __init__( self, @@ -36,7 +36,7 @@ def __init__( min_pool: int, max_pool: int, schema: str = DEFAULT_SCHEMA, - logger: Optional[Logger] = None, + **kwargs, ) -> None: """Initializes a new instance of the PostgresMemoryStore class. @@ -48,21 +48,18 @@ def __init__( schema {str} -- The schema to use. (default: {"public"})\n timezone_offset {Optional[str]} -- The timezone offset to use. (default: {None}) Expected format '-7:00'. Uses the local timezone offset when not provided.\n - logger {Optional[Logger]} -- The logger to use. (default: {None}) """ - + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") self._check_dimensionality(default_dimensionality) self._connection_string = connection_string self._default_dimensionality = default_dimensionality - self._connection_pool = ConnectionPool( - self._connection_string, min_size=min_pool, max_size=max_pool - ) + self._connection_pool = ConnectionPool(self._connection_string, min_size=min_pool, max_size=max_pool) self._schema = schema atexit.register(self._connection_pool.close) - self._logger = logger or NullLogger() - async def create_collection_async( + async def create_collection( self, collection_name: str, dimension_num: Optional[int] = None, @@ -101,7 +98,7 @@ async def create_collection_async( (), ) - async def get_collections_async(self) -> List[str]: + async def get_collections(self) -> List[str]: """Gets the list of collections. Returns: @@ -109,9 +106,9 @@ async def get_collections_async(self) -> List[str]: """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - return await self.__get_collections_async(cur) + return await self.__get_collections(cur) - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: """Deletes a collection. Arguments: @@ -128,7 +125,7 @@ async def delete_collection_async(self, collection_name: str) -> None: ), ) - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Checks if a collection exists. Arguments: @@ -139,9 +136,9 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - return await self.__does_collection_exist_async(cur, collection_name) + return await self.__does_collection_exist(cur, collection_name) - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upserts a record. 
Arguments: @@ -153,7 +150,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - if not await self.__does_collection_exist_async(cur, collection_name): + if not await self.__does_collection_exist(cur, collection_name): raise Exception(f"Collection '{collection_name}' does not exist") cur.execute( SQL( @@ -182,9 +179,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: raise Exception("Upsert failed") return result[0] - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: """Upserts a batch of records. Arguments: @@ -196,7 +191,7 @@ async def upsert_batch_async( """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - if not await self.__does_collection_exist_async(cur, collection_name): + if not await self.__does_collection_exist(cur, collection_name): raise Exception(f"Collection '{collection_name}' does not exist") cur.nextset() cur.executemany( @@ -234,9 +229,7 @@ async def upsert_batch_async( raise Exception("Upsert failed") return [result[0] for result in results if result is not None] - async def get_async( - self, collection_name: str, key: str, with_embedding: bool = False - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool = False) -> MemoryRecord: """Gets a record. Arguments: @@ -249,7 +242,7 @@ async def get_async( """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - if not await self.__does_collection_exist_async(cur, collection_name): + if not await self.__does_collection_exist(cur, collection_name): raise Exception(f"Collection '{collection_name}' does not exist") cur.execute( SQL( @@ -278,7 +271,7 @@ async def get_async( timestamp=result[3], ) - async def get_batch_async( + async def get_batch( self, collection_name: str, keys: List[str], with_embeddings: bool = False ) -> List[MemoryRecord]: """Gets a batch of records. @@ -293,7 +286,7 @@ async def get_batch_async( """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - if not await self.__does_collection_exist_async(cur, collection_name): + if not await self.__does_collection_exist(cur, collection_name): raise Exception(f"Collection '{collection_name}' does not exist") cur.execute( SQL( @@ -312,9 +305,7 @@ async def get_batch_async( return [ MemoryRecord.local_record( id=result[0], - embedding=np.fromstring( - result[1].strip("[]"), dtype=float, sep="," - ) + embedding=np.fromstring(result[1].strip("[]"), dtype=float, sep=",") if with_embeddings else np.array([]), text=result[2]["text"], @@ -325,7 +316,7 @@ async def get_batch_async( for result in results ] - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Removes a record. 
Arguments: @@ -337,7 +328,7 @@ async def remove_async(self, collection_name: str, key: str) -> None: """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - if not await self.__does_collection_exist_async(cur, collection_name): + if not await self.__does_collection_exist(cur, collection_name): raise Exception(f"Collection '{collection_name}' does not exist") cur.execute( SQL( @@ -345,13 +336,11 @@ async def remove_async(self, collection_name: str, key: str) -> None: DELETE FROM {scm}.{tbl} WHERE key = %s """ - ).format( - scm=Identifier(self._schema), tbl=Identifier(collection_name) - ), + ).format(scm=Identifier(self._schema), tbl=Identifier(collection_name)), (key,), ) - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Removes a batch of records. Arguments: @@ -363,7 +352,7 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - if not await self.__does_collection_exist_async(cur, collection_name): + if not await self.__does_collection_exist(cur, collection_name): raise Exception(f"Collection '{collection_name}' does not exist") cur.execute( SQL( @@ -371,13 +360,11 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non DELETE FROM {scm}.{tbl} WHERE key = ANY(%s) """ - ).format( - scm=Identifier(self._schema), tbl=Identifier(collection_name) - ), + ).format(scm=Identifier(self._schema), tbl=Identifier(collection_name)), (list(keys),), ) - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -399,7 +386,7 @@ async def get_nearest_matches_async( """ with self._connection_pool.connection() as conn: with conn.cursor() as cur: - if not await self.__does_collection_exist_async(cur, collection_name): + if not await self.__does_collection_exist(cur, collection_name): raise Exception(f"Collection '{collection_name}' does not exist") cur.execute( SQL( @@ -432,9 +419,7 @@ async def get_nearest_matches_async( ( MemoryRecord.local_record( id=result[0], - embedding=np.fromstring( - result[1].strip("[]"), dtype=float, sep="," - ) + embedding=np.fromstring(result[1].strip("[]"), dtype=float, sep=",") if with_embeddings else np.array([]), text=result[2]["text"], @@ -447,7 +432,7 @@ async def get_nearest_matches_async( for result in results ] - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -466,7 +451,7 @@ async def get_nearest_match_async( Tuple[MemoryRecord, float] -- The record and the relevance score. 
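Because the public coroutines lose their `_async` suffix, call sites only need a mechanical rename; a usage sketch assuming the package re-exports `PostgresMemoryStore` and using placeholder connection details:

```python
import numpy as np

from semantic_kernel.connectors.memory.postgres import PostgresMemoryStore


async def demo() -> None:
    # Placeholder DSN and pool sizes; there is no `logger` argument any more.
    store = PostgresMemoryStore(
        connection_string="postgresql://user:pass@localhost:5432/sk",
        default_dimensionality=1536,
        min_pool=1,
        max_pool=5,
    )
    await store.create_collection("docs")          # was create_collection_async
    match, score = await store.get_nearest_match(  # was get_nearest_match_async
        "docs",
        embedding=np.random.rand(1536),
        min_relevance_score=0.7,                   # raises if nothing meets the threshold
    )
```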
""" - results = await self.get_nearest_matches_async( + results = await self.get_nearest_matches( collection_name=collection_name, embedding=embedding, limit=1, @@ -477,13 +462,11 @@ async def get_nearest_match_async( raise Exception("No match found") return results[0] - async def __does_collection_exist_async( - self, cur: Cursor, collection_name: str - ) -> bool: - results = await self.__get_collections_async(cur) + async def __does_collection_exist(self, cur: Cursor, collection_name: str) -> bool: + results = await self.__get_collections(cur) return collection_name in results - async def __get_collections_async(self, cur: Cursor) -> List[str]: + async def __get_collections(self, cur: Cursor) -> List[str]: cur.execute( """ SELECT table_name @@ -497,8 +480,7 @@ async def __get_collections_async(self, cur: Cursor) -> List[str]: def _check_dimensionality(self, dimension_num): if dimension_num > MAX_DIMENSIONALITY: raise ValueError( - f"Dimensionality of {dimension_num} exceeds " - + f"the maximum allowed value of {MAX_DIMENSIONALITY}." + f"Dimensionality of {dimension_num} exceeds " + f"the maximum allowed value of {MAX_DIMENSIONALITY}." ) if dimension_num <= 0: raise ValueError("Dimensionality must be a positive integer. ") diff --git a/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py b/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py index 4e3d209f70de..d9369564cc7d 100644 --- a/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py +++ b/python/semantic_kernel/connectors/memory/qdrant/qdrant_memory_store.py @@ -5,8 +5,8 @@ The QdrantMemoryStore inherits from MemoryStoreBase for persisting/retrieving data from a Qdrant Vector Database. """ import asyncio +import logging import uuid -from logging import Logger from typing import List, Optional, Tuple from numpy import ndarray @@ -15,26 +15,24 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) class QdrantMemoryStore(MemoryStoreBase): _qdrantclient: QdrantClient - _logger: Logger def __init__( self, vector_size: int, url: Optional[str] = None, port: Optional[int] = 6333, - logger: Optional[Logger] = None, local: Optional[bool] = False, + **kwargs, ) -> None: - """Initializes a new instance of the QdrantMemoryStore class. - - Arguments: - logger {Optional[Logger]} -- The logger to use. (default: {None}) - """ + """Initializes a new instance of the QdrantMemoryStore class.""" + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") if local: if url: self._qdrantclient = QdrantClient(location=url) @@ -43,10 +41,9 @@ def __init__( else: self._qdrantclient = QdrantClient(url=url, port=port) - self._logger = logger or NullLogger() self._default_vector_size = vector_size - async def create_collection_async(self, collection_name: str) -> None: + async def create_collection(self, collection_name: str) -> None: """Creates a new collection if it does not exist. Arguments: @@ -63,7 +60,7 @@ async def create_collection_async(self, collection_name: str) -> None: ), ) - async def get_collections_async( + async def get_collections( self, ) -> List[str]: """Gets the list of collections. 
@@ -74,20 +71,16 @@ async def get_collections_async( collection_info = self._qdrantclient.get_collections() return [collection.name for collection in collection_info.collections] - async def get_collection_async( - self, collection_name: str - ) -> qdrant_models.CollectionInfo: + async def get_collection(self, collection_name: str) -> qdrant_models.CollectionInfo: """Gets the a collections based upon collection name. Returns: CollectionInfo -- Collection Information from Qdrant about collection. """ - collection_info = self._qdrantclient.get_collection( - collection_name=collection_name - ) + collection_info = self._qdrantclient.get_collection(collection_name=collection_name) return collection_info - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: """Deletes a collection. Arguments: @@ -99,7 +92,7 @@ async def delete_collection_async(self, collection_name: str) -> None: self._qdrantclient.delete_collection(collection_name=collection_name) - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Checks if a collection exists. Arguments: @@ -109,12 +102,12 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: bool -- True if the collection exists; otherwise, False. """ try: - result = await self.get_collection_async(collection_name=collection_name) + result = await self.get_collection(collection_name=collection_name) return result.status == qdrant_models.CollectionStatus.GREEN except ValueError: return False - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upserts a record. Arguments: @@ -124,7 +117,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: Returns: str -- The unique database key of the record. 
""" - data_to_upsert = await self._convert_from_memory_record_async( + data_to_upsert = await self._convert_from_memory_record( collection_name=collection_name, record=record, ) @@ -139,13 +132,11 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: else: raise Exception("Upsert failed") - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: tasks = [] for record in records: tasks.append( - self._convert_from_memory_record_async( + self._convert_from_memory_record( collection_name=collection_name, record=record, ) @@ -163,10 +154,8 @@ async def upsert_batch_async( else: raise Exception("Batch upsert failed") - async def get_async( - self, collection_name: str, key: str, with_embedding: bool = False - ) -> Optional[MemoryRecord]: - result = await self._get_existing_record_by_payload_id_async( + async def get(self, collection_name: str, key: str, with_embedding: bool = False) -> Optional[MemoryRecord]: + result = await self._get_existing_record_by_payload_id( collection_name=collection_name, payload_id=key, with_embedding=with_embedding, @@ -187,13 +176,13 @@ async def get_async( else: return None - async def get_batch_async( + async def get_batch( self, collection_name: str, keys: List[str], with_embeddings: bool = False ) -> List[MemoryRecord]: tasks = [] for key in keys: tasks.append( - self.get_async( + self.get( collection_name=collection_name, key=key, with_embedding=with_embeddings, @@ -201,8 +190,8 @@ async def get_batch_async( ) return await asyncio.gather(*tasks) - async def remove_async(self, collection_name: str, key: str) -> None: - existing_record = await self._get_existing_record_by_payload_id_async( + async def remove(self, collection_name: str, key: str) -> None: + existing_record = await self._get_existing_record_by_payload_id( collection_name=collection_name, payload_id=key, with_embedding=False, @@ -210,17 +199,15 @@ async def remove_async(self, collection_name: str, key: str) -> None: if existing_record: pointId = existing_record.id - result = self._qdrantclient.delete( - collection_name=collection_name, points_selector=[pointId] - ) + result = self._qdrantclient.delete(collection_name=collection_name, points_selector=[pointId]) if result.status != qdrant_models.UpdateStatus.COMPLETED: raise Exception("Delete failed") - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: tasks = [] for key in keys: tasks.append( - self._get_existing_record_by_payload_id_async( + self._get_existing_record_by_payload_id( collection_name=collection_name, payload_id=key, with_embedding=False, @@ -237,7 +224,7 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non if result.status != qdrant_models.UpdateStatus.COMPLETED: raise Exception("Delete failed") - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -271,14 +258,14 @@ async def get_nearest_matches_async( for result in match_results ] - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, min_relevance_score: float, with_embedding: bool = False, ) -> Tuple[MemoryRecord, float]: - result = await self.get_nearest_matches_async( + result = await self.get_nearest_matches( 
collection_name=collection_name, embedding=embedding, limit=1, @@ -287,7 +274,7 @@ async def get_nearest_match_async( ) return result[0] if result else None - async def _get_existing_record_by_payload_id_async( + async def _get_existing_record_by_payload_id( self, collection_name: str, payload_id: str, @@ -325,14 +312,14 @@ async def _get_existing_record_by_payload_id_async( else: return None - async def _convert_from_memory_record_async( + async def _convert_from_memory_record( self, collection_name: str, record: MemoryRecord ) -> qdrant_models.PointStruct: if record._key is not None and record._key != "": pointId = record._key else: - existing_record = await self._get_existing_record_by_payload_id_async( + existing_record = await self._get_existing_record_by_payload_id( collection_name=collection_name, payload_id=record._id, ) @@ -345,6 +332,4 @@ async def _convert_from_memory_record_async( payload = record.__dict__.copy() embedding = payload.pop("_embedding") - return qdrant_models.PointStruct( - id=pointId, vector=embedding.tolist(), payload=payload - ) + return qdrant_models.PointStruct(id=pointId, vector=embedding.tolist(), payload=payload) diff --git a/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py b/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py index d1052419d129..6a4a3f16243e 100644 --- a/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py +++ b/python/semantic_kernel/connectors/memory/redis/redis_memory_store.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import List, Optional, Tuple +import logging +from typing import List, Tuple import numpy as np import redis @@ -19,7 +19,8 @@ ) from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) class RedisMemoryStore(MemoryStoreBase): @@ -27,7 +28,6 @@ class RedisMemoryStore(MemoryStoreBase): _database: "redis.Redis" _ft: "redis.Redis.ft" - _logger: Logger # Without RedisAI, it is currently not possible to retrieve index-specific vector attributes to have # fully independent collections. _query_dialect: int @@ -45,7 +45,7 @@ def __init__( vector_type: str = "FLOAT32", vector_index_algorithm: str = "HNSW", query_dialect: int = 2, - logger: Optional[Logger] = None, + **kwargs, ) -> None: """ RedisMemoryStore is an abstracted interface to interact with a Redis node connection. @@ -59,15 +59,15 @@ def __init__( vector_type {str} -- Vector type, defaults to FLOAT32 vector_index_algorithm {str} -- Indexing algorithm for vectors, defaults to HNSW query_dialect {int} -- Query dialect, must be 2 or greater for vector similarity searching, defaults to 2 - logger {Optional[Logger]} -- Logger, defaults to None """ + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. 
Please use the `logging` module instead.") if vector_size <= 0: raise ValueError("Vector dimension must be a positive integer") self._database = redis.Redis.from_url(connection_string) self._ft = self._database.ft - self._logger = logger or NullLogger() self._query_dialect = query_dialect self._vector_distance_metric = vector_distance_metric @@ -76,14 +76,14 @@ def __init__( self._vector_type = np.float32 if vector_type == "FLOAT32" else np.float64 self._vector_size = vector_size - async def close_async(self): + async def close(self): """ Closes the Redis database connection """ - self._logger.info("Closing Redis connection") + logger.info("Closing Redis connection") self._database.close() - async def create_collection_async(self, collection_name: str) -> None: + async def create_collection(self, collection_name: str) -> None: """ Creates a collection, implemented as a Redis index containing hashes prefixed with "collection_name:". @@ -93,12 +93,10 @@ async def create_collection_async(self, collection_name: str) -> None: collection_name {str} -- Name for a collection of embeddings """ - if await self.does_collection_exist_async(collection_name): - self._logger.info(f'Collection "{collection_name}" already exists.') + if await self.does_collection_exist(collection_name): + logger.info(f'Collection "{collection_name}" already exists.') else: - index_def = IndexDefinition( - prefix=f"{collection_name}:", index_type=IndexType.HASH - ) + index_def = IndexDefinition(prefix=f"{collection_name}:", index_type=IndexType.HASH) schema = ( TextField(name="key"), TextField(name="metadata"), @@ -115,14 +113,12 @@ async def create_collection_async(self, collection_name: str) -> None: ) try: - self._ft(collection_name).create_index( - definition=index_def, fields=schema - ) + self._ft(collection_name).create_index(definition=index_def, fields=schema) except Exception as e: - self._logger.error(e) + logger.error(e) raise e - async def get_collections_async(self) -> List[str]: + async def get_collections(self) -> List[str]: """ Returns a list of names of all collection names present in the data store. @@ -132,9 +128,7 @@ async def get_collections_async(self) -> List[str]: # Note: FT._LIST is a temporary command that may be deprecated in the future according to Redis return [name.decode() for name in self._database.execute_command("FT._LIST")] - async def delete_collection_async( - self, collection_name: str, delete_records: bool = True - ) -> None: + async def delete_collection(self, collection_name: str, delete_records: bool = True) -> None: """ Deletes a collection from the data store. If the collection does not exist, the database is left unchanged. @@ -144,10 +138,10 @@ async def delete_collection_async( delete_records {bool} -- Delete all data associated with the collection, default to True """ - if await self.does_collection_exist_async(collection_name): + if await self.does_collection_exist(collection_name): self._ft(collection_name).dropindex(delete_documents=delete_records) - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """ Determines if a collection exists in the data store. 
@@ -163,7 +157,7 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: except ResponseError: return False - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """ Upsert a memory record into the data store. Does not guarantee that the collection exists. * If the record already exists, it will be updated. @@ -180,8 +174,8 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: str -- Redis key associated with the upserted memory record """ - if not await self.does_collection_exist_async(collection_name): - self._logger.error(f'Collection "{collection_name}" does not exist') + if not await self.does_collection_exist(collection_name): + logger.error(f'Collection "{collection_name}" does not exist') raise Exception(f'Collection "{collection_name}" does not exist') # Typical Redis key structure: collection_name:{some identifier} @@ -196,12 +190,10 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: ) return record._key except Exception as e: - self._logger.error(e) + logger.error(e) raise e - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: """ Upserts a group of memory records into the data store. Does not guarantee that the collection exists. * If the record already exists, it will be updated. @@ -220,14 +212,12 @@ async def upsert_batch_async( keys = list() for record in records: - record_key = await self.upsert_async(collection_name, record) + record_key = await self.upsert(collection_name, record) keys.append(record_key) return keys - async def get_async( - self, collection_name: str, key: str, with_embedding: bool = False - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool = False) -> MemoryRecord: """ Gets a memory record from the data store. Does not guarantee that the collection exists. @@ -240,8 +230,8 @@ async def get_async( MemoryRecord -- The memory record if found, else None """ - if not await self.does_collection_exist_async(collection_name): - self._logger.error(f'Collection "{collection_name}" does not exist') + if not await self.does_collection_exist(collection_name): + logger.error(f'Collection "{collection_name}" does not exist') raise Exception(f'Collection "{collection_name}" does not exist') internal_key = get_redis_key(collection_name, key) @@ -256,7 +246,7 @@ async def get_async( return record - async def get_batch_async( + async def get_batch( self, collection_name: str, keys: List[str], with_embeddings: bool = False ) -> List[MemoryRecord]: """ @@ -273,13 +263,13 @@ async def get_batch_async( records = list() for key in keys: - record = await self.get_async(collection_name, key, with_embeddings) + record = await self.get(collection_name, key, with_embeddings) if record: records.append(record) return records - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """ Removes a memory record from the data store. Does not guarantee that the collection exists. If the key does not exist, do nothing. 
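A short end-to-end sketch of the renamed Redis methods, assuming the package exports `RedisMemoryStore` and using a placeholder connection string:

```python
from semantic_kernel.connectors.memory.redis import RedisMemoryStore


async def demo() -> None:
    # Placeholder connection string; vector_size must be a positive integer.
    store = RedisMemoryStore(connection_string="redis://localhost:6379", vector_size=1536)
    await store.create_collection("docs")           # logs (not fails) if it already exists
    assert await store.does_collection_exist("docs")
    await store.close()                             # was close_async
```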
@@ -288,13 +278,13 @@ async def remove_async(self, collection_name: str, key: str) -> None: collection_name {str} -- Name for a collection of embeddings key {str} -- ID associated with the memory to remove """ - if not await self.does_collection_exist_async(collection_name): - self._logger.error(f'Collection "{collection_name}" does not exist') + if not await self.does_collection_exist(collection_name): + logger.error(f'Collection "{collection_name}" does not exist') raise Exception(f'Collection "{collection_name}" does not exist') self._database.delete(get_redis_key(collection_name, key)) - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """ Removes a batch of memory records from the data store. Does not guarantee that the collection exists. @@ -302,13 +292,13 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non collection_name {str} -- Name for a collection of embeddings keys {List[str]} -- IDs associated with the memory records to remove """ - if not await self.does_collection_exist_async(collection_name): - self._logger.error(f'Collection "{collection_name}" does not exist') + if not await self.does_collection_exist(collection_name): + logger.error(f'Collection "{collection_name}" does not exist') raise Exception(f'Collection "{collection_name}" does not exist') self._database.delete(*[get_redis_key(collection_name, key) for key in keys]) - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -330,8 +320,8 @@ async def get_nearest_matches_async( List[Tuple[MemoryRecord, float]] -- Records and their relevance scores by descending order, or an empty list if no relevant matches are found """ - if not await self.does_collection_exist_async(collection_name): - self._logger.error(f'Collection "{collection_name}" does not exist') + if not await self.does_collection_exist(collection_name): + logger.error(f'Collection "{collection_name}" does not exist') raise Exception(f'Collection "{collection_name}" does not exist') # Perform a k-nearest neighbors query, score by similarity @@ -358,14 +348,12 @@ async def get_nearest_matches_async( if score < min_relevance_score: break - record = deserialize_document_to_record( - self._database, match, self._vector_type, with_embeddings - ) + record = deserialize_document_to_record(self._database, match, self._vector_type, with_embeddings) relevant_records.append((record, score)) return relevant_records - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -384,7 +372,7 @@ async def get_nearest_match_async( Returns: Tuple[MemoryRecord, float] -- Record and the relevance score, or None if not found """ - matches = await self.get_nearest_matches_async( + matches = await self.get_nearest_matches( collection_name=collection_name, embedding=embedding, limit=1, diff --git a/python/semantic_kernel/connectors/memory/redis/utils.py b/python/semantic_kernel/connectors/memory/redis/utils.py index d083d7857774..377eced0a00c 100644 --- a/python/semantic_kernel/connectors/memory/redis/utils.py +++ b/python/semantic_kernel/connectors/memory/redis/utils.py @@ -39,9 +39,7 @@ def split_redis_key(redis_key: str) -> Tuple[str, str]: return collection, record_id -def serialize_record_to_redis( - record: MemoryRecord, vector_type: np.dtype -) -> Dict[str, Any]: +def serialize_record_to_redis(record: 
MemoryRecord, vector_type: np.dtype) -> Dict[str, Any]: all_metadata = { "is_reference": record._is_reference, "external_source_name": record._external_source_name or "", @@ -55,18 +53,12 @@ def serialize_record_to_redis( "key": record._key or "", "timestamp": record._timestamp.isoformat() if record._timestamp else "", "metadata": json.dumps(all_metadata), - "embedding": ( - record._embedding.astype(vector_type).tobytes() - if record._embedding is not None - else "" - ), + "embedding": (record._embedding.astype(vector_type).tobytes() if record._embedding is not None else ""), } return redis_mapping -def deserialize_redis_to_record( - fields: Dict[str, Any], vector_type: np.dtype, with_embedding: bool -) -> MemoryRecord: +def deserialize_redis_to_record(fields: Dict[str, Any], vector_type: np.dtype, with_embedding: bool) -> MemoryRecord: metadata = json.loads(fields[b"metadata"]) record = MemoryRecord( id=metadata["id"], @@ -83,9 +75,7 @@ def deserialize_redis_to_record( if with_embedding: # Extract using the vector type, then convert to regular Python float type - record._embedding = np.frombuffer( - fields[b"embedding"], dtype=vector_type - ).astype(float) + record._embedding = np.frombuffer(fields[b"embedding"], dtype=vector_type).astype(float) return record diff --git a/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py b/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py index 997b0427c8f9..78c285394aa0 100644 --- a/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py +++ b/python/semantic_kernel/connectors/memory/usearch/usearch_memory_store.py @@ -1,10 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. import itertools +import logging import os from dataclasses import dataclass from enum import Enum -from logging import Logger from pathlib import Path from typing import Dict, List, Optional, Tuple, Union @@ -24,7 +24,8 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) @dataclass @@ -93,15 +94,12 @@ class _CollectionFileType(Enum): def memoryrecords_to_pyarrow_table(records: List[MemoryRecord]) -> pa.Table: """Convert a list of `MemoryRecord` to a PyArrow Table""" records_pylist = [ - {attr: getattr(record, "_" + attr) for attr in _embeddings_data_schema.names} - for record in records + {attr: getattr(record, "_" + attr) for attr in _embeddings_data_schema.names} for record in records ] return pa.Table.from_pylist(records_pylist, schema=_embeddings_data_schema) -def pyarrow_table_to_memoryrecords( - table: pa.Table, vectors: Optional[ndarray] = None -) -> List[MemoryRecord]: +def pyarrow_table_to_memoryrecords(table: pa.Table, vectors: Optional[ndarray] = None) -> List[MemoryRecord]: """Convert a PyArrow Table to a list of MemoryRecords. Args: @@ -113,9 +111,7 @@ def pyarrow_table_to_memoryrecords( List[MemoryRecord]: List of MemoryRecords constructed from the table. 
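The Redis mapping above stores embeddings as raw bytes, so reads must use the same dtype as writes; a standalone NumPy illustration of that round-trip (values are arbitrary):

```python
import numpy as np

vector_type = np.float32                 # must match the store's configured vector type
embedding = np.array([0.1, 0.2, 0.3])

raw = embedding.astype(vector_type).tobytes()                    # what serialize_record_to_redis stores
restored = np.frombuffer(raw, dtype=vector_type).astype(float)   # what deserialize_redis_to_record returns

assert np.allclose(embedding, restored, atol=1e-6)
```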
""" result_memory_records = [ - MemoryRecord( - **row.to_dict(), embedding=vectors[index] if vectors is not None else None - ) + MemoryRecord(**row.to_dict(), embedding=vectors[index] if vectors is not None else None) for index, row in table.to_pandas().iterrows() ] @@ -126,36 +122,32 @@ class USearchMemoryStore(MemoryStoreBase): def __init__( self, persist_directory: Optional[os.PathLike] = None, - logger: Optional[Logger] = None, + **kwargs, ) -> None: """ Create a USearchMemoryStore instance. This store helps searching embeddings with USearch, keeping collections in memory. To save collections to disk, provide the `persist_directory` param. - Collections are saved when `close_async` is called. + Collections are saved when `close` is called. - To both save collections and free up memory, call `close_async`. + To both save collections and free up memory, call `close`. When `USearchMemoryStore` is used with a context manager, this will happen automatically. Otherwise, it should be called explicitly. Args: persist_directory (Optional[os.PathLike], default=None): Directory for loading and saving collections. If None, collections are not loaded nor saved. - logger (Optional[Logger], default=None): Logger for diagnostics. If None, a NullLogger is used. """ - self._logger = logger or NullLogger() - self._persist_directory = ( - Path(persist_directory) if persist_directory is not None else None - ) + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") + self._persist_directory = Path(persist_directory) if persist_directory is not None else None self._collections: Dict[str, _USearchCollection] = {} if self._persist_directory: self._collections = self._read_collections_from_dir() - def _get_collection_path( - self, collection_name: str, *, file_type: _CollectionFileType - ) -> Path: + def _get_collection_path(self, collection_name: str, *, file_type: _CollectionFileType) -> Path: """ Get the path for the given collection name and file type. @@ -173,11 +165,9 @@ def _get_collection_path( if self._persist_directory is None: raise ValueError("Path of persist directory is not set") - return self._persist_directory / ( - collection_name + _collection_file_extensions[file_type] - ) + return self._persist_directory / (collection_name + _collection_file_extensions[file_type]) - async def create_collection_async( + async def create_collection( self, collection_name: str, ndim: int = 0, @@ -212,9 +202,7 @@ async def create_collection_async( raise ValueError(f"Collection with name {collection_name} already exists.") embeddings_index_path = ( - self._get_collection_path( - collection_name, file_type=_CollectionFileType.USEARCH - ) + self._get_collection_path(collection_name, file_type=_CollectionFileType.USEARCH) if self._persist_directory else None ) @@ -230,15 +218,11 @@ async def create_collection_async( view=view, ) - self._collections[collection_name] = _USearchCollection.create_default( - embeddings_index - ) + self._collections[collection_name] = _USearchCollection.create_default(embeddings_index) return None - def _read_embeddings_table( - self, path: os.PathLike - ) -> Tuple[pa.Table, Dict[str, int]]: + def _read_embeddings_table(self, path: os.PathLike) -> Tuple[pa.Table, Dict[str, int]]: """Read embeddings from the provided path and generate an ID to label mapping. 
Args: @@ -249,8 +233,7 @@ def _read_embeddings_table( """ embeddings_table = pq.read_table(path, schema=_embeddings_data_schema) embeddings_id_to_label: Dict[str, int] = { - record_id: idx - for idx, record_id in enumerate(embeddings_table.column("id").to_pylist()) + record_id: idx for idx, record_id in enumerate(embeddings_table.column("id").to_pylist()) } return embeddings_table, embeddings_id_to_label @@ -274,19 +257,12 @@ def _read_collections_from_dir(self) -> Dict[str, _USearchCollection]: for collection_name, collection_files in self._get_all_storage_files().items(): expected_storage_files = len(_CollectionFileType) if len(collection_files) != expected_storage_files: - raise ValueError( - f"Expected {expected_storage_files} files for collection {collection_name}" - ) + raise ValueError(f"Expected {expected_storage_files} files for collection {collection_name}") parquet_file, usearch_file = collection_files - if ( - parquet_file.suffix - == _collection_file_extensions[_CollectionFileType.USEARCH] - ): + if parquet_file.suffix == _collection_file_extensions[_CollectionFileType.USEARCH]: parquet_file, usearch_file = usearch_file, parquet_file - embeddings_table, embeddings_id_to_label = self._read_embeddings_table( - parquet_file - ) + embeddings_table, embeddings_id_to_label = self._read_embeddings_table(parquet_file) embeddings_index = self._read_embeddings_index(usearch_file) collections[collection_name] = _USearchCollection( @@ -297,7 +273,7 @@ def _read_collections_from_dir(self) -> Dict[str, _USearchCollection]: return collections - async def get_collections_async(self) -> List[str]: + async def get_collections(self) -> List[str]: """Get list of existing collections. Returns: @@ -305,26 +281,24 @@ async def get_collections_async(self) -> List[str]: """ return list(self._collections.keys()) - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: collection_name = collection_name.lower() collection = self._collections.pop(collection_name, None) if collection: collection.embeddings_index.reset() return None - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: collection_name = collection_name.lower() return collection_name in self._collections - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upsert single MemoryRecord and return its ID.""" collection_name = collection_name.lower() - res = await self.upsert_batch_async( - collection_name=collection_name, records=[record] - ) + res = await self.upsert_batch(collection_name=collection_name, records=[record]) return res[0] - async def upsert_batch_async( + async def upsert_batch( self, collection_name: str, records: List[MemoryRecord], @@ -354,22 +328,16 @@ async def upsert_batch_async( """ collection_name = collection_name.lower() if collection_name not in self._collections: - raise KeyError( - f"Collection {collection_name} does not exist, cannot insert." 
- ) + raise KeyError(f"Collection {collection_name} does not exist, cannot insert.") ucollection = self._collections[collection_name] all_records_id = [record._id for record in records] # Remove vectors from index remove_labels = [ - ucollection.embeddings_id_to_label[id] - for id in all_records_id - if id in ucollection.embeddings_id_to_label + ucollection.embeddings_id_to_label[id] for id in all_records_id if id in ucollection.embeddings_id_to_label ] - ucollection.embeddings_index.remove( - remove_labels, compact=compact, threads=threads - ) + ucollection.embeddings_index.remove(remove_labels, compact=compact, threads=threads) # Determine label insertion points table_num_rows = ucollection.embeddings_data_table.num_rows @@ -396,7 +364,7 @@ async def upsert_batch_async( return all_records_id - async def get_async( + async def get( self, collection_name: str, key: str, @@ -405,7 +373,7 @@ async def get_async( ) -> MemoryRecord: """Retrieve a single MemoryRecord using its key.""" collection_name = collection_name.lower() - result = await self.get_batch_async( + result = await self.get_batch( collection_name=collection_name, keys=[key], with_embeddings=with_embedding, @@ -415,7 +383,7 @@ async def get_async( raise KeyError(f"Key '{key}' not found in collection '{collection_name}'") return result[0] - async def get_batch_async( + async def get_batch( self, collection_name: str, keys: List[str], @@ -428,36 +396,24 @@ async def get_batch_async( raise KeyError(f"Collection {collection_name} does not exist") ucollection = self._collections[collection_name] - labels = [ - ucollection.embeddings_id_to_label[key] - for key in keys - if key in ucollection.embeddings_id_to_label - ] + labels = [ucollection.embeddings_id_to_label[key] for key in keys if key in ucollection.embeddings_id_to_label] if not labels: return [] - vectors = ( - ucollection.embeddings_index.get_vectors(labels, dtype) - if with_embeddings - else None - ) + vectors = ucollection.embeddings_index.get_vectors(labels, dtype) if with_embeddings else None - return pyarrow_table_to_memoryrecords( - ucollection.embeddings_data_table.take(pa.array(labels)), vectors - ) + return pyarrow_table_to_memoryrecords(ucollection.embeddings_data_table.take(pa.array(labels)), vectors) - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Remove a single MemoryRecord using its key.""" collection_name = collection_name.lower() - await self.remove_batch_async(collection_name=collection_name, keys=[key]) + await self.remove_batch(collection_name=collection_name, keys=[key]) return None - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Remove a batch of MemoryRecords using their keys.""" collection_name = collection_name.lower() if collection_name not in self._collections: - raise KeyError( - f"Collection {collection_name} does not exist, cannot insert." - ) + raise KeyError(f"Collection {collection_name} does not exist, cannot insert.") ucollection = self._collections[collection_name] @@ -468,7 +424,7 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non return None - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -495,7 +451,7 @@ async def get_nearest_match_async( Tuple[MemoryRecord, float]: The nearest matching record and its relevance score. 
""" collection_name = collection_name.lower() - results = await self.get_nearest_matches_async( + results = await self.get_nearest_matches( collection_name=collection_name, embedding=embedding, limit=1, @@ -505,7 +461,7 @@ async def get_nearest_match_async( ) return results[0] - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -558,9 +514,7 @@ async def get_nearest_matches_async( assert isinstance(result, Matches) relevance_score = 1 / (result.distances + 1) - filtered_labels = result.keys[ - np.where(relevance_score >= min_relevance_score)[0] - ] + filtered_labels = result.keys[np.where(relevance_score >= min_relevance_score)[0]] filtered_vectors: Optional[np.ndarray] = None if with_embeddings: @@ -604,27 +558,21 @@ def _get_all_storage_files(self) -> Dict[str, List[Path]]: def _dump_collections(self) -> None: collection_storage_files = self._get_all_storage_files() - for file_path in itertools.chain.from_iterable( - collection_storage_files.values() - ): + for file_path in itertools.chain.from_iterable(collection_storage_files.values()): file_path.unlink() for collection_name, ucollection in self._collections.items(): ucollection.embeddings_index.save( - self._get_collection_path( - collection_name, file_type=_CollectionFileType.USEARCH - ) + self._get_collection_path(collection_name, file_type=_CollectionFileType.USEARCH) ) pq.write_table( ucollection.embeddings_data_table, - self._get_collection_path( - collection_name, file_type=_CollectionFileType.PARQUET - ), + self._get_collection_path(collection_name, file_type=_CollectionFileType.PARQUET), ) return None - async def close_async(self) -> None: + async def close(self) -> None: """Persist collection, clear. Returns: @@ -633,6 +581,6 @@ async def close_async(self) -> None: if self._persist_directory: self._dump_collections() - for collection_name in await self.get_collections_async(): - await self.delete_collection_async(collection_name) + for collection_name in await self.get_collections(): + await self.delete_collection(collection_name) self._collections = {} diff --git a/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py b/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py index 891fe223a0a6..a6b972b17732 100644 --- a/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py +++ b/python/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py @@ -1,9 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. 
import asyncio +import logging from dataclasses import dataclass -from logging import Logger -from typing import List, Optional, Tuple +from typing import List, Tuple import numpy as np import weaviate @@ -11,7 +11,8 @@ from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) SCHEMA = { "class": "MemoryRecord", @@ -95,9 +96,7 @@ class FieldMapper: @classmethod def sk_to_weaviate(cls, sk_dict): return { - cls.SK_TO_WEAVIATE_MAPPING.get(k, k): v - for k, v in sk_dict.items() - if k in cls.SK_TO_WEAVIATE_MAPPING + cls.SK_TO_WEAVIATE_MAPPING.get(k, k): v for k, v in sk_dict.items() if k in cls.SK_TO_WEAVIATE_MAPPING } @classmethod @@ -115,8 +114,9 @@ def remove_underscore_prefix(cls, sk_dict): """ return {key.lstrip("_"): value for key, value in sk_dict.items()} - def __init__(self, config: WeaviateConfig, logger: Optional[Logger] = None): - self._logger = logger or NullLogger() + def __init__(self, config: WeaviateConfig, **kwargs): + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") self.config = config self.client = self._initialize_client() @@ -127,38 +127,30 @@ def _initialize_client(self): if self.config.api_key: return weaviate.Client( url=self.config.url, - auth_client_secret=weaviate.auth.AuthApiKey( - api_key=self.config.api_key - ), + auth_client_secret=weaviate.auth.AuthApiKey(api_key=self.config.api_key), ) else: return weaviate.Client(url=self.config.url) else: raise ValueError("Weaviate config must have either url or use_embed set") - async def create_collection_async(self, collection_name: str) -> None: + async def create_collection(self, collection_name: str) -> None: schema = SCHEMA.copy() schema["class"] = collection_name - await asyncio.get_running_loop().run_in_executor( - None, self.client.schema.create_class, schema - ) + await asyncio.get_running_loop().run_in_executor(None, self.client.schema.create_class, schema) - async def get_collections_async(self) -> List[str]: - schemas = await asyncio.get_running_loop().run_in_executor( - None, self.client.schema.get - ) + async def get_collections(self) -> List[str]: + schemas = await asyncio.get_running_loop().run_in_executor(None, self.client.schema.get) return [schema["class"] for schema in schemas["classes"]] - async def delete_collection_async(self, collection_name: str) -> bool: - await asyncio.get_running_loop().run_in_executor( - None, self.client.schema.delete_class, collection_name - ) + async def delete_collection(self, collection_name: str) -> bool: + await asyncio.get_running_loop().run_in_executor(None, self.client.schema.delete_class, collection_name) - async def does_collection_exist_async(self, collection_name: str) -> bool: - collections = await self.get_collections_async() + async def does_collection_exist(self, collection_name: str) -> bool: + collections = await self.get_collections() return collection_name in collections - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: weaviate_record = self.FieldMapper.sk_to_weaviate(vars(record)) vector = weaviate_record.pop("vector", None) @@ -173,18 +165,14 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: vector, ) - async def upsert_batch_async( - self, 
collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: def _upsert_batch_inner(): results = [] with self.client.batch as batch: for record in records: weaviate_record = self.FieldMapper.sk_to_weaviate(vars(record)) vector = weaviate_record.pop("vector", None) - weaviate_id = weaviate.util.generate_uuid5( - weaviate_record, collection_name - ) + weaviate_id = weaviate.util.generate_uuid5(weaviate_record, collection_name) batch.add_data_object( data_object=weaviate_record, uuid=weaviate_id, @@ -195,39 +183,27 @@ def _upsert_batch_inner(): return results - return await asyncio.get_running_loop().run_in_executor( - None, _upsert_batch_inner - ) + return await asyncio.get_running_loop().run_in_executor(None, _upsert_batch_inner) - async def get_async( - self, collection_name: str, key: str, with_embedding: bool - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord: # Call the batched version with a single key - results = await self.get_batch_async(collection_name, [key], with_embedding) + results = await self.get_batch(collection_name, [key], with_embedding) return results[0] if results else None - async def get_batch_async( - self, collection_name: str, keys: List[str], with_embedding: bool - ) -> List[MemoryRecord]: + async def get_batch(self, collection_name: str, keys: List[str], with_embedding: bool) -> List[MemoryRecord]: queries = self._build_multi_get_query(collection_name, keys, with_embedding) - results = await asyncio.get_running_loop().run_in_executor( - None, self.client.query.multi_get(queries).do - ) + results = await asyncio.get_running_loop().run_in_executor(None, self.client.query.multi_get(queries).do) get_dict = results.get("data", {}).get("Get", {}) memory_records = [ - self._convert_weaviate_doc_to_memory_record(doc) - for docs in get_dict.values() - for doc in docs + self._convert_weaviate_doc_to_memory_record(doc) for docs in get_dict.values() for doc in docs ] return memory_records - def _build_multi_get_query( - self, collection_name: str, keys: List[str], with_embedding: bool - ): + def _build_multi_get_query(self, collection_name: str, keys: List[str], with_embedding: bool): queries = [] for i, key in enumerate(keys): query = self.client.query.get(collection_name, ALL_PROPERTIES).with_where( @@ -246,9 +222,7 @@ def _build_multi_get_query( return queries - def _convert_weaviate_doc_to_memory_record( - self, weaviate_doc: dict - ) -> MemoryRecord: + def _convert_weaviate_doc_to_memory_record(self, weaviate_doc: dict) -> MemoryRecord: weaviate_doc_copy = weaviate_doc.copy() vector = weaviate_doc_copy.pop("_additional", {}).get("vector") weaviate_doc_copy["vector"] = np.array(vector) if vector else None @@ -256,10 +230,10 @@ def _convert_weaviate_doc_to_memory_record( mem_vals = self.FieldMapper.remove_underscore_prefix(sk_doc) return MemoryRecord(**mem_vals) - async def remove_async(self, collection_name: str, key: str) -> None: - await self.remove_batch_async(collection_name, [key]) + async def remove(self, collection_name: str, key: str) -> None: + await self.remove_batch(collection_name, [key]) - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: # TODO: Use In operator when it's available # (https://github.com/weaviate/weaviate/issues/2387) # and handle max delete objects @@ -275,7 +249,7 @@ 
async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non None, self.client.batch.delete_objects, collection_name, where ) - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: np.ndarray, @@ -315,14 +289,14 @@ async def get_nearest_matches_async( return memory_records_and_scores - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: np.ndarray, min_relevance_score: float, with_embedding: bool, ) -> Tuple[MemoryRecord, float]: - results = await self.get_nearest_matches_async( + results = await self.get_nearest_matches( collection_name, embedding, limit=1, diff --git a/python/semantic_kernel/connectors/openapi/__init__.py b/python/semantic_kernel/connectors/openapi/__init__.py index 1b4c738ce217..d0880d318f1e 100644 --- a/python/semantic_kernel/connectors/openapi/__init__.py +++ b/python/semantic_kernel/connectors/openapi/__init__.py @@ -1,5 +1,5 @@ -from semantic_kernel.connectors.openapi.sk_openapi import register_openapi_skill +from semantic_kernel.connectors.openapi.kernel_openapi import register_openapi_plugin __all__ = [ - "register_openapi_skill", + "register_openapi_plugin", ] diff --git a/python/semantic_kernel/connectors/openapi/kernel_openapi.py b/python/semantic_kernel/connectors/openapi/kernel_openapi.py new file mode 100644 index 000000000000..43490a778ff6 --- /dev/null +++ b/python/semantic_kernel/connectors/openapi/kernel_openapi.py @@ -0,0 +1,291 @@ +import json +import logging +from typing import Dict, Mapping, Optional, Union +from urllib.parse import urljoin + +import aiohttp +import requests +from openapi_core import Spec, unmarshal_request +from openapi_core.contrib.requests import RequestsOpenAPIRequest +from openapi_core.exceptions import OpenAPIError +from prance import ResolvingParser + +from semantic_kernel import Kernel, KernelContext +from semantic_kernel.connectors.ai.open_ai.const import ( + USER_AGENT, +) +from semantic_kernel.connectors.telemetry import HTTP_USER_AGENT +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter + +logger: logging.Logger = logging.getLogger(__name__) + + +class PreparedRestApiRequest: + def __init__(self, method: str, url: str, params=None, headers=None, request_body=None): + self.method = method + self.url = url + self.params = params + self.headers = headers + self.request_body = request_body + + def __repr__(self): + return ( + "PreparedRestApiRequest(" + f"method={self.method}, " + f"url={self.url}, " + f"params={self.params}, " + f"headers={self.headers}, " + f"request_body={self.request_body})" + ) + + def validate_request(self, spec: Spec, **kwargs): + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. 
Please use the `logging` module instead.") + request = requests.Request( + self.method, + self.url, + params=self.params, + headers=self.headers, + json=self.request_body, + ) + openapi_request = RequestsOpenAPIRequest(request=request) + try: + unmarshal_request(openapi_request, spec=spec) + return True + except OpenAPIError as e: + logger.debug(f"Error validating request: {e}", exc_info=True) + return False + + +class RestApiOperation: + def __init__( + self, + id: str, + method: str, + server_url: str, + path: str, + summary: Optional[str] = None, + description: Optional[str] = None, + params: Optional[Mapping[str, str]] = None, + request_body: Optional[Mapping[str, str]] = None, + ): + self.id = id + self.method = method + self.server_url = server_url + self.path = path + self.summary = summary + self.description = description + self.params = params + self.request_body = request_body + + """ + Fills in this RestApiOperation's parameters and payload with the provided values + :param path_params: A dictionary of path parameters + :param query_params: A dictionary of query parameters + :param headers: A dictionary of headers + :param request_body: The payload of the request + :return: A PreparedRestApiRequest object + """ + + def prepare_request( + self, path_params=None, query_params=None, headers=None, request_body=None + ) -> PreparedRestApiRequest: + path = self.path + if path_params: + path = path.format(**path_params) + + url = urljoin(self.server_url, path) + + processed_query_params, processed_headers = {}, headers + for param in self.params: + param_name = param["name"] + param_schema = param["schema"] + param_default = param_schema.get("default", None) + + if param["in"] == "query": + if query_params and param_name in query_params: + processed_query_params[param_name] = query_params[param_name] + elif param["schema"] and "default" in param["schema"] is not None: + processed_query_params[param_name] = param_default + elif param["in"] == "header": + if headers and param_name in headers: + processed_headers[param_name] = headers[param_name] + elif param_default is not None: + processed_headers[param_name] = param_default + elif param["in"] == "path": + if not path_params or param_name not in path_params: + raise ValueError(f"Required path parameter {param_name} not provided") + + processed_payload = None + if self.request_body: + if request_body is None and "required" in self.request_body and self.request_body["required"]: + raise ValueError("Payload is required but was not provided") + content = self.request_body["content"] + content_type = list(content.keys())[0] + processed_headers["Content-Type"] = content_type + processed_payload = request_body + + processed_headers[USER_AGENT] = " ".join((HTTP_USER_AGENT, processed_headers.get(USER_AGENT, ""))).rstrip() + + req = PreparedRestApiRequest( + method=self.method, + url=url, + params=processed_query_params, + headers=processed_headers, + request_body=processed_payload, + ) + return req + + def __repr__(self): + return ( + "RestApiOperation(" + f"id={self.id}, " + f"method={self.method}, " + f"server_url={self.server_url}, " + f"path={self.path}, " + f"params={self.params}, " + f"request_body={self.request_body}, " + f"summary={self.summary}, " + f"description={self.description})" + ) + + +class OpenApiParser: + def __init__(self, **kwargs): + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") + + """ + Import an OpenAPI file. 
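The parser and operation classes can be exercised without a kernel; a sketch using `create_rest_api_operations` from the lines just below, with a hypothetical spec file and operation id (`headers={}` is passed explicitly because `prepare_request` appends a User-Agent entry to whatever mapping it receives):

```python
from semantic_kernel.connectors.openapi.kernel_openapi import OpenApiParser

parser = OpenApiParser()
parsed_doc = parser.parse("./petstore.yaml")                  # hypothetical local path or URL
operations = parser.create_rest_api_operations(parsed_doc)    # keyed by operationId

# Build (but do not send) a request; operation id and parameter names depend on the spec.
request = operations["getPetById"].prepare_request(
    path_params={"petId": "42"},
    headers={},   # prepare_request adds a User-Agent entry to this mapping
)
print(request)
```

In kernel code, `register_openapi_plugin(kernel, plugin_name, openapi_document)` performs the same parse and wires each operation up as a kernel function, as the registration helper further down shows.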
+ :param openapi_file: The path to the OpenAPI file which can be local or a URL. + :return: The parsed OpenAPI file + """ + + def parse(self, openapi_document): + parser = ResolvingParser(openapi_document) + return parser.specification + + """ + Creates a RestApiOperation object for each path/method combination + :param parsed_document: The parsed OpenAPI document + :return: A dictionary of RestApiOperation objects keyed by operationId + """ + + def create_rest_api_operations(self, parsed_document) -> Dict[str, RestApiOperation]: + paths = parsed_document.get("paths", {}) + request_objects = {} + for path, methods in paths.items(): + for method, details in methods.items(): + server_url = parsed_document.get("servers", []) + server_url = server_url[0].get("url") if server_url else "/" + + request_method = method.lower() + + parameters = details.get("parameters", []) + operationId = details.get("operationId", path + "_" + request_method) + summary = details.get("summary", None) + description = details.get("description", None) + + rest_api_operation = RestApiOperation( + id=operationId, + method=request_method, + server_url=server_url, + path=path, + params=parameters, + request_body=details.get("requestBody", None), + summary=summary, + description=description, + ) + + request_objects[operationId] = rest_api_operation + return request_objects + + +class OpenApiRunner: + def __init__( + self, + parsed_openapi_document: Mapping[str, str], + ): + self.spec = Spec.from_dict(parsed_openapi_document) + + async def run_operation( + self, + operation: RestApiOperation, + path_params: Optional[Dict[str, str]] = None, + query_params: Optional[Dict[str, str]] = None, + headers: Optional[Dict[str, str]] = None, + request_body: Optional[Union[str, Dict[str, str]]] = None, + ) -> aiohttp.ClientResponse: + prepared_request = operation.prepare_request( + path_params=path_params, + query_params=query_params, + headers=headers, + request_body=request_body, + ) + is_valid = prepared_request.validate_request(spec=self.spec) + if not is_valid: + return None + + async with aiohttp.ClientSession(raise_for_status=True) as session: + async with session.request( + prepared_request.method, + prepared_request.url, + params=prepared_request.params, + headers=prepared_request.headers, + json=prepared_request.request_body, + ) as response: + return await response.text() + + +""" +Registers a plugin with the kernel that can run OpenAPI operations. +:param kernel: The kernel to register the plugin with +:param plugin_name: The name of the plugin +:param openapi_document: The OpenAPI document to register. 
Can be a filename or URL +:return: A dictionary of KernelFunctions keyed by operationId +""" + + +def register_openapi_plugin( + kernel: Kernel, + plugin_name: str, + openapi_document: str, +) -> Dict[str, KernelFunction]: + parser = OpenApiParser() + parsed_doc = parser.parse(openapi_document) + operations = parser.create_rest_api_operations(parsed_doc) + openapi_runner = OpenApiRunner(parsed_openapi_document=parsed_doc) + + plugin = {} + + def create_run_operation_function(runner: OpenApiRunner, operation: RestApiOperation): + @kernel_function( + description=operation.summary if operation.summary else operation.description, + name=operation_id, + ) + @kernel_function_context_parameter(name="path_params", description="A dictionary of path parameters") + @kernel_function_context_parameter(name="query_params", description="A dictionary of query parameters") + @kernel_function_context_parameter(name="headers", description="A dictionary of headers") + @kernel_function_context_parameter(name="request_body", description="A dictionary of the request body") + async def run_openapi_operation(kernel_context: KernelContext) -> str: + path_params = kernel_context.variables.get("path_params") + query_params = kernel_context.variables.get("query_params") + headers = kernel_context.variables.get("headers") + request_body = kernel_context.variables.get("request_body") + + response = await runner.run_operation( + operation, + path_params=json.loads(path_params) if path_params else None, + query_params=json.loads(query_params) if query_params else None, + headers=json.loads(headers) if headers else None, + request_body=json.loads(request_body) if request_body else None, + ) + return response + + return run_openapi_operation + + for operation_id, operation in operations.items(): + logger.info(f"Registering OpenAPI operation: {plugin_name}.{operation_id}") + plugin[operation_id] = create_run_operation_function(openapi_runner, operation) + return kernel.import_plugin(plugin, plugin_name) diff --git a/python/semantic_kernel/connectors/openapi/sk_openapi.py b/python/semantic_kernel/connectors/openapi/sk_openapi.py deleted file mode 100644 index 9c47f878d3a2..000000000000 --- a/python/semantic_kernel/connectors/openapi/sk_openapi.py +++ /dev/null @@ -1,309 +0,0 @@ -import json -import logging -from typing import Dict, Mapping, Optional, Union -from urllib.parse import urljoin - -import aiohttp -import requests -from openapi_core import Spec, unmarshal_request -from openapi_core.contrib.requests import RequestsOpenAPIRequest -from openapi_core.exceptions import OpenAPIError -from prance import ResolvingParser - -from semantic_kernel import Kernel, SKContext -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter -from semantic_kernel.utils.null_logger import NullLogger - - -class PreparedRestApiRequest: - def __init__( - self, method: str, url: str, params=None, headers=None, request_body=None - ): - self.method = method - self.url = url - self.params = params - self.headers = headers - self.request_body = request_body - - def __repr__(self): - return ( - "PreparedRestApiRequest(" - f"method={self.method}, " - f"url={self.url}, " - f"params={self.params}, " - f"headers={self.headers}, " - f"request_body={self.request_body})" - ) - - def validate_request(self, spec: Spec, logger: logging.Logger = NullLogger()): - request = requests.Request( - self.method, - self.url, - params=self.params, - 
headers=self.headers, - json=self.request_body, - ) - openapi_request = RequestsOpenAPIRequest(request=request) - try: - unmarshal_request(openapi_request, spec=spec) - return True - except OpenAPIError as e: - logger.debug(f"Error validating request: {e}", exc_info=True) - return False - - -class RestApiOperation: - def __init__( - self, - id: str, - method: str, - server_url: str, - path: str, - summary: Optional[str] = None, - description: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, - request_body: Optional[Mapping[str, str]] = None, - ): - self.id = id - self.method = method - self.server_url = server_url - self.path = path - self.summary = summary - self.description = description - self.params = params - self.request_body = request_body - - """ - Fills in this RestApiOperation's parameters and payload with the provided values - :param path_params: A dictionary of path parameters - :param query_params: A dictionary of query parameters - :param headers: A dictionary of headers - :param request_body: The payload of the request - :return: A PreparedRestApiRequest object - """ - - def prepare_request( - self, path_params=None, query_params=None, headers=None, request_body=None - ) -> PreparedRestApiRequest: - path = self.path - if path_params: - path = path.format(**path_params) - - url = urljoin(self.server_url, path) - - processed_query_params, processed_headers = {}, {} - for param in self.params: - param_name = param["name"] - param_schema = param["schema"] - param_default = param_schema.get("default", None) - - if param["in"] == "query": - if query_params and param_name in query_params: - processed_query_params[param_name] = query_params[param_name] - elif param["schema"] and "default" in param["schema"] is not None: - processed_query_params[param_name] = param_default - elif param["in"] == "header": - if headers and param_name in headers: - processed_headers[param_name] = headers[param_name] - elif param_default is not None: - processed_headers[param_name] = param_default - elif param["in"] == "path": - if not path_params or param_name not in path_params: - raise ValueError( - f"Required path parameter {param_name} not provided" - ) - - processed_payload = None - if self.request_body: - if ( - request_body is None - and "required" in self.request_body - and self.request_body["required"] - ): - raise ValueError("Payload is required but was not provided") - content = self.request_body["content"] - content_type = list(content.keys())[0] - processed_headers["Content-Type"] = content_type - processed_payload = request_body - - req = PreparedRestApiRequest( - method=self.method, - url=url, - params=processed_query_params, - headers=processed_headers, - request_body=processed_payload, - ) - return req - - def __repr__(self): - return ( - "RestApiOperation(" - f"id={self.id}, " - f"method={self.method}, " - f"server_url={self.server_url}, " - f"path={self.path}, " - f"params={self.params}, " - f"request_body={self.request_body}, " - f"summary={self.summary}, " - f"description={self.description})" - ) - - -class OpenApiParser: - def __init__(self, logger: logging.Logger = NullLogger()): - self.logger = logger - - """ - Import an OpenAPI file. - :param openapi_file: The path to the OpenAPI file which can be local or a URL. 
- :return: The parsed OpenAPI file - """ - - def parse(self, openapi_document): - parser = ResolvingParser(openapi_document) - return parser.specification - - """ - Creates a RestApiOperation object for each path/method combination - :param parsed_document: The parsed OpenAPI document - :return: A dictionary of RestApiOperation objects keyed by operationId - """ - - def create_rest_api_operations( - self, parsed_document - ) -> Dict[str, RestApiOperation]: - paths = parsed_document.get("paths", {}) - request_objects = {} - for path, methods in paths.items(): - for method, details in methods.items(): - server_url = parsed_document.get("servers", []) - server_url = server_url[0].get("url") if server_url else "/" - - request_method = method.lower() - - parameters = details.get("parameters", []) - operationId = details.get("operationId", path + "_" + request_method) - summary = details.get("summary", None) - description = details.get("description", None) - - rest_api_operation = RestApiOperation( - id=operationId, - method=request_method, - server_url=server_url, - path=path, - params=parameters, - request_body=details.get("requestBody", None), - summary=summary, - description=description, - ) - - request_objects[operationId] = rest_api_operation - return request_objects - - -class OpenApiRunner: - def __init__( - self, - parsed_openapi_document: Mapping[str, str], - logger: logging.Logger = NullLogger(), - ): - self.logger = logger - self.spec = Spec.from_dict(parsed_openapi_document) - - async def run_operation( - self, - operation: RestApiOperation, - path_params: Optional[Dict[str, str]] = None, - query_params: Optional[Dict[str, str]] = None, - headers: Optional[Dict[str, str]] = None, - request_body: Optional[Union[str, Dict[str, str]]] = None, - ) -> aiohttp.ClientResponse: - prepared_request = operation.prepare_request( - path_params=path_params, - query_params=query_params, - headers=headers, - request_body=request_body, - ) - is_valid = prepared_request.validate_request(spec=self.spec, logger=self.logger) - if not is_valid: - return None - - async with aiohttp.ClientSession(raise_for_status=True) as session: - async with session.request( - prepared_request.method, - prepared_request.url, - params=prepared_request.params, - headers=prepared_request.headers, - json=prepared_request.request_body, - ) as response: - return await response.text() - - -""" -Registers a skill with the kernel that can run OpenAPI operations. -:param kernel: The kernel to register the skill with -:param skill_name: The name of the skill -:param openapi_document: The OpenAPI document to register. 
Can be a filename or URL -:return: A dictionary of SKFunctions keyed by operationId -""" - - -def register_openapi_skill( - kernel: Kernel, - skill_name: str, - openapi_document: str, -) -> Dict[str, SKFunctionBase]: - parser = OpenApiParser(logger=kernel.logger) - parsed_doc = parser.parse(openapi_document) - operations = parser.create_rest_api_operations(parsed_doc) - openapi_runner = OpenApiRunner( - parsed_openapi_document=parsed_doc, logger=kernel.logger - ) - - skill = {} - - def create_run_operation_function( - runner: OpenApiRunner, operation: RestApiOperation - ): - @sk_function( - description=operation.summary - if operation.summary - else operation.description, - name=operation_id, - ) - @sk_function_context_parameter( - name="path_params", description="A dictionary of path parameters" - ) - @sk_function_context_parameter( - name="query_params", description="A dictionary of query parameters" - ) - @sk_function_context_parameter( - name="headers", description="A dictionary of headers" - ) - @sk_function_context_parameter( - name="request_body", description="A dictionary of the request body" - ) - async def run_openapi_operation(sk_context: SKContext) -> str: - path_params = sk_context.variables.get("path_params") - query_params = sk_context.variables.get("query_params") - headers = sk_context.variables.get("headers") - request_body = sk_context.variables.get("request_body") - - response = await runner.run_operation( - operation, - path_params=json.loads(path_params) if path_params else None, - query_params=json.loads(query_params) if query_params else None, - headers=json.loads(headers) if headers else None, - request_body=json.loads(request_body) if request_body else None, - ) - return response - - return run_openapi_operation - - for operation_id, operation in operations.items(): - kernel.logger.info( - f"Registering OpenAPI operation: {skill_name}.{operation_id}" - ) - skill[operation_id] = create_run_operation_function(openapi_runner, operation) - return kernel.import_skill(skill, skill_name) diff --git a/python/semantic_kernel/connectors/search_engine/bing_connector.py b/python/semantic_kernel/connectors/search_engine/bing_connector.py index 2e03a03f8c67..f83f8cefffeb 100644 --- a/python/semantic_kernel/connectors/search_engine/bing_connector.py +++ b/python/semantic_kernel/connectors/search_engine/bing_connector.py @@ -1,13 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. +import logging import urllib -from logging import Logger -from typing import List, Optional +from typing import List import aiohttp from semantic_kernel.connectors.search_engine.connector import ConnectorBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) class BingConnector(ConnectorBase): @@ -17,18 +18,15 @@ class BingConnector(ConnectorBase): _api_key: str - def __init__(self, api_key: str, logger: Optional[Logger] = None) -> None: + def __init__(self, api_key: str, **kwargs) -> None: + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") self._api_key = api_key - self._logger = logger if logger else NullLogger() if not self._api_key: - raise ValueError( - "Bing API key cannot be null. Please set environment variable BING_API_KEY." - ) + raise ValueError("Bing API key cannot be null. 
Please set environment variable BING_API_KEY.") - async def search_async( - self, query: str, num_results: str, offset: str - ) -> List[str]: + async def search(self, query: str, num_results: str, offset: str) -> List[str]: """ Returns the search results of the query provided by pinging the Bing web search API. Returns `num_results` results and ignores the first `offset`. @@ -57,7 +55,7 @@ async def search_async( if offset < 0: raise ValueError("offset must be greater than 0.") - self._logger.info( + logger.info( f"Received request for bing web search with \ params:\nquery: {query}\nnum_results: {num_results}\noffset: {offset}" ) @@ -65,20 +63,18 @@ async def search_async( _base_url = "https://api.bing.microsoft.com/v7.0/search" _request_url = f"{_base_url}?q={urllib.parse.quote_plus(query)}&count={num_results}&offset={offset}" - self._logger.info(f"Sending GET request to {_request_url}") + logger.info(f"Sending GET request to {_request_url}") headers = {"Ocp-Apim-Subscription-Key": self._api_key} async with aiohttp.ClientSession() as session: - async with session.get( - _request_url, headers=headers, raise_for_status=True - ) as response: + async with session.get(_request_url, headers=headers, raise_for_status=True) as response: if response.status == 200: data = await response.json() pages = data["webPages"]["value"] - self._logger.info(pages) + logger.info(pages) result = list(map(lambda x: x["snippet"], pages)) - self._logger.info(result) + logger.info(result) return result else: return [] diff --git a/python/semantic_kernel/connectors/search_engine/connector.py b/python/semantic_kernel/connectors/search_engine/connector.py index 16d34b245f6c..d779b4a2da14 100644 --- a/python/semantic_kernel/connectors/search_engine/connector.py +++ b/python/semantic_kernel/connectors/search_engine/connector.py @@ -8,5 +8,5 @@ class ConnectorBase: Base class for search engine connectors """ - def search_async(self, query: str, num_results: str, offset: str) -> List[str]: + def search(self, query: str, num_results: str, offset: str) -> List[str]: pass diff --git a/python/semantic_kernel/connectors/search_engine/google_connector.py b/python/semantic_kernel/connectors/search_engine/google_connector.py index 771df6622acd..0e1abd73ea55 100644 --- a/python/semantic_kernel/connectors/search_engine/google_connector.py +++ b/python/semantic_kernel/connectors/search_engine/google_connector.py @@ -1,13 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. +import logging import urllib -from logging import Logger -from typing import List, Optional +from typing import List import aiohttp from semantic_kernel.connectors.search_engine.connector import ConnectorBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) class GoogleConnector(ConnectorBase): @@ -17,14 +18,12 @@ class GoogleConnector(ConnectorBase): _api_key: str _search_engine_id: str - _logger: Logger - def __init__( - self, api_key: str, search_engine_id: str, logger: Optional[Logger] = None - ) -> None: + def __init__(self, api_key: str, search_engine_id: str, **kwargs) -> None: + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. 
Please use the `logging` module instead.") self._api_key = api_key self._search_engine_id = search_engine_id - self._logger = logger if logger else NullLogger() if not self._api_key: raise ValueError("Google Custom Search API key cannot be null.") @@ -32,9 +31,7 @@ def __init__( if not self._search_engine_id: raise ValueError("Google search engine ID cannot be null.") - async def search_async( - self, query: str, num_results: str, offset: str - ) -> List[str]: + async def search(self, query: str, num_results: str, offset: str) -> List[str]: """ Returns the search results of the query provided by pinging the Google Custom search API. Returns `num_results` results and ignores the first `offset`. @@ -63,7 +60,7 @@ async def search_async( if offset < 0: raise ValueError("offset must be greater than 0.") - self._logger.info( + logger.info( f"Received request for google search with \ params:\nquery: {query}\nnum_results: {num_results}\noffset: {offset}" ) @@ -75,19 +72,17 @@ async def search_async( f"&num={num_results}&start={offset}" ) - self._logger.info("Sending GET request to Google Search API.") + logger.info("Sending GET request to Google Search API.") async with aiohttp.ClientSession() as session: async with session.get(_request_url, raise_for_status=True) as response: if response.status == 200: data = await response.json() - self._logger.info("Request successful.") - self._logger.info(f"API Response: {data}") + logger.info("Request successful.") + logger.info(f"API Response: {data}") items = data["items"] result = [x["snippet"] for x in items] return result else: - self._logger.error( - f"Request to Google Search API failed with status code: {response.status}." - ) + logger.error(f"Request to Google Search API failed with status code: {response.status}.") return [] diff --git a/python/semantic_kernel/connectors/telemetry.py b/python/semantic_kernel/connectors/telemetry.py new file mode 100644 index 000000000000..8ac831d4ff37 --- /dev/null +++ b/python/semantic_kernel/connectors/telemetry.py @@ -0,0 +1,20 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os +from importlib.metadata import version + +TELEMETRY_DISABLED_ENV_VAR = "AZURE_TELEMETRY_DISABLED" + +IS_TELEMETRY_ENABLED = os.environ.get(TELEMETRY_DISABLED_ENV_VAR, "false").lower() not in ["true", "1"] + +HTTP_USER_AGENT = "Semantic-Kernel" + +APP_INFO = ( + { + "name": HTTP_USER_AGENT, + "version": version("semantic-kernel"), + "url": "", + } + if IS_TELEMETRY_ENABLED + else None +) diff --git a/python/semantic_kernel/core_plugins/__init__.py b/python/semantic_kernel/core_plugins/__init__.py new file mode 100644 index 000000000000..11456e06c362 --- /dev/null +++ b/python/semantic_kernel/core_plugins/__init__.py @@ -0,0 +1,23 @@ +# Copyright (c) Microsoft. All rights reserved. 
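A minimal usage sketch for the updated search connectors: logging now goes through the standard `logging` module (the old `logger` constructor argument only triggers a deprecation warning) and `search_async` has been renamed to `search`. The environment-variable names and the query below are assumptions made for the example, not part of the change.

import asyncio
import logging
import os

from semantic_kernel.connectors.search_engine.google_connector import GoogleConnector

# Configure logging once at application level; the connectors now emit through
# logging.getLogger(__name__) instead of an injected logger instance.
logging.basicConfig(level=logging.INFO)


async def demo_search() -> None:
    connector = GoogleConnector(
        api_key=os.environ["GOOGLE_API_KEY"],  # assumed variable name for the sketch
        search_engine_id=os.environ["GOOGLE_SEARCH_ENGINE_ID"],  # assumed variable name
    )
    # The renamed `search` keeps the (query, num_results, offset) signature.
    results = await connector.search("semantic kernel", num_results=3, offset=0)
    print(results)


# asyncio.run(demo_search())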
+ +from semantic_kernel.core_plugins.conversation_summary_plugin import ( + ConversationSummaryPlugin, +) +from semantic_kernel.core_plugins.file_io_plugin import FileIOPlugin +from semantic_kernel.core_plugins.http_plugin import HttpPlugin +from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.core_plugins.text_memory_plugin import TextMemoryPlugin +from semantic_kernel.core_plugins.text_plugin import TextPlugin +from semantic_kernel.core_plugins.time_plugin import TimePlugin +from semantic_kernel.core_plugins.web_search_engine_plugin import WebSearchEnginePlugin + +__all__ = [ + "TextMemoryPlugin", + "TextPlugin", + "FileIOPlugin", + "TimePlugin", + "HttpPlugin", + "ConversationSummaryPlugin", + "MathPlugin", + "WebSearchEnginePlugin", +] diff --git a/python/semantic_kernel/core_plugins/conversation_summary_plugin.py b/python/semantic_kernel/core_plugins/conversation_summary_plugin.py new file mode 100644 index 000000000000..0329fd843425 --- /dev/null +++ b/python/semantic_kernel/core_plugins/conversation_summary_plugin.py @@ -0,0 +1,60 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from semantic_kernel.kernel import Kernel + from semantic_kernel.orchestration.kernel_context import KernelContext + + +class ConversationSummaryPlugin: + """ + Semantic plugin that enables conversations summarization. + """ + + from semantic_kernel.plugin_definition import kernel_function + + # The max tokens to process in a single semantic function call. + _max_tokens = 1024 + + _summarize_conversation_prompt_template = ( + "BEGIN CONTENT TO SUMMARIZE:\n{{" + + "$INPUT" + + "}}\nEND CONTENT TO SUMMARIZE.\nSummarize the conversation in 'CONTENT TO" + " SUMMARIZE', identifying main points of discussion and any" + " conclusions that were reached.\nDo not incorporate other general" + " knowledge.\nSummary is in plain text, in complete sentences, with no markup" + " or tags.\n\nBEGIN SUMMARY:\n" + ) + + def __init__(self, kernel: "Kernel"): + self._summarizeConversationFunction = kernel.create_semantic_function( + ConversationSummaryPlugin._summarize_conversation_prompt_template, + plugin_name=ConversationSummaryPlugin.__name__, + description=("Given a section of a conversation transcript, summarize the part of" " the conversation."), + max_tokens=ConversationSummaryPlugin._max_tokens, + temperature=0.1, + top_p=0.5, + ) + + @kernel_function( + description="Given a long conversation transcript, summarize the conversation.", + name="SummarizeConversation", + input_description="A long conversation transcript.", + ) + async def summarize_conversation(self, input: str, context: "KernelContext") -> "KernelContext": + """ + Given a long conversation transcript, summarize the conversation. + + :param input: A long conversation transcript. + :param context: The KernelContext for function execution. + :return: KernelContext with the summarized conversation result. 
+ """ + from semantic_kernel.text import text_chunker + from semantic_kernel.text.function_extension import ( + aggregate_chunked_results, + ) + + lines = text_chunker._split_text_lines(input, ConversationSummaryPlugin._max_tokens, True) + paragraphs = text_chunker._split_text_paragraph(lines, ConversationSummaryPlugin._max_tokens) + + return await aggregate_chunked_results(self._summarizeConversationFunction, paragraphs, context) diff --git a/python/semantic_kernel/core_plugins/file_io_plugin.py b/python/semantic_kernel/core_plugins/file_io_plugin.py new file mode 100644 index 000000000000..b66e2bc7b15d --- /dev/null +++ b/python/semantic_kernel/core_plugins/file_io_plugin.py @@ -0,0 +1,75 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os +import typing as t + +import aiofiles + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter + +if t.TYPE_CHECKING: + from semantic_kernel.orchestration.kernel_context import KernelContext + + +class FileIOPlugin(KernelBaseModel): + """ + Description: Read and write from a file. + + Usage: + kernel.import_plugin(FileIOPlugin(), plugin_name="file") + + Examples: + + {{file.readAsync $path }} => "hello world" + {{file.writeAsync}} + """ + + @kernel_function( + description="Read a file", + name="readAsync", + input_description="Path of the source file", + ) + async def read(self, path: str) -> str: + """ + Read a file + + Example: + {{file.readAsync $path }} => "hello world" + Args: + path -- The path to the file to read + + Returns: + The contents of the file + """ + + assert os.path.exists(path), f"File {path} does not exist" + + async with aiofiles.open(path, "r", encoding="UTF-8") as fp: + content = await fp.read() + return content + + @kernel_function( + description="Write a file", + name="writeAsync", + ) + @kernel_function_context_parameter(name="path", description="Destination path") + @kernel_function_context_parameter(name="content", description="File content") + async def write(self, context: "KernelContext") -> None: + """ + Write a file + + Example: + {{file.writeAsync}} + Args: + Contains the 'path' for the Destination file and + the 'content' of the file to write. + """ + path = context.variables.get("path") + content = context.variables.get("content") + + assert path, "Path is required" + assert content, "Content is required" + + async with aiofiles.open(path, "w") as fp: + await fp.write(content) diff --git a/python/semantic_kernel/core_plugins/http_plugin.py b/python/semantic_kernel/core_plugins/http_plugin.py new file mode 100644 index 000000000000..45f5c95b7c3e --- /dev/null +++ b/python/semantic_kernel/core_plugins/http_plugin.py @@ -0,0 +1,106 @@ +# Copyright (c) Microsoft. All rights reserved. + +import json +import typing as t + +import aiohttp + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter + +if t.TYPE_CHECKING: + from semantic_kernel.orchestration.kernel_context import KernelContext + + +class HttpPlugin(KernelBaseModel): + """ + A plugin that provides HTTP functionality. 
+ + Usage: + kernel.import_plugin(HttpPlugin(), "http") + + Examples: + + {{http.getAsync $url}} + {{http.postAsync $url}} + {{http.putAsync $url}} + {{http.deleteAsync $url}} + """ + + @kernel_function(description="Makes a GET request to a uri", name="getAsync") + async def get(self, url: str) -> str: + """ + Sends an HTTP GET request to the specified URI and returns + the response body as a string. + params: + uri: The URI to send the request to. + returns: + The response body as a string. + """ + if not url: + raise ValueError("url cannot be `None` or empty") + + async with aiohttp.ClientSession() as session: + async with session.get(url, raise_for_status=True) as response: + return await response.text() + + @kernel_function(description="Makes a POST request to a uri", name="postAsync") + @kernel_function_context_parameter(name="body", description="The body of the request") + async def post(self, url: str, context: "KernelContext") -> str: + """ + Sends an HTTP POST request to the specified URI and returns + the response body as a string. + params: + url: The URI to send the request to. + context: Contains the body of the request + returns: + The response body as a string. + """ + if not url: + raise ValueError("url cannot be `None` or empty") + + body = context.variables.get("body") + + headers = {"Content-Type": "application/json"} + data = json.dumps(body) + async with aiohttp.ClientSession() as session: + async with session.post(url, headers=headers, data=data, raise_for_status=True) as response: + return await response.text() + + @kernel_function(description="Makes a PUT request to a uri", name="putAsync") + @kernel_function_context_parameter(name="body", description="The body of the request") + async def put(self, url: str, context: "KernelContext") -> str: + """ + Sends an HTTP PUT request to the specified URI and returns + the response body as a string. + params: + url: The URI to send the request to. + returns: + The response body as a string. + """ + if not url: + raise ValueError("url cannot be `None` or empty") + + body = context.variables.get("body") + + headers = {"Content-Type": "application/json"} + data = json.dumps(body) + async with aiohttp.ClientSession() as session: + async with session.put(url, headers=headers, data=data, raise_for_status=True) as response: + return await response.text() + + @kernel_function(description="Makes a DELETE request to a uri", name="deleteAsync") + async def delete(self, url: str) -> str: + """ + Sends an HTTP DELETE request to the specified URI and returns + the response body as a string. + params: + uri: The URI to send the request to. + returns: + The response body as a string. + """ + if not url: + raise ValueError("url cannot be `None` or empty") + async with aiohttp.ClientSession() as session: + async with session.delete(url, raise_for_status=True) as response: + return await response.text() diff --git a/python/semantic_kernel/core_plugins/math_plugin.py b/python/semantic_kernel/core_plugins/math_plugin.py new file mode 100644 index 000000000000..9d42f2be1c81 --- /dev/null +++ b/python/semantic_kernel/core_plugins/math_plugin.py @@ -0,0 +1,89 @@ +# Copyright (c) Microsoft. All rights reserved. 
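A short sketch of calling the new HttpPlugin directly, outside of a prompt template; the target URL is a placeholder chosen for the example.

import asyncio

from semantic_kernel.core_plugins.http_plugin import HttpPlugin


async def demo_http() -> None:
    plugin = HttpPlugin()
    # get/getAsync takes the URL as its only argument; post/put additionally read
    # the request body from the KernelContext variable named "body".
    text = await plugin.get("https://example.com/")  # placeholder URL
    print(text[:200])


# asyncio.run(demo_http())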
+import typing as t + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter + +if t.TYPE_CHECKING: + from semantic_kernel.orchestration.kernel_context import KernelContext + + +class MathPlugin(KernelBaseModel): + """ + Description: MathPlugin provides a set of functions to make Math calculations. + + Usage: + kernel.import_plugin(MathPlugin(), plugin_name="math") + + Examples: + {{math.Add}} => Returns the sum of initial_value_text and Amount (provided in the KernelContext) + """ + + @kernel_function( + description="Adds value to a value", + name="Add", + input_description="The value to add", + ) + @kernel_function_context_parameter( + name="Amount", + description="Amount to add", + type="number", + required=True, + ) + def add(self, initial_value_text: str, context: "KernelContext") -> str: + """ + Returns the Addition result of initial and amount values provided. + + :param initial_value_text: Initial value as string to add the specified amount + :param context: Contains the context to get the numbers from + :return: The resulting sum as a string + """ + return MathPlugin.add_or_subtract(initial_value_text, context, add=True) + + @kernel_function( + description="Subtracts value to a value", + name="Subtract", + input_description="The value to subtract", + ) + @kernel_function_context_parameter( + name="Amount", + description="Amount to subtract", + type="number", + required=True, + ) + def subtract(self, initial_value_text: str, context: "KernelContext") -> str: + """ + Returns the difference of numbers provided. + + :param initial_value_text: Initial value as string to subtract the specified amount + :param context: Contains the context to get the numbers from + :return: The resulting subtraction as a string + """ + return MathPlugin.add_or_subtract(initial_value_text, context, add=False) + + @staticmethod + def add_or_subtract(initial_value_text: str, context: "KernelContext", add: bool) -> str: + """ + Helper function to perform addition or subtraction based on the add flag. + + :param initial_value_text: Initial value as string to add or subtract the specified amount + :param context: Contains the context to get the numbers from + :param add: If True, performs addition, otherwise performs subtraction + :return: The resulting sum or subtraction as a string + """ + try: + initial_value = int(initial_value_text) + except ValueError: + raise ValueError(f"Initial value provided is not in numeric format: {initial_value_text}") + + context_amount = context["Amount"] + if context_amount is not None: + try: + amount = int(context_amount) + except ValueError: + raise ValueError("Context amount provided is not in numeric format:" f" {context_amount}") + + result = initial_value + amount if add else initial_value - amount + return str(result) + else: + raise ValueError("Context amount should not be None.") diff --git a/python/semantic_kernel/core_plugins/text_memory_plugin.py b/python/semantic_kernel/core_plugins/text_memory_plugin.py new file mode 100644 index 000000000000..263dbd2d379b --- /dev/null +++ b/python/semantic_kernel/core_plugins/text_memory_plugin.py @@ -0,0 +1,135 @@ +# Copyright (c) Microsoft. All rights reserved. 
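The snippet below exercises MathPlugin.add_or_subtract with a plain dict standing in for the KernelContext, purely to show the integer-string parsing rules; a real invocation would go through kernel.import_plugin(MathPlugin(), plugin_name="math") with a context carrying the "Amount" variable.

from semantic_kernel.core_plugins.math_plugin import MathPlugin

# A plain dict is enough here because the static helper only indexes context["Amount"].
fake_context = {"Amount": "8"}

print(MathPlugin.add_or_subtract("34", fake_context, add=True))   # -> "42"
print(MathPlugin.add_or_subtract("34", fake_context, add=False))  # -> "26"

# Non-numeric input, e.g. "thirty-four", raises ValueError with a descriptive message.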
+import json +import logging +import typing as t +from typing import ClassVar + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter + +if t.TYPE_CHECKING: + from semantic_kernel.orchestration.kernel_context import KernelContext + +logger: logging.Logger = logging.getLogger(__name__) + + +class TextMemoryPlugin(KernelBaseModel): + COLLECTION_PARAM: ClassVar[str] = "collection" + RELEVANCE_PARAM: ClassVar[str] = "relevance" + KEY_PARAM: ClassVar[str] = "key" + LIMIT_PARAM: ClassVar[str] = "limit" + DEFAULT_COLLECTION: ClassVar[str] = "generic" + DEFAULT_RELEVANCE: ClassVar[float] = "0.75" + DEFAULT_LIMIT: ClassVar[int] = "1" + + # @staticmethod + @kernel_function( + description="Recall a fact from the long term memory", + name="recall", + input_description="The information to retrieve", + ) + @kernel_function_context_parameter( + name=COLLECTION_PARAM, + description="The collection to search for information", + default_value=DEFAULT_COLLECTION, + ) + @kernel_function_context_parameter( + name=RELEVANCE_PARAM, + description="The relevance score, from 0.0 to 1.0; 1.0 means perfect match", + default_value=DEFAULT_RELEVANCE, + ) + @kernel_function_context_parameter( + name=LIMIT_PARAM, + description="The maximum number of relevant memories to recall.", + default_value=DEFAULT_LIMIT, + ) + async def recall(self, ask: str, context: "KernelContext") -> str: + """ + Recall a fact from the long term memory. + + Example: + context["input"] = "what is the capital of France?" + {{memory.recall $input}} => "Paris" + + Args: + ask -- The question to ask the memory + context -- Contains the 'collection' to search for information + , the 'relevance' score to use when searching + and the 'limit' of relevant memories to retrieve. + + Returns: + The nearest item from the memory store as a string or empty string if not found. 
+ """ + + if context.variables is None: + raise ValueError("The context doesn't have the variables required to know how to recall memory") + if context.memory is None: + raise ValueError("The context doesn't have a memory instance to search") + + collection = context.variables.get(TextMemoryPlugin.COLLECTION_PARAM, TextMemoryPlugin.DEFAULT_COLLECTION) + if not collection: + raise ValueError("Memory collection not defined for TextMemoryPlugin") + + relevance = context.variables.get(TextMemoryPlugin.RELEVANCE_PARAM, TextMemoryPlugin.DEFAULT_RELEVANCE) + if not relevance: + raise ValueError("Relevance value not defined for TextMemoryPlugin") + + limit = context.variables.get(TextMemoryPlugin.LIMIT_PARAM, TextMemoryPlugin.DEFAULT_LIMIT) + if limit is None or str(limit).strip() == "": + raise ValueError("Limit value not defined for TextMemoryPlugin") + + results = await context.memory.search( + collection=collection, + query=ask, + limit=int(limit), + min_relevance_score=float(relevance), + ) + if results is None or len(results) == 0: + logger.warning(f"Memory not found in collection: {collection}") + return "" + + return results[0].text if limit == 1 else json.dumps([r.text for r in results]) + + @kernel_function( + description="Save information to semantic memory", + name="save", + input_description="The information to save", + ) + @kernel_function_context_parameter( + name=COLLECTION_PARAM, + description="The collection to save the information", + default_value=DEFAULT_COLLECTION, + ) + @kernel_function_context_parameter( + name=KEY_PARAM, + description="The unique key to associate with the information", + ) + async def save(self, text: str, context: "KernelContext") -> None: + """ + Save a fact to the long term memory. + + Example: + context["input"] = "the capital of France is Paris" + context[TextMemoryPlugin.KEY_PARAM] = "countryInfo1" + {{memory.save $input}} + + Args: + text -- The text to save to the memory + context -- Contains the 'collection' to save the information + and unique 'key' to associate with the information + """ + + if context.variables is None: + raise ValueError("The context doesn't have the variables required to know how to recall memory") + if context.memory is None: + raise ValueError("The context doesn't have a memory instance to search") + + collection = context.variables.get(TextMemoryPlugin.COLLECTION_PARAM, TextMemoryPlugin.DEFAULT_COLLECTION) + if not collection: + raise ValueError("Memory collection not defined for TextMemoryPlugin") + + key = context.variables.get(TextMemoryPlugin.KEY_PARAM, None) + if not key: + raise ValueError("Memory key not defined for TextMemoryPlugin") + + await context.memory.save_information(collection, text=text, id=key) diff --git a/python/semantic_kernel/core_plugins/text_plugin.py b/python/semantic_kernel/core_plugins/text_plugin.py new file mode 100644 index 000000000000..8ff12b370a6c --- /dev/null +++ b/python/semantic_kernel/core_plugins/text_plugin.py @@ -0,0 +1,84 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition import kernel_function + + +class TextPlugin(KernelBaseModel): + """ + TextPlugin provides a set of functions to manipulate strings. 
+ + Usage: + kernel.import_plugin(TextPlugin(), plugin_name="text") + + Examples: + KernelContext["input"] = " hello world " + {{text.trim $input}} => "hello world" + + KernelContext["input"] = " hello world " + {{text.trimStart $input} => "hello world " + + KernelContext["input"] = " hello world " + {{text.trimEnd $input} => " hello world" + + KernelContext["input"] = "hello world" + {{text.uppercase $input}} => "HELLO WORLD" + + KernelContext["input"] = "HELLO WORLD" + {{text.lowercase $input}} => "hello world" + """ + + @kernel_function(description="Trim whitespace from the start and end of a string.") + def trim(self, text: str) -> str: + """ + Trim whitespace from the start and end of a string. + + Example: + KernelContext["input"] = " hello world " + {{text.trim $input}} => "hello world" + """ + return text.strip() + + @kernel_function(description="Trim whitespace from the start of a string.") + def trim_start(self, text: str) -> str: + """ + Trim whitespace from the start of a string. + + Example: + KernelContext["input"] = " hello world " + {{text.trim $input}} => "hello world " + """ + return text.lstrip() + + @kernel_function(description="Trim whitespace from the end of a string.") + def trim_end(self, text: str) -> str: + """ + Trim whitespace from the end of a string. + + Example: + KernelContext["input"] = " hello world " + {{text.trim $input}} => " hello world" + """ + return text.rstrip() + + @kernel_function(description="Convert a string to uppercase.") + def uppercase(self, text: str) -> str: + """ + Convert a string to uppercase. + + Example: + KernelContext["input"] = "hello world" + {{text.uppercase $input}} => "HELLO WORLD" + """ + return text.upper() + + @kernel_function(description="Convert a string to lowercase.") + def lowercase(self, text: str) -> str: + """ + Convert a string to lowercase. + + Example: + KernelContext["input"] = "HELLO WORLD" + {{text.lowercase $input}} => "hello world" + """ + return text.lower() diff --git a/python/semantic_kernel/core_plugins/time_plugin.py b/python/semantic_kernel/core_plugins/time_plugin.py new file mode 100644 index 000000000000..7f5fa81d6c3c --- /dev/null +++ b/python/semantic_kernel/core_plugins/time_plugin.py @@ -0,0 +1,267 @@ +# Copyright (c) Microsoft. All rights reserved. + +import datetime + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition import kernel_function + + +class TimePlugin(KernelBaseModel): + """ + Description: TimePlugin provides a set of functions + to get the current time and date. 
+ + Usage: + kernel.import_plugin(TimePlugin(), plugin_name="time") + + Examples: + {{time.date}} => Sunday, 12 January, 2031 + {{time.today}} => Sunday, 12 January, 2031 + {{time.iso_date}} => 2031-01-12 + {{time.now}} => Sunday, January 12, 2031 9:15 PM + {{time.utcNow}} => Sunday, January 13, 2031 5:15 AM + {{time.time}} => 09:15:07 PM + {{time.year}} => 2031 + {{time.month}} => January + {{time.monthNumber}} => 01 + {{time.day}} => 12 + {{time.dayOfWeek}} => Sunday + {{time.hour}} => 9 PM + {{time.hourNumber}} => 21 + {{time.days_ago $days}} => Sunday, 7 May, 2023 + {{time.last_matching_day $dayName}} => Sunday, 7 May, 2023 + {{time.minute}} => 15 + {{time.minutes}} => 15 + {{time.second}} => 7 + {{time.seconds}} => 7 + {{time.timeZoneOffset}} => -0800 + {{time.timeZoneName}} => PST + """ + + @kernel_function(description="Get the current date.") + def date(self) -> str: + """ + Get the current date + + Example: + {{time.date}} => Sunday, 12 January, 2031 + """ + now = datetime.datetime.now() + return now.strftime("%A, %d %B, %Y") + + @kernel_function(description="Get the current date.") + def today(self) -> str: + """ + Get the current date + + Example: + {{time.today}} => Sunday, 12 January, 2031 + """ + return self.date() + + @kernel_function(description="Get the current date in iso format.") + def iso_date(self) -> str: + """ + Get the current date in iso format + + Example: + {{time.iso_date}} => 2031-01-12 + """ + today = datetime.date.today() + return today.isoformat() + + @kernel_function(description="Get the current date and time in the local time zone") + def now(self) -> str: + """ + Get the current date and time in the local time zone" + + Example: + {{time.now}} => Sunday, January 12, 2031 9:15 PM + """ + now = datetime.datetime.now() + return now.strftime("%A, %B %d, %Y %I:%M %p") + + @kernel_function(description="Get the current date and time in UTC", name="utcNow") + def utc_now(self) -> str: + """ + Get the current date and time in UTC + + Example: + {{time.utcNow}} => Sunday, January 13, 2031 5:15 AM + """ + now = datetime.datetime.utcnow() + return now.strftime("%A, %B %d, %Y %I:%M %p") + + @kernel_function(description="Get the current time in the local time zone") + def time(self) -> str: + """ + Get the current time in the local time zone + + Example: + {{time.time}} => 09:15:07 PM + """ + now = datetime.datetime.now() + return now.strftime("%I:%M:%S %p") + + @kernel_function(description="Get the current year") + def year(self) -> str: + """ + Get the current year + + Example: + {{time.year}} => 2031 + """ + now = datetime.datetime.now() + return now.strftime("%Y") + + @kernel_function(description="Get the current month") + def month(self) -> str: + """ + Get the current month + + Example: + {{time.month}} => January + """ + now = datetime.datetime.now() + return now.strftime("%B") + + @kernel_function(description="Get the current month number") + def month_number(self) -> str: + """ + Get the current month number + + Example: + {{time.monthNumber}} => 01 + """ + now = datetime.datetime.now() + return now.strftime("%m") + + @kernel_function(description="Get the current day") + def day(self) -> str: + """ + Get the current day of the month + + Example: + {{time.day}} => 12 + """ + now = datetime.datetime.now() + return now.strftime("%d") + + @kernel_function(description="Get the current day of the week", name="dayOfWeek") + def day_of_week(self) -> str: + """ + Get the current day of the week + + Example: + {{time.dayOfWeek}} => Sunday + """ + now = 
datetime.datetime.now() + return now.strftime("%A") + + @kernel_function(description="Get the current hour") + def hour(self) -> str: + """ + Get the current hour + + Example: + {{time.hour}} => 9 PM + """ + now = datetime.datetime.now() + return now.strftime("%I %p") + + @kernel_function(description="Get the current hour number", name="hourNumber") + def hour_number(self) -> str: + """ + Get the current hour number + + Example: + {{time.hourNumber}} => 21 + """ + now = datetime.datetime.now() + return now.strftime("%H") + + @kernel_function(description="Get the current minute") + def minute(self) -> str: + """ + Get the current minute + + Example: + {{time.minute}} => 15 + """ + now = datetime.datetime.now() + return now.strftime("%M") + + @kernel_function(description="Get the date of offset from today by a provided number of days") + def days_ago(self, days: str) -> str: + """ + Get the date a provided number of days in the past + + params: + days: The number of days to offset from today + returns: + The date of the offset day. + + Example: + KernelContext["input"] = "3" + {{time.days_ago $input}} => Sunday, 7 May, 2023 + """ + d = datetime.date.today() - datetime.timedelta(days=int(days)) + return d.strftime("%A, %d %B, %Y") + + @kernel_function( + description="""Get the date of the last day matching the supplied week day name in English. + Example: Che giorno era 'Martedi' scorso -> dateMatchingLastDayName 'Tuesday' => Tuesday, + 16 May, 2023""" + ) + def date_matching_last_day_name(self, day_name: str) -> str: + """ + Get the date of the last day matching the supplied day name + + params: + day_name: The day name to match with. + returns: + The date of the matching day. + + Example: + KernelContext["input"] = "Sunday" + {{time.date_matching_last_day_name $input}} => Sunday, 7 May, 2023 + """ + d = datetime.date.today() + for i in range(1, 8): + d = d - datetime.timedelta(days=1) + if d.strftime("%A") == day_name: + return d.strftime("%A, %d %B, %Y") + raise ValueError("day_name is not recognized") + + @kernel_function(description="Get the seconds on the current minute") + def second(self) -> str: + """ + Get the seconds on the current minute + + Example: + {{time.second}} => 7 + """ + now = datetime.datetime.now() + return now.strftime("%S") + + @kernel_function(description="Get the current time zone offset", name="timeZoneOffset") + def time_zone_offset(self) -> str: + """ + Get the current time zone offset + + Example: + {{time.timeZoneOffset}} => -08:00 + """ + now = datetime.datetime.now() + return now.strftime("%z") + + @kernel_function(description="Get the current time zone name", name="timeZoneName") + def time_zone_name(self) -> str: + """ + Get the current time zone name + + Example: + {{time.timeZoneName}} => PST + """ + now = datetime.datetime.now() + return now.strftime("%Z") diff --git a/python/semantic_kernel/core_plugins/wait_plugin.py b/python/semantic_kernel/core_plugins/wait_plugin.py new file mode 100644 index 000000000000..ccc77c6baaeb --- /dev/null +++ b/python/semantic_kernel/core_plugins/wait_plugin.py @@ -0,0 +1,26 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition import kernel_function + + +class WaitPlugin(KernelBaseModel): + """ + WaitPlugin provides a set of functions to wait for a certain amount of time. 
+ + Usage: + kernel.import_plugin(WaitPlugin(), plugin_name="wait") + + Examples: + {{wait.seconds 5}} => Wait for 5 seconds + """ + + @kernel_function(description="Wait for a certain number of seconds.") + async def wait(self, seconds_text: str) -> None: + try: + seconds = max(float(seconds_text), 0) + except ValueError: + raise ValueError("seconds text must be a number") + await asyncio.sleep(seconds) diff --git a/python/semantic_kernel/core_plugins/web_search_engine_plugin.py b/python/semantic_kernel/core_plugins/web_search_engine_plugin.py new file mode 100644 index 000000000000..c80e6ea8c9b1 --- /dev/null +++ b/python/semantic_kernel/core_plugins/web_search_engine_plugin.py @@ -0,0 +1,54 @@ +import typing as t + +from semantic_kernel.connectors.search_engine.connector import ConnectorBase +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter + +if t.TYPE_CHECKING: + from semantic_kernel.orchestration.kernel_context import KernelContext + + +class WebSearchEnginePlugin: + """ + Description: A plugin that provides web search engine functionality + + Usage: + connector = BingConnector(bing_search_api_key) + kernel.import_plugin(WebSearchEnginePlugin(connector), plugin_name="WebSearch") + + Examples: + {{WebSearch.SearchAsync "What is semantic kernel?"}} + => Returns the first `num_results` number of results for the given search query + and ignores the first `offset` number of results + (num_results and offset are specified in KernelContext) + """ + + _connector: "ConnectorBase" + + def __init__(self, connector: "ConnectorBase") -> None: + self._connector = connector + + @kernel_function(description="Performs a web search for a given query", name="searchAsync") + @kernel_function_context_parameter( + name="num_results", + description="The number of search results to return", + default_value="1", + ) + @kernel_function_context_parameter( + name="offset", + description="The number of search results to skip", + default_value="0", + ) + async def search(self, query: str, context: "KernelContext") -> str: + """ + Returns the search results of the query provided. + Returns `num_results` results and ignores the first `offset`. + + :param query: search query + :param context: contains the context of count and offset parameters + :return: stringified list of search results + """ + + _num_results = context.variables.get("num_results") + _offset = context.variables.get("offset") + result = await self._connector.search(query, _num_results, _offset) + return str(result) diff --git a/python/semantic_kernel/core_skills/__init__.py b/python/semantic_kernel/core_skills/__init__.py deleted file mode 100644 index e9193dddea12..000000000000 --- a/python/semantic_kernel/core_skills/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
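The remainder of the diff removes the old core_skills package in favour of core_plugins; a minimal migration sketch, assuming an already-configured Kernel instance named kernel:

# Before (removed below):
#   from semantic_kernel.core_skills import TextSkill
#   kernel.import_skill(TextSkill(), skill_name="text")

# After:
from semantic_kernel.core_plugins import TextPlugin

text_functions = kernel.import_plugin(TextPlugin(), plugin_name="text")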
- -from semantic_kernel.core_skills.conversation_summary_skill import ( - ConversationSummarySkill, -) -from semantic_kernel.core_skills.file_io_skill import FileIOSkill -from semantic_kernel.core_skills.http_skill import HttpSkill -from semantic_kernel.core_skills.math_skill import MathSkill -from semantic_kernel.core_skills.text_memory_skill import TextMemorySkill -from semantic_kernel.core_skills.text_skill import TextSkill -from semantic_kernel.core_skills.time_skill import TimeSkill -from semantic_kernel.core_skills.web_search_engine_skill import WebSearchEngineSkill - -__all__ = [ - "TextMemorySkill", - "TextSkill", - "FileIOSkill", - "TimeSkill", - "HttpSkill", - "ConversationSummarySkill", - "MathSkill", - "WebSearchEngineSkill", -] diff --git a/python/semantic_kernel/core_skills/conversation_summary_skill.py b/python/semantic_kernel/core_skills/conversation_summary_skill.py deleted file mode 100644 index 6e03bbed09e8..000000000000 --- a/python/semantic_kernel/core_skills/conversation_summary_skill.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from semantic_kernel.kernel import Kernel - from semantic_kernel.orchestration.sk_context import SKContext - - -class ConversationSummarySkill: - """ - Semantic skill that enables conversations summarization. - """ - - from semantic_kernel.skill_definition import sk_function - - # The max tokens to process in a single semantic function call. - _max_tokens = 1024 - - _summarize_conversation_prompt_template = ( - "BEGIN CONTENT TO SUMMARIZE:\n{{" - + "$INPUT" - + "}}\nEND CONTENT TO SUMMARIZE.\nSummarize the conversation in 'CONTENT TO" - " SUMMARIZE', identifying main points of discussion and any" - " conclusions that were reached.\nDo not incorporate other general" - " knowledge.\nSummary is in plain text, in complete sentences, with no markup" - " or tags.\n\nBEGIN SUMMARY:\n" - ) - - def __init__(self, kernel: "Kernel"): - self._summarizeConversationFunction = kernel.create_semantic_function( - ConversationSummarySkill._summarize_conversation_prompt_template, - skill_name=ConversationSummarySkill.__name__, - description=( - "Given a section of a conversation transcript, summarize the part of" - " the conversation." - ), - max_tokens=ConversationSummarySkill._max_tokens, - temperature=0.1, - top_p=0.5, - ) - - @sk_function( - description="Given a long conversation transcript, summarize the conversation.", - name="SummarizeConversation", - input_description="A long conversation transcript.", - ) - async def summarize_conversation_async( - self, input: str, context: "SKContext" - ) -> "SKContext": - """ - Given a long conversation transcript, summarize the conversation. - - :param input: A long conversation transcript. - :param context: The SKContext for function execution. - :return: SKContext with the summarized conversation result. 
- """ - from semantic_kernel.text import text_chunker - from semantic_kernel.text.function_extension import ( - aggregate_chunked_results_async, - ) - - lines = text_chunker._split_text_lines( - input, ConversationSummarySkill._max_tokens, True - ) - paragraphs = text_chunker._split_text_paragraph( - lines, ConversationSummarySkill._max_tokens - ) - - return await aggregate_chunked_results_async( - self._summarizeConversationFunction, paragraphs, context - ) diff --git a/python/semantic_kernel/core_skills/file_io_skill.py b/python/semantic_kernel/core_skills/file_io_skill.py deleted file mode 100644 index c2842b468aaa..000000000000 --- a/python/semantic_kernel/core_skills/file_io_skill.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import os -import typing as t - -import aiofiles - -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter - -if t.TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext - - -class FileIOSkill(PydanticField): - """ - Description: Read and write from a file. - - Usage: - kernel.import_skill(FileIOSkill(), skill_name="file") - - Examples: - - {{file.readAsync $path }} => "hello world" - {{file.writeAsync}} - """ - - @sk_function( - description="Read a file", - name="readAsync", - input_description="Path of the source file", - ) - async def read_async(self, path: str) -> str: - """ - Read a file - - Example: - {{file.readAsync $path }} => "hello world" - Args: - path -- The path to the file to read - - Returns: - The contents of the file - """ - - assert os.path.exists(path), f"File {path} does not exist" - - async with aiofiles.open(path, "r", encoding="UTF-8") as fp: - content = await fp.read() - return content - - @sk_function( - description="Write a file", - name="writeAsync", - ) - @sk_function_context_parameter(name="path", description="Destination path") - @sk_function_context_parameter(name="content", description="File content") - async def write_async(self, context: "SKContext") -> None: - """ - Write a file - - Example: - {{file.writeAsync}} - Args: - Contains the 'path' for the Destination file and - the 'content' of the file to write. - """ - path = context.variables.get("path") - content = context.variables.get("content") - - assert path, "Path is required" - assert content, "Content is required" - - async with aiofiles.open(path, "w") as fp: - await fp.write(content) diff --git a/python/semantic_kernel/core_skills/http_skill.py b/python/semantic_kernel/core_skills/http_skill.py deleted file mode 100644 index 43ee46106c5f..000000000000 --- a/python/semantic_kernel/core_skills/http_skill.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import json -import typing as t - -import aiohttp - -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter - -if t.TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext - - -class HttpSkill(PydanticField): - """ - A skill that provides HTTP functionality. 
- - Usage: - kernel.import_skill(HttpSkill(), "http") - - Examples: - - {{http.getAsync $url}} - {{http.postAsync $url}} - {{http.putAsync $url}} - {{http.deleteAsync $url}} - """ - - @sk_function(description="Makes a GET request to a uri", name="getAsync") - async def get_async(self, url: str) -> str: - """ - Sends an HTTP GET request to the specified URI and returns - the response body as a string. - params: - uri: The URI to send the request to. - returns: - The response body as a string. - """ - if not url: - raise ValueError("url cannot be `None` or empty") - - async with aiohttp.ClientSession() as session: - async with session.get(url, raise_for_status=True) as response: - return await response.text() - - @sk_function(description="Makes a POST request to a uri", name="postAsync") - @sk_function_context_parameter(name="body", description="The body of the request") - async def post_async(self, url: str, context: "SKContext") -> str: - """ - Sends an HTTP POST request to the specified URI and returns - the response body as a string. - params: - url: The URI to send the request to. - context: Contains the body of the request - returns: - The response body as a string. - """ - if not url: - raise ValueError("url cannot be `None` or empty") - - body = context.variables.get("body") - - headers = {"Content-Type": "application/json"} - data = json.dumps(body) - async with aiohttp.ClientSession() as session: - async with session.post( - url, headers=headers, data=data, raise_for_status=True - ) as response: - return await response.text() - - @sk_function(description="Makes a PUT request to a uri", name="putAsync") - @sk_function_context_parameter(name="body", description="The body of the request") - async def put_async(self, url: str, context: "SKContext") -> str: - """ - Sends an HTTP PUT request to the specified URI and returns - the response body as a string. - params: - url: The URI to send the request to. - returns: - The response body as a string. - """ - if not url: - raise ValueError("url cannot be `None` or empty") - - body = context.variables.get("body") - - headers = {"Content-Type": "application/json"} - data = json.dumps(body) - async with aiohttp.ClientSession() as session: - async with session.put( - url, headers=headers, data=data, raise_for_status=True - ) as response: - return await response.text() - - @sk_function(description="Makes a DELETE request to a uri", name="deleteAsync") - async def delete_async(self, url: str) -> str: - """ - Sends an HTTP DELETE request to the specified URI and returns - the response body as a string. - params: - uri: The URI to send the request to. - returns: - The response body as a string. - """ - if not url: - raise ValueError("url cannot be `None` or empty") - async with aiohttp.ClientSession() as session: - async with session.delete(url, raise_for_status=True) as response: - return await response.text() diff --git a/python/semantic_kernel/core_skills/math_skill.py b/python/semantic_kernel/core_skills/math_skill.py deleted file mode 100644 index d0533feadbd8..000000000000 --- a/python/semantic_kernel/core_skills/math_skill.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
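For user-defined classes, the decorator rename mirrors the deletions below: sk_function becomes kernel_function, sk_function_context_parameter becomes kernel_function_context_parameter, and PydanticField is replaced by KernelBaseModel. A sketch with invented names:

import typing as t

from semantic_kernel.kernel_pydantic import KernelBaseModel
from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter

if t.TYPE_CHECKING:
    from semantic_kernel.orchestration.kernel_context import KernelContext


class GreetingPlugin(KernelBaseModel):
    """Illustrative only; the class, function, and parameter names are invented."""

    @kernel_function(description="Greets a person by name", name="greet", input_description="The name to greet")
    @kernel_function_context_parameter(name="salutation", description="Greeting word", default_value="Hello")
    def greet(self, name: str, context: "KernelContext") -> str:
        salutation = context.variables.get("salutation") or "Hello"
        return f"{salutation}, {name}!"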
-import typing as t - -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter - -if t.TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext - - -class MathSkill(PydanticField): - """ - Description: MathSkill provides a set of functions to make Math calculations. - - Usage: - kernel.import_skill(MathSkill(), skill_name="math") - - Examples: - {{math.Add}} => Returns the sum of initial_value_text and Amount (provided in the SKContext) - """ - - @sk_function( - description="Adds value to a value", - name="Add", - input_description="The value to add", - ) - @sk_function_context_parameter( - name="Amount", - description="Amount to add", - type="number", - required=True, - ) - def add(self, initial_value_text: str, context: "SKContext") -> str: - """ - Returns the Addition result of initial and amount values provided. - - :param initial_value_text: Initial value as string to add the specified amount - :param context: Contains the context to get the numbers from - :return: The resulting sum as a string - """ - return MathSkill.add_or_subtract(initial_value_text, context, add=True) - - @sk_function( - description="Subtracts value to a value", - name="Subtract", - input_description="The value to subtract", - ) - @sk_function_context_parameter( - name="Amount", - description="Amount to subtract", - type="number", - required=True, - ) - def subtract(self, initial_value_text: str, context: "SKContext") -> str: - """ - Returns the difference of numbers provided. - - :param initial_value_text: Initial value as string to subtract the specified amount - :param context: Contains the context to get the numbers from - :return: The resulting subtraction as a string - """ - return MathSkill.add_or_subtract(initial_value_text, context, add=False) - - @staticmethod - def add_or_subtract( - initial_value_text: str, context: "SKContext", add: bool - ) -> str: - """ - Helper function to perform addition or subtraction based on the add flag. - - :param initial_value_text: Initial value as string to add or subtract the specified amount - :param context: Contains the context to get the numbers from - :param add: If True, performs addition, otherwise performs subtraction - :return: The resulting sum or subtraction as a string - """ - try: - initial_value = int(initial_value_text) - except ValueError: - raise ValueError( - f"Initial value provided is not in numeric format: {initial_value_text}" - ) - - context_amount = context["Amount"] - if context_amount is not None: - try: - amount = int(context_amount) - except ValueError: - raise ValueError( - "Context amount provided is not in numeric format:" - f" {context_amount}" - ) - - result = initial_value + amount if add else initial_value - amount - return str(result) - else: - raise ValueError("Context amount should not be None.") diff --git a/python/semantic_kernel/core_skills/text_memory_skill.py b/python/semantic_kernel/core_skills/text_memory_skill.py deleted file mode 100644 index 7248386d6679..000000000000 --- a/python/semantic_kernel/core_skills/text_memory_skill.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
-import json -import typing as t - -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter - -if t.TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext - - -class TextMemorySkill(PydanticField): - COLLECTION_PARAM = "collection" - RELEVANCE_PARAM = "relevance" - KEY_PARAM = "key" - LIMIT_PARAM = "limit" - DEFAULT_COLLECTION = "generic" - DEFAULT_RELEVANCE = 0.75 - DEFAULT_LIMIT = 1 - - # @staticmethod - @sk_function( - description="Recall a fact from the long term memory", - name="recall", - input_description="The information to retrieve", - ) - @sk_function_context_parameter( - name=COLLECTION_PARAM, - description="The collection to search for information", - default_value=DEFAULT_COLLECTION, - ) - @sk_function_context_parameter( - name=RELEVANCE_PARAM, - description="The relevance score, from 0.0 to 1.0; 1.0 means perfect match", - default_value=DEFAULT_RELEVANCE, - ) - @sk_function_context_parameter( - name=LIMIT_PARAM, - description="The maximum number of relevant memories to recall.", - default_value=DEFAULT_LIMIT, - ) - async def recall_async(self, ask: str, context: "SKContext") -> str: - """ - Recall a fact from the long term memory. - - Example: - sk_context["input"] = "what is the capital of France?" - {{memory.recall $input}} => "Paris" - - Args: - ask -- The question to ask the memory - context -- Contains the 'collection' to search for information - , the 'relevance' score to use when searching - and the 'limit' of relevant memories to retrieve. - - Returns: - The nearest item from the memory store as a string or empty string if not found. - """ - - if context.variables is None: - raise ValueError( - "The context doesn't have the variables required to know how to recall memory" - ) - if context.memory is None: - raise ValueError("The context doesn't have a memory instance to search") - - collection = context.variables.get( - TextMemorySkill.COLLECTION_PARAM, TextMemorySkill.DEFAULT_COLLECTION - ) - if not collection: - raise ValueError("Memory collection not defined for TextMemorySkill") - - relevance = context.variables.get( - TextMemorySkill.RELEVANCE_PARAM, TextMemorySkill.DEFAULT_RELEVANCE - ) - if not relevance: - raise ValueError("Relevance value not defined for TextMemorySkill") - - limit = context.variables.get( - TextMemorySkill.LIMIT_PARAM, TextMemorySkill.DEFAULT_LIMIT - ) - if limit is None or str(limit).strip() == "": - raise ValueError("Limit value not defined for TextMemorySkill") - - results = await context.memory.search_async( - collection=collection, - query=ask, - limit=int(limit), - min_relevance_score=float(relevance), - ) - if results is None or len(results) == 0: - if context.log is not None: - context.log.warning(f"Memory not found in collection: {collection}") - return "" - - return results[0].text if limit == 1 else json.dumps([r.text for r in results]) - - @sk_function( - description="Save information to semantic memory", - name="save", - input_description="The information to save", - ) - @sk_function_context_parameter( - name=COLLECTION_PARAM, - description="The collection to save the information", - default_value=DEFAULT_COLLECTION, - ) - @sk_function_context_parameter( - name=KEY_PARAM, - description="The unique key to associate with the information", - ) - async def save_async(self, text: str, context: "SKContext") -> None: - """ - Save a fact to the long term memory. 
- - Example: - sk_context["input"] = "the capital of France is Paris" - sk_context[TextMemorySkill.KEY_PARAM] = "countryInfo1" - {{memory.save $input}} - - Args: - text -- The text to save to the memory - context -- Contains the 'collection' to save the information - and unique 'key' to associate with the information - """ - - if context.variables is None: - raise ValueError( - "The context doesn't have the variables required to know how to recall memory" - ) - if context.memory is None: - raise ValueError("The context doesn't have a memory instance to search") - - collection = context.variables.get( - TextMemorySkill.COLLECTION_PARAM, TextMemorySkill.DEFAULT_COLLECTION - ) - if not collection: - raise ValueError("Memory collection not defined for TextMemorySkill") - - key = context.variables.get(TextMemorySkill.KEY_PARAM, None) - if not key: - raise ValueError("Memory key not defined for TextMemorySkill") - - await context.memory.save_information_async(collection, text=text, id=key) diff --git a/python/semantic_kernel/core_skills/text_skill.py b/python/semantic_kernel/core_skills/text_skill.py deleted file mode 100644 index 4a4bd7fc6a3c..000000000000 --- a/python/semantic_kernel/core_skills/text_skill.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition import sk_function - - -class TextSkill(PydanticField): - """ - TextSkill provides a set of functions to manipulate strings. - - Usage: - kernel.import_skill(TextSkill(), skill_name="text") - - Examples: - SKContext["input"] = " hello world " - {{text.trim $input}} => "hello world" - - SKContext["input"] = " hello world " - {{text.trimStart $input} => "hello world " - - SKContext["input"] = " hello world " - {{text.trimEnd $input} => " hello world" - - SKContext["input"] = "hello world" - {{text.uppercase $input}} => "HELLO WORLD" - - SKContext["input"] = "HELLO WORLD" - {{text.lowercase $input}} => "hello world" - """ - - @sk_function(description="Trim whitespace from the start and end of a string.") - def trim(self, text: str) -> str: - """ - Trim whitespace from the start and end of a string. - - Example: - SKContext["input"] = " hello world " - {{text.trim $input}} => "hello world" - """ - return text.strip() - - @sk_function(description="Trim whitespace from the start of a string.") - def trim_start(self, text: str) -> str: - """ - Trim whitespace from the start of a string. - - Example: - SKContext["input"] = " hello world " - {{text.trim $input}} => "hello world " - """ - return text.lstrip() - - @sk_function(description="Trim whitespace from the end of a string.") - def trim_end(self, text: str) -> str: - """ - Trim whitespace from the end of a string. - - Example: - SKContext["input"] = " hello world " - {{text.trim $input}} => " hello world" - """ - return text.rstrip() - - @sk_function(description="Convert a string to uppercase.") - def uppercase(self, text: str) -> str: - """ - Convert a string to uppercase. - - Example: - SKContext["input"] = "hello world" - {{text.uppercase $input}} => "HELLO WORLD" - """ - return text.upper() - - @sk_function(description="Convert a string to lowercase.") - def lowercase(self, text: str) -> str: - """ - Convert a string to lowercase. 
- - Example: - SKContext["input"] = "HELLO WORLD" - {{text.lowercase $input}} => "hello world" - """ - return text.lower() diff --git a/python/semantic_kernel/core_skills/time_skill.py b/python/semantic_kernel/core_skills/time_skill.py deleted file mode 100644 index 94914831dbef..000000000000 --- a/python/semantic_kernel/core_skills/time_skill.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import datetime - -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition import sk_function - - -class TimeSkill(PydanticField): - """ - Description: TimeSkill provides a set of functions - to get the current time and date. - - Usage: - kernel.import_skill(TimeSkill(), skill_name="time") - - Examples: - {{time.date}} => Sunday, 12 January, 2031 - {{time.today}} => Sunday, 12 January, 2031 - {{time.iso_date}} => 2031-01-12 - {{time.now}} => Sunday, January 12, 2031 9:15 PM - {{time.utcNow}} => Sunday, January 13, 2031 5:15 AM - {{time.time}} => 09:15:07 PM - {{time.year}} => 2031 - {{time.month}} => January - {{time.monthNumber}} => 01 - {{time.day}} => 12 - {{time.dayOfWeek}} => Sunday - {{time.hour}} => 9 PM - {{time.hourNumber}} => 21 - {{time.days_ago $days}} => Sunday, 7 May, 2023 - {{time.last_matching_day $dayName}} => Sunday, 7 May, 2023 - {{time.minute}} => 15 - {{time.minutes}} => 15 - {{time.second}} => 7 - {{time.seconds}} => 7 - {{time.timeZoneOffset}} => -0800 - {{time.timeZoneName}} => PST - """ - - @sk_function(description="Get the current date.") - def date(self) -> str: - """ - Get the current date - - Example: - {{time.date}} => Sunday, 12 January, 2031 - """ - now = datetime.datetime.now() - return now.strftime("%A, %d %B, %Y") - - @sk_function(description="Get the current date.") - def today(self) -> str: - """ - Get the current date - - Example: - {{time.today}} => Sunday, 12 January, 2031 - """ - return self.date() - - @sk_function(description="Get the current date in iso format.") - def iso_date(self) -> str: - """ - Get the current date in iso format - - Example: - {{time.iso_date}} => 2031-01-12 - """ - today = datetime.date.today() - return today.isoformat() - - @sk_function(description="Get the current date and time in the local time zone") - def now(self) -> str: - """ - Get the current date and time in the local time zone" - - Example: - {{time.now}} => Sunday, January 12, 2031 9:15 PM - """ - now = datetime.datetime.now() - return now.strftime("%A, %B %d, %Y %I:%M %p") - - @sk_function(description="Get the current date and time in UTC", name="utcNow") - def utc_now(self) -> str: - """ - Get the current date and time in UTC - - Example: - {{time.utcNow}} => Sunday, January 13, 2031 5:15 AM - """ - now = datetime.datetime.utcnow() - return now.strftime("%A, %B %d, %Y %I:%M %p") - - @sk_function(description="Get the current time in the local time zone") - def time(self) -> str: - """ - Get the current time in the local time zone - - Example: - {{time.time}} => 09:15:07 PM - """ - now = datetime.datetime.now() - return now.strftime("%I:%M:%S %p") - - @sk_function(description="Get the current year") - def year(self) -> str: - """ - Get the current year - - Example: - {{time.year}} => 2031 - """ - now = datetime.datetime.now() - return now.strftime("%Y") - - @sk_function(description="Get the current month") - def month(self) -> str: - """ - Get the current month - - Example: - {{time.month}} => January - """ - now = datetime.datetime.now() - return now.strftime("%B") - - @sk_function(description="Get 
the current month number") - def month_number(self) -> str: - """ - Get the current month number - - Example: - {{time.monthNumber}} => 01 - """ - now = datetime.datetime.now() - return now.strftime("%m") - - @sk_function(description="Get the current day") - def day(self) -> str: - """ - Get the current day of the month - - Example: - {{time.day}} => 12 - """ - now = datetime.datetime.now() - return now.strftime("%d") - - @sk_function(description="Get the current day of the week", name="dayOfWeek") - def day_of_week(self) -> str: - """ - Get the current day of the week - - Example: - {{time.dayOfWeek}} => Sunday - """ - now = datetime.datetime.now() - return now.strftime("%A") - - @sk_function(description="Get the current hour") - def hour(self) -> str: - """ - Get the current hour - - Example: - {{time.hour}} => 9 PM - """ - now = datetime.datetime.now() - return now.strftime("%I %p") - - @sk_function(description="Get the current hour number", name="hourNumber") - def hour_number(self) -> str: - """ - Get the current hour number - - Example: - {{time.hourNumber}} => 21 - """ - now = datetime.datetime.now() - return now.strftime("%H") - - @sk_function(description="Get the current minute") - def minute(self) -> str: - """ - Get the current minute - - Example: - {{time.minute}} => 15 - """ - now = datetime.datetime.now() - return now.strftime("%M") - - @sk_function( - description="Get the date of offset from today by a provided number of days" - ) - def days_ago(self, days: str) -> str: - """ - Get the date a provided number of days in the past - - params: - days: The number of days to offset from today - returns: - The date of the offset day. - - Example: - SKContext["input"] = "3" - {{time.days_ago $input}} => Sunday, 7 May, 2023 - """ - d = datetime.date.today() - datetime.timedelta(days=int(days)) - return d.strftime("%A, %d %B, %Y") - - @sk_function( - description="""Get the date of the last day matching the supplied week day name in English. - Example: Che giorno era 'Martedi' scorso -> dateMatchingLastDayName 'Tuesday' => Tuesday, - 16 May, 2023""" - ) - def date_matching_last_day_name(self, day_name: str) -> str: - """ - Get the date of the last day matching the supplied day name - - params: - day_name: The day name to match with. - returns: - The date of the matching day. 
- - Example: - SKContext["input"] = "Sunday" - {{time.date_matching_last_day_name $input}} => Sunday, 7 May, 2023 - """ - d = datetime.date.today() - for i in range(1, 8): - d = d - datetime.timedelta(days=1) - if d.strftime("%A") == day_name: - return d.strftime("%A, %d %B, %Y") - raise ValueError("day_name is not recognized") - - @sk_function(description="Get the seconds on the current minute") - def second(self) -> str: - """ - Get the seconds on the current minute - - Example: - {{time.second}} => 7 - """ - now = datetime.datetime.now() - return now.strftime("%S") - - @sk_function(description="Get the current time zone offset", name="timeZoneOffset") - def time_zone_offset(self) -> str: - """ - Get the current time zone offset - - Example: - {{time.timeZoneOffset}} => -08:00 - """ - now = datetime.datetime.now() - return now.strftime("%z") - - @sk_function(description="Get the current time zone name", name="timeZoneName") - def time_zone_name(self) -> str: - """ - Get the current time zone name - - Example: - {{time.timeZoneName}} => PST - """ - now = datetime.datetime.now() - return now.strftime("%Z") diff --git a/python/semantic_kernel/core_skills/wait_skill.py b/python/semantic_kernel/core_skills/wait_skill.py deleted file mode 100644 index c4cd79b75a08..000000000000 --- a/python/semantic_kernel/core_skills/wait_skill.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio - -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition import sk_function - - -class WaitSkill(PydanticField): - """ - WaitSkill provides a set of functions to wait for a certain amount of time. - - Usage: - kernel.import_skill(WaitSkill(), skill_name="wait") - - Examples: - {{wait.seconds 5}} => Wait for 5 seconds - """ - - @sk_function(description="Wait for a certain number of seconds.") - async def wait(self, seconds_text: str) -> None: - try: - seconds = max(float(seconds_text), 0) - except ValueError: - raise ValueError("seconds text must be a number") - await asyncio.sleep(seconds) diff --git a/python/semantic_kernel/core_skills/web_search_engine_skill.py b/python/semantic_kernel/core_skills/web_search_engine_skill.py deleted file mode 100644 index 1b6d40cb2c3c..000000000000 --- a/python/semantic_kernel/core_skills/web_search_engine_skill.py +++ /dev/null @@ -1,56 +0,0 @@ -import typing as t - -from semantic_kernel.connectors.search_engine.connector import ConnectorBase -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter - -if t.TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext - - -class WebSearchEngineSkill: - """ - Description: A skill that provides web search engine functionality - - Usage: - connector = BingConnector(bing_search_api_key) - kernel.import_skill(WebSearchEngineSkill(connector), skill_name="WebSearch") - - Examples: - {{WebSearch.SearchAsync "What is semantic kernel?"}} - => Returns the first `num_results` number of results for the given search query - and ignores the first `offset` number of results - (num_results and offset are specified in SKContext) - """ - - _connector: "ConnectorBase" - - def __init__(self, connector: "ConnectorBase") -> None: - self._connector = connector - - @sk_function( - description="Performs a web search for a given query", name="searchAsync" - ) - @sk_function_context_parameter( - name="num_results", - description="The number of search results to return", - default_value="1", - ) - 
@sk_function_context_parameter( - name="offset", - description="The number of search results to skip", - default_value="0", - ) - async def search_async(self, query: str, context: "SKContext") -> str: - """ - Returns the search results of the query provided. - Returns `num_results` results and ignores the first `offset`. - - :param query: search query - :param context: contains the context of count and offset parameters - :return: stringified list of search results - """ - - _num_results = context.variables.get("num_results") - _offset = context.variables.get("offset") - result = await self._connector.search_async(query, _num_results, _offset) - return str(result) diff --git a/python/semantic_kernel/events/__init__.py b/python/semantic_kernel/events/__init__.py new file mode 100644 index 000000000000..88a686950872 --- /dev/null +++ b/python/semantic_kernel/events/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.events.function_invoked_event_args import FunctionInvokedEventArgs +from semantic_kernel.events.function_invoking_event_args import ( + FunctionInvokingEventArgs, +) + +__all__ = [ + "FunctionInvokedEventArgs", + "FunctionInvokingEventArgs", +] diff --git a/python/semantic_kernel/events/function_invoked_event_args.py b/python/semantic_kernel/events/function_invoked_event_args.py new file mode 100644 index 000000000000..68f3e3c1cb3f --- /dev/null +++ b/python/semantic_kernel/events/function_invoked_event_args.py @@ -0,0 +1,16 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.events.kernel_events_args import KernelEventArgs + + +class FunctionInvokedEventArgs(KernelEventArgs): + def __init__(self, function_view, context): + super().__init__(function_view, context) + self._repeat_requested = False + + @property + def is_repeat_requested(self): + return self._repeat_requested + + def repeat(self): + self._repeat_requested = True diff --git a/python/semantic_kernel/events/function_invoking_event_args.py b/python/semantic_kernel/events/function_invoking_event_args.py new file mode 100644 index 000000000000..6f0e42705d3b --- /dev/null +++ b/python/semantic_kernel/events/function_invoking_event_args.py @@ -0,0 +1,16 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.events.kernel_events_args import KernelEventArgs + + +class FunctionInvokingEventArgs(KernelEventArgs): + def __init__(self, function_view, context): + super().__init__(function_view, context) + self._skip_requested = False + + @property + def is_skip_requested(self): + return self._skip_requested + + def skip(self): + self._skip_requested = True diff --git a/python/semantic_kernel/events/kernel_events_args.py b/python/semantic_kernel/events/kernel_events_args.py new file mode 100644 index 000000000000..fac02e46c136 --- /dev/null +++ b/python/semantic_kernel/events/kernel_events_args.py @@ -0,0 +1,31 @@ +# Copyright (c) Microsoft. All rights reserved. 
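# Minimal usage sketch for the new function-invocation events introduced above, assuming the
# handler registration methods added later in this same diff (add_function_invoking_handler /
# add_function_invoked_handler) and the handler(kernel, args) call signature used by
# Kernel.on_function_invoking / on_function_invoked. The FunctionView.name attribute and the
# "Debug"/"Trace" function names are assumptions for illustration; `kernel` stands for an
# already-configured Kernel instance. Not part of the patch itself.

retried = False

def pre_hook(kernel, args):
    """Called with FunctionInvokingEventArgs before each pipeline step."""
    # Example policy: bypass functions on a local block list; args.skip() only
    # skips the current function, while args.cancel() would stop the pipeline.
    if args.function_view.name in {"Debug", "Trace"}:
        args.skip()

def post_hook(kernel, args):
    """Called with FunctionInvokedEventArgs after each pipeline step."""
    global retried
    if not retried:
        retried = True
        args.repeat()  # ask Kernel.run to execute the same function once more

# kernel.add_function_invoking_handler(pre_hook)
# kernel.add_function_invoked_handler(post_hook)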
+ +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.plugin_definition.function_view import FunctionView + + +class KernelEventArgs: + def __init__(self, function_view: FunctionView, context: KernelContext): + if context is None or function_view is None: + raise ValueError("function_view and context cannot be None") + + self._function_view = function_view + self._context = context + + # Temporal cancellationToken to sync C# + self._is_cancel_requested = False + + @property + def function_view(self): + return self._function_view + + @property + def context(self): + return self._context + + @property + def is_cancel_requested(self): + return self._is_cancel_requested + + def cancel(self): + self._is_cancel_requested = True diff --git a/python/semantic_kernel/kernel.py b/python/semantic_kernel/kernel.py index 9a94bbbfe527..99e968e98f3e 100644 --- a/python/semantic_kernel/kernel.py +++ b/python/semantic_kernel/kernel.py @@ -3,34 +3,38 @@ import glob import importlib import inspect +import logging import os -from logging import Logger from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union -from uuid import uuid4 + +from pydantic import Field from semantic_kernel.connectors.ai.ai_exception import AIException from semantic_kernel.connectors.ai.chat_completion_client_base import ( ChatCompletionClientBase, ) -from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, -) from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( EmbeddingGeneratorBase, ) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.connectors.ai.text_completion_client_base import ( TextCompletionClientBase, ) +from semantic_kernel.events import FunctionInvokedEventArgs, FunctionInvokingEventArgs from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.memory.memory_store_base import MemoryStoreBase from semantic_kernel.memory.null_memory import NullMemory from semantic_kernel.memory.semantic_text_memory import SemanticTextMemory from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function import SKFunction -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin +from semantic_kernel.plugin_definition.kernel_plugin_collection import ( + KernelPluginCollection, +) from semantic_kernel.reliability.pass_through_without_retry import ( PassThroughWithoutRetry, ) @@ -42,134 +46,179 @@ from semantic_kernel.semantic_functions.semantic_function_config import ( SemanticFunctionConfig, ) -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, -) -from semantic_kernel.skill_definition.skill_collection import SkillCollection -from semantic_kernel.skill_definition.skill_collection_base import SkillCollectionBase from 
semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine from semantic_kernel.template_engine.protocols.prompt_templating_engine import ( PromptTemplatingEngine, ) -from semantic_kernel.utils.null_logger import NullLogger -from semantic_kernel.utils.validation import validate_function_name, validate_skill_name +from semantic_kernel.utils.naming import generate_random_ascii_name +from semantic_kernel.utils.validation import ( + validate_function_name, + validate_plugin_name, +) T = TypeVar("T") - -class Kernel: - _log: Logger - _skill_collection: SkillCollectionBase - _prompt_template_engine: PromptTemplatingEngine - _memory: SemanticTextMemoryBase +logger: logging.Logger = logging.getLogger(__name__) + + +class Kernel(KernelBaseModel): + """ + The Kernel class is the main entry point for the Semantic Kernel. It provides the ability to run + semantic/native functions, and manage plugins, memory, and AI services. + + Attributes: + plugins (Optional[KernelPluginCollection]): The collection of plugins to be used by the kernel + prompt_template_engine (Optional[PromptTemplatingEngine]): The prompt template engine to be used by the kernel + memory (Optional[SemanticTextMemoryBase]): The memory to be used by the kernel + text_completion_services (Dict[str, Callable[["Kernel"], TextCompletionClientBase]]): The text + completion services + chat_services (Dict[str, Callable[["Kernel"], ChatCompletionClientBase]]): The chat services + text_embedding_generation_services (Dict[str, Callable[["Kernel"], EmbeddingGeneratorBase]]): The text embedding + default_text_completion_service (Optional[str]): The default text completion service + default_chat_service (Optional[str]): The default chat service + default_text_embedding_generation_service (Optional[str]): The default text embedding generation service + retry_mechanism (RetryMechanismBase): The retry mechanism to be used by the kernel + function_invoking_handlers (Dict): The function invoking handlers + function_invoked_handlers (Dict): The function invoked handlers + """ + + plugins: Optional[KernelPluginCollection] = Field(default_factory=KernelPluginCollection) + prompt_template_engine: Optional[PromptTemplatingEngine] = Field(default_factory=PromptTemplateEngine) + memory: Optional[SemanticTextMemoryBase] = Field(default_factory=SemanticTextMemory) + text_completion_services: Dict[str, Callable[["Kernel"], TextCompletionClientBase]] = Field(default_factory=dict) + chat_services: Dict[str, Callable[["Kernel"], ChatCompletionClientBase]] = Field(default_factory=dict) + text_embedding_generation_services: Dict[str, Callable[["Kernel"], EmbeddingGeneratorBase]] = Field( + default_factory=dict + ) + default_text_completion_service: Optional[str] = Field(default=None) + default_chat_service: Optional[str] = Field(default=None) + default_text_embedding_generation_service: Optional[str] = Field(default=None) + retry_mechanism: RetryMechanismBase = Field(default_factory=PassThroughWithoutRetry) + function_invoking_handlers: Dict = Field(default_factory=dict) + function_invoked_handlers: Dict = Field(default_factory=dict) def __init__( self, - skill_collection: Optional[SkillCollectionBase] = None, + plugins: Optional[KernelPluginCollection] = None, prompt_template_engine: Optional[PromptTemplatingEngine] = None, memory: Optional[SemanticTextMemoryBase] = None, - log: Optional[Logger] = None, + **kwargs: Any, ) -> None: - self._log = log if log else NullLogger() - self._skill_collection = ( - skill_collection if skill_collection 
else SkillCollection(self._log) - ) - self._prompt_template_engine = ( - prompt_template_engine - if prompt_template_engine - else PromptTemplateEngine(self._log) - ) - self._memory = memory if memory else NullMemory() - - self._text_completion_services: Dict[ - str, Callable[["Kernel"], TextCompletionClientBase] - ] = {} - self._chat_services: Dict[ - str, Callable[["Kernel"], ChatCompletionClientBase] - ] = {} - self._text_embedding_generation_services: Dict[ - str, Callable[["Kernel"], EmbeddingGeneratorBase] - ] = {} - - self._default_text_completion_service: Optional[str] = None - self._default_chat_service: Optional[str] = None - self._default_text_embedding_generation_service: Optional[str] = None - - self._retry_mechanism: RetryMechanismBase = PassThroughWithoutRetry() - - @property - def logger(self) -> Logger: - return self._log - - @property - def memory(self) -> SemanticTextMemoryBase: - return self._memory - - @property - def prompt_template_engine(self) -> PromptTemplatingEngine: - return self._prompt_template_engine - - @property - def skills(self) -> ReadOnlySkillCollectionBase: - return self._skill_collection.read_only_skill_collection + """ + Initialize a new instance of the Kernel class. + + Args: + plugins (Optional[KernelPluginCollection]): The collection of plugins to be used by the kernel + prompt_template_engine (Optional[PromptTemplatingEngine]): The prompt template engine to be + used by the kernel + memory (Optional[SemanticTextMemoryBase]): The memory to be used by the kernel + **kwargs (Any): Additional fields to be passed to the Kernel model + """ + plugins = plugins if plugins else KernelPluginCollection() + prompt_template_engine = prompt_template_engine if prompt_template_engine else PromptTemplateEngine() + memory = memory if memory else NullMemory() + + super().__init__(plugins=plugins, prompt_template_engine=prompt_template_engine, memory=memory, **kwargs) + + def add_plugin( + self, plugin_name: str, functions: List[KernelFunction], plugin: Optional[KernelPlugin] = None + ) -> None: + """ + Adds a plugin to the kernel's collection of plugins. If a plugin instance is provided, + it uses that instance instead of creating a new KernelPlugin. + + Args: + plugin_name (str): The name of the plugin + functions (List[KernelFunction]): The functions to add to the plugin + plugin (Optional[KernelPlugin]): An optional pre-defined plugin instance + """ + if plugin is None: + # If no plugin instance is provided, create a new KernelPlugin + plugin = KernelPlugin(name=plugin_name, functions=functions) + + if plugin_name in self.plugins: + self.plugins.add_functions_to_plugin(functions=functions, plugin_name=plugin_name) + else: + self.plugins.add(plugin) def register_semantic_function( self, - skill_name: Optional[str], + plugin_name: Optional[str], function_name: str, function_config: SemanticFunctionConfig, - ) -> SKFunctionBase: - if skill_name is None or skill_name == "": - skill_name = SkillCollection.GLOBAL_SKILL - assert skill_name is not None # for type checker - - validate_skill_name(skill_name) + ) -> KernelFunction: + """ + Creates a semantic function from the plugin name, function name and function config + + Args: + plugin_name (Optional[str]): The name of the plugin. If empty, a random name will be generated. 
+ function_name (str): The name of the function + function_config (SemanticFunctionConfig): The function config + + Returns: + KernelFunction: The created semantic function + + Raises: + ValueError: If the plugin name or function name are invalid + """ + if plugin_name is None or plugin_name == "": + plugin_name = f"p_{generate_random_ascii_name()}" + assert plugin_name is not None # for type checker + + validate_plugin_name(plugin_name) validate_function_name(function_name) - function = self._create_semantic_function( - skill_name, function_name, function_config - ) - self._skill_collection.add_semantic_function(function) + function = self._create_semantic_function(plugin_name, function_name, function_config) + self.add_plugin(plugin_name, [function]) + function.set_default_plugin_collection(self.plugins) return function def register_native_function( self, - skill_name: Optional[str], - sk_function: Callable, - ) -> SKFunctionBase: - if not hasattr(sk_function, "__sk_function__"): + plugin_name: Optional[str], + kernel_function: Callable, + ) -> KernelFunction: + """ + Creates a native function from the plugin name and kernel function + + Args: + plugin_name (Optional[str]): The name of the plugin. If empty, a random name will be generated. + kernel_function (Callable): The kernel function + + Returns: + KernelFunction: The created native function + """ + if not hasattr(kernel_function, "__kernel_function__"): raise KernelException( KernelException.ErrorCodes.InvalidFunctionType, - "sk_function argument must be decorated with @sk_function", + "kernel_function argument must be decorated with @kernel_function", ) - function_name = sk_function.__sk_function_name__ + function_name = kernel_function.__kernel_function_name__ - if skill_name is None or skill_name == "": - skill_name = SkillCollection.GLOBAL_SKILL - assert skill_name is not None # for type checker + if plugin_name is None or plugin_name == "": + plugin_name = f"p_{generate_random_ascii_name()}" + assert plugin_name is not None # for type checker - validate_skill_name(skill_name) + validate_plugin_name(plugin_name) validate_function_name(function_name) - function = SKFunction.from_native_method(sk_function, skill_name, self.logger) - - if self.skills.has_function(skill_name, function_name): + if plugin_name in self.plugins and function_name in self.plugins[plugin_name]: raise KernelException( KernelException.ErrorCodes.FunctionOverloadNotSupported, - "Overloaded functions are not supported, " - "please differentiate function names.", + "Overloaded functions are not supported, " "please differentiate function names.", ) - function.set_default_skill_collection(self.skills) - self._skill_collection.add_native_function(function) + function = KernelFunction.from_native_method(kernel_function, plugin_name) + self.add_plugin(plugin_name, [function]) + function.set_default_plugin_collection(self.plugins) return function - async def run_stream_async( + async def run_stream( self, *functions: Any, - input_context: Optional[SKContext] = None, + input_context: Optional[KernelContext] = None, input_vars: Optional[ContextVariables] = None, input_str: Optional[str] = None, ): @@ -178,21 +227,17 @@ async def run_stream_async( stream_function = functions[-1] # run pipeline functions - context = await self.run_async( - pipeline_functions, input_context, input_vars, input_str - ) + context = await self.run(pipeline_functions, input_context, input_vars, input_str) elif len(functions) == 1: stream_function = functions[0] - # TODO: Preparing context 
for function invoke can be refactored as code below are same as run_async + # TODO: Preparing context for function invoke can be refactored as code below are same as run # if the user passed in a context, prioritize it, but merge with any other inputs if input_context is not None: context = input_context if input_vars is not None: - context.variables = input_vars.merge_or_overwrite( - new_vars=context.variables, overwrite=False - ) + context.variables = input_vars.merge_or_overwrite(new_vars=context.variables, overwrite=False) if input_str is not None: context.variables = ContextVariables(input_str).merge_or_overwrite( @@ -208,33 +253,26 @@ async def run_stream_async( variables = input_vars elif input_str is not None and input_vars is not None: variables = ContextVariables(input_str) - variables = variables.merge_or_overwrite( - new_vars=input_vars, overwrite=False - ) + variables = variables.merge_or_overwrite(new_vars=input_vars, overwrite=False) else: variables = ContextVariables() - context = SKContext( - variables, - self._memory, - self._skill_collection.read_only_skill_collection, - self._log, + context = KernelContext( + variables=variables, + memory=self.memory, + plugins=self.plugins, ) else: raise ValueError("No functions passed to run") try: - completion = "" - async for stream_message in stream_function.invoke_stream_async( - input=None, context=context - ): - completion += stream_message + async for stream_message in stream_function.invoke_stream(input=None, context=context): yield stream_message except Exception as ex: # TODO: "critical exceptions" - self._log.error( + logger.error( "Something went wrong in stream function. During function invocation:" - f" '{stream_function.skill_name}.{stream_function.name}'. Error" + f" '{stream_function.plugin_name}.{stream_function.name}'. Error" f" description: '{str(ex)}'" ) raise KernelException( @@ -242,21 +280,19 @@ async def run_stream_async( "Error occurred while invoking stream function", ) - async def run_async( + async def run( self, *functions: Any, - input_context: Optional[SKContext] = None, + input_context: Optional[KernelContext] = None, input_vars: Optional[ContextVariables] = None, input_str: Optional[str] = None, **kwargs: Dict[str, Any], - ) -> SKContext: + ) -> KernelContext: # if the user passed in a context, prioritize it, but merge with any other inputs if input_context is not None: context = input_context if input_vars is not None: - context.variables = input_vars.merge_or_overwrite( - new_vars=context.variables, overwrite=False - ) + context.variables = input_vars.merge_or_overwrite(new_vars=context.variables, overwrite=False) if input_str is not None: context.variables = ContextVariables(input_str).merge_or_overwrite( @@ -272,60 +308,97 @@ async def run_async( variables = input_vars elif input_str is not None and input_vars is not None: variables = ContextVariables(input_str) - variables = variables.merge_or_overwrite( - new_vars=input_vars, overwrite=False - ) + variables = variables.merge_or_overwrite(new_vars=input_vars, overwrite=False) else: variables = ContextVariables() - context = SKContext( - variables, - self._memory, - self._skill_collection.read_only_skill_collection, - self._log, + context = KernelContext( + variables=variables, + memory=self.memory, + plugins=self.plugins, ) pipeline_step = 0 for func in functions: - assert isinstance(func, SKFunctionBase), ( - "All func arguments to Kernel.run*(inputs, func1, func2, ...) 
" - "must be SKFunctionBase instances" - ) - - if context.error_occurred: - self._log.error( - f"Something went wrong in pipeline step {pipeline_step}. " - f"Error description: '{context.last_error_description}'" + while True: + assert isinstance(func, KernelFunction), ( + "All func arguments to Kernel.run*(inputs, func1, func2, ...) " "must be KernelFunction instances" ) - return context - - pipeline_step += 1 - - try: - context = await func.invoke_async(input=None, context=context, **kwargs) if context.error_occurred: - self._log.error( + logger.error( f"Something went wrong in pipeline step {pipeline_step}. " - f"During function invocation: '{func.skill_name}.{func.name}'. " f"Error description: '{context.last_error_description}'" ) return context - except Exception as ex: - self._log.error( - f"Something went wrong in pipeline step {pipeline_step}. " - f"During function invocation: '{func.skill_name}.{func.name}'. " - f"Error description: '{str(ex)}'" - ) - context.fail(str(ex), ex) - return context - return context + try: + function_details = func.describe() + + function_invoking_args = self.on_function_invoking(function_details, context) + if ( + isinstance(function_invoking_args, FunctionInvokingEventArgs) + and function_invoking_args.is_cancel_requested + ): + cancel_message = "Execution was cancelled on function invoking event of pipeline step" + logger.info(f"{cancel_message} {pipeline_step}: {func.plugin_name}.{func.name}.") + return context + + if ( + isinstance(function_invoking_args, FunctionInvokingEventArgs) + and function_invoking_args.is_skip_requested + ): + skip_message = "Execution was skipped on function invoking event of pipeline step" + logger.info(f"{skip_message} {pipeline_step}: {func.plugin_name}.{func.name}.") + break + + context = await func.invoke(input=None, context=context, **kwargs) + + if context.error_occurred: + logger.error( + f"Something went wrong in pipeline step {pipeline_step}. " + f"During function invocation: '{func.plugin_name}.{func.name}'. " + f"Error description: '{context.last_error_description}'" + ) + return context + + function_invoked_args = self.on_function_invoked(function_details, context) + + if ( + isinstance(function_invoked_args, FunctionInvokedEventArgs) + and function_invoked_args.is_cancel_requested + ): + cancel_message = "Execution was cancelled on function invoked event of pipeline step" + logger.info(f"{cancel_message} {pipeline_step}: {func.plugin_name}.{func.name}.") + return context + if ( + isinstance(function_invoked_args, FunctionInvokedEventArgs) + and function_invoked_args.is_repeat_requested + ): + repeat_message = "Execution was repeated on function invoked event of pipeline step" + logger.info(f"{repeat_message} {pipeline_step}: {func.plugin_name}.{func.name}.") + continue + else: + break + + except Exception as ex: + logger.error( + f"Something went wrong in pipeline step {pipeline_step}. " + f"During function invocation: '{func.plugin_name}.{func.name}'. 
" + f"Error description: '{str(ex)}'" + ) + context.fail(str(ex), ex) + return context - def func(self, skill_name: str, function_name: str) -> SKFunctionBase: - if self.skills.has_native_function(skill_name, function_name): - return self.skills.get_native_function(skill_name, function_name) + pipeline_step += 1 + + return context - return self.skills.get_semantic_function(skill_name, function_name) + def func(self, plugin_name: str, function_name: str) -> KernelFunction: + if plugin_name not in self.plugins: + raise ValueError(f"Plugin '{plugin_name}' not found") + if function_name not in self.plugins[plugin_name]: + raise ValueError(f"Function '{function_name}' not found in plugin '{plugin_name}'") + return self.plugins[plugin_name][function_name] def use_memory( self, @@ -351,302 +424,294 @@ def use_memory( self.register_memory(SemanticTextMemory(storage, embeddings_generator)) def register_memory(self, memory: SemanticTextMemoryBase) -> None: - self._memory = memory + self.memory = memory def register_memory_store(self, memory_store: MemoryStoreBase) -> None: self.use_memory(memory_store) - def create_new_context( - self, variables: Optional[ContextVariables] = None - ) -> SKContext: - return SKContext( + def create_new_context(self, variables: Optional[ContextVariables] = None) -> KernelContext: + return KernelContext( ContextVariables() if not variables else variables, - self._memory, - self.skills, - self._log, + self.memory, + self.plugins, ) - def import_skill( - self, skill_instance: Any, skill_name: str = "" - ) -> Dict[str, SKFunctionBase]: - if skill_name.strip() == "": - skill_name = SkillCollection.GLOBAL_SKILL - self._log.debug(f"Importing skill {skill_name} into the global namespace") - else: - self._log.debug(f"Importing skill {skill_name}") + def on_function_invoking(self, function_view: FunctionView, context: KernelContext) -> FunctionInvokingEventArgs: + if self.function_invoking_handlers: + args = FunctionInvokingEventArgs(function_view, context) + for handler in self.function_invoking_handlers.values(): + handler(self, args) + return args + return None + + def on_function_invoked(self, function_view: FunctionView, context: KernelContext) -> FunctionInvokedEventArgs: + if self.function_invoked_handlers: + args = FunctionInvokedEventArgs(function_view, context) + for handler in self.function_invoked_handlers.values(): + handler(self, args) + return args + return None + + def import_plugin(self, plugin_instance: Union[Any, Dict[str, Any]], plugin_name: str) -> KernelPlugin: + """ + Import a plugin into the kernel. + + Args: + plugin_instance (Any | Dict[str, Any]): The plugin instance. This can be a custom class or a + dictionary of classes that contains methods with the kernel_function decorator for one or + several methods. See `TextMemoryPlugin` as an example. + plugin_name (str): The name of the plugin. Allows chars: upper, lower ASCII and underscores. + + Returns: + KernelPlugin: The imported plugin of type KernelPlugin. 
+ """ + if not plugin_name.strip(): + logger.warn("Unable to import plugin due to missing plugin_name") + raise KernelException( + KernelException.ErrorCodes.InvalidPluginName, + "Plugin name cannot be empty", + ) + logger.debug(f"Importing plugin {plugin_name}") functions = [] - if isinstance(skill_instance, dict): - candidates = skill_instance.items() + if isinstance(plugin_instance, dict): + candidates = plugin_instance.items() else: - candidates = inspect.getmembers(skill_instance, inspect.ismethod) - # Read every method from the skill instance + candidates = inspect.getmembers(plugin_instance, inspect.ismethod) + # Read every method from the plugin instance for _, candidate in candidates: # If the method is a semantic function, register it - if not hasattr(candidate, "__sk_function__"): + if not hasattr(candidate, "__kernel_function__"): continue - functions.append( - SKFunction.from_native_method(candidate, skill_name, self.logger) - ) + functions.append(KernelFunction.from_native_method(candidate, plugin_name)) - self.logger.debug(f"Methods imported: {len(functions)}") + logger.debug(f"Methods imported: {len(functions)}") # Uniqueness check on function names function_names = [f.name for f in functions] if len(function_names) != len(set(function_names)): raise KernelException( KernelException.ErrorCodes.FunctionOverloadNotSupported, - ( - "Overloaded functions are not supported, " - "please differentiate function names." - ), + ("Overloaded functions are not supported, " "please differentiate function names."), ) - skill = {} - for function in functions: - function.set_default_skill_collection(self.skills) - self._skill_collection.add_native_function(function) - skill[function.name] = function + # This is legacy - figure out why we're setting all plugins on each function? 
+ for func in functions: + func.set_default_plugin_collection(self.plugins) + + plugin = KernelPlugin(name=plugin_name, functions=functions) + # TODO: we shouldn't have to be adding functions to a plugin after the fact + # This isn't done in dotnet, and needs to be revisited as we move to v1.0 + # This is to support the current state of the code + if plugin_name in self.plugins: + self.plugins.add_functions_to_plugin(functions=functions, plugin_name=plugin_name) + else: + self.plugins.add(plugin) - return skill + return plugin - def get_ai_service( + def get_prompt_execution_settings_from_service( self, type: Type[T], service_id: Optional[str] = None - ) -> Callable[["Kernel"], T]: + ) -> PromptExecutionSettings: + """Get the specific request settings from the service, instantiated with the service_id and ai_model_id.""" + service = self.get_ai_service(type, service_id) + service_instance = service.__closure__[0].cell_contents + req_settings_type = service_instance.get_prompt_execution_settings_class() + return req_settings_type( + service_id=service_id, + extension_data={"ai_model_id": service_instance.ai_model_id}, + ) + + def get_ai_service(self, type: Type[T], service_id: Optional[str] = None) -> Callable[["Kernel"], T]: matching_type = {} if type == TextCompletionClientBase: - service_id = service_id or self._default_text_completion_service - matching_type = self._text_completion_services + service_id = service_id or self.default_text_completion_service + matching_type = self.text_completion_services elif type == ChatCompletionClientBase: - service_id = service_id or self._default_chat_service - matching_type = self._chat_services + service_id = service_id or self.default_chat_service + matching_type = self.chat_services elif type == EmbeddingGeneratorBase: - service_id = service_id or self._default_text_embedding_generation_service - matching_type = self._text_embedding_generation_services + service_id = service_id or self.default_text_embedding_generation_service + matching_type = self.text_embedding_generation_services else: raise ValueError(f"Unknown AI service type: {type.__name__}") if service_id not in matching_type: - raise ValueError( - f"{type.__name__} service with service_id '{service_id}' not found" - ) + raise ValueError(f"{type.__name__} service with service_id '{service_id}' not found") return matching_type[service_id] def all_text_completion_services(self) -> List[str]: - return list(self._text_completion_services.keys()) + return list(self.text_completion_services.keys()) def all_chat_services(self) -> List[str]: - return list(self._chat_services.keys()) + return list(self.chat_services.keys()) def all_text_embedding_generation_services(self) -> List[str]: - return list(self._text_embedding_generation_services.keys()) + return list(self.text_embedding_generation_services.keys()) def add_text_completion_service( self, service_id: str, - service: Union[ - TextCompletionClientBase, Callable[["Kernel"], TextCompletionClientBase] - ], + service: Union[TextCompletionClientBase, Callable[["Kernel"], TextCompletionClientBase]], overwrite: bool = True, ) -> "Kernel": if not service_id: raise ValueError("service_id must be a non-empty string") - if not overwrite and service_id in self._text_completion_services: - raise ValueError( - f"Text service with service_id '{service_id}' already exists" - ) + if not overwrite and service_id in self.text_completion_services: + raise ValueError(f"Text service with service_id '{service_id}' already exists") - 
self._text_completion_services[service_id] = ( - service if isinstance(service, Callable) else lambda _: service - ) - if self._default_text_completion_service is None: - self._default_text_completion_service = service_id + self.text_completion_services[service_id] = service if isinstance(service, Callable) else lambda _: service + if self.default_text_completion_service is None: + self.default_text_completion_service = service_id return self def add_chat_service( self, service_id: str, - service: Union[ - ChatCompletionClientBase, Callable[["Kernel"], ChatCompletionClientBase] - ], + service: Union[ChatCompletionClientBase, Callable[["Kernel"], ChatCompletionClientBase]], overwrite: bool = True, ) -> "Kernel": if not service_id: raise ValueError("service_id must be a non-empty string") - if not overwrite and service_id in self._chat_services: - raise ValueError( - f"Chat service with service_id '{service_id}' already exists" - ) + if not overwrite and service_id in self.chat_services: + raise ValueError(f"Chat service with service_id '{service_id}' already exists") - self._chat_services[service_id] = ( - service if isinstance(service, Callable) else lambda _: service - ) - if self._default_chat_service is None: - self._default_chat_service = service_id + self.chat_services[service_id] = service if isinstance(service, Callable) else lambda _: service + if self.default_chat_service is None: + self.default_chat_service = service_id if isinstance(service, TextCompletionClientBase): self.add_text_completion_service(service_id, service) - if self._default_text_completion_service is None: - self._default_text_completion_service = service_id return self def add_text_embedding_generation_service( self, service_id: str, - service: Union[ - EmbeddingGeneratorBase, Callable[["Kernel"], EmbeddingGeneratorBase] - ], + service: Union[EmbeddingGeneratorBase, Callable[["Kernel"], EmbeddingGeneratorBase]], overwrite: bool = False, ) -> "Kernel": if not service_id: raise ValueError("service_id must be a non-empty string") - if not overwrite and service_id in self._text_embedding_generation_services: - raise ValueError( - f"Embedding service with service_id '{service_id}' already exists" - ) + if not overwrite and service_id in self.text_embedding_generation_services: + raise ValueError(f"Embedding service with service_id '{service_id}' already exists") - self._text_embedding_generation_services[service_id] = ( + self.text_embedding_generation_services[service_id] = ( service if isinstance(service, Callable) else lambda _: service ) - if self._default_text_embedding_generation_service is None: - self._default_text_embedding_generation_service = service_id + if self.default_text_embedding_generation_service is None: + self.default_text_embedding_generation_service = service_id return self def set_default_text_completion_service(self, service_id: str) -> "Kernel": - if service_id not in self._text_completion_services: - raise ValueError( - f"AI service with service_id '{service_id}' does not exist" - ) + if service_id not in self.text_completion_services: + raise ValueError(f"AI service with service_id '{service_id}' does not exist") - self._default_text_completion_service = service_id + self.default_text_completion_service = service_id return self def set_default_chat_service(self, service_id: str) -> "Kernel": - if service_id not in self._chat_services: - raise ValueError( - f"AI service with service_id '{service_id}' does not exist" - ) + if service_id not in self.chat_services: + raise ValueError(f"AI 
service with service_id '{service_id}' does not exist") - self._default_chat_service = service_id + self.default_chat_service = service_id return self - def set_default_text_embedding_generation_service( - self, service_id: str - ) -> "Kernel": - if service_id not in self._text_embedding_generation_services: - raise ValueError( - f"AI service with service_id '{service_id}' does not exist" - ) + def set_default_text_embedding_generation_service(self, service_id: str) -> "Kernel": + if service_id not in self.text_embedding_generation_services: + raise ValueError(f"AI service with service_id '{service_id}' does not exist") - self._default_text_embedding_generation_service = service_id + self.default_text_embedding_generation_service = service_id return self - def get_text_completion_service_service_id( - self, service_id: Optional[str] = None - ) -> str: - if service_id is None or service_id not in self._text_completion_services: - if self._default_text_completion_service is None: + def get_text_completion_service_service_id(self, service_id: Optional[str] = None) -> str: + if service_id is None or service_id not in self.text_completion_services: + if self.default_text_completion_service is None: raise ValueError("No default text service is set") - return self._default_text_completion_service + return self.default_text_completion_service return service_id def get_chat_service_service_id(self, service_id: Optional[str] = None) -> str: - if service_id is None or service_id not in self._chat_services: - if self._default_chat_service is None: + if service_id is None or service_id not in self.chat_services: + if self.default_chat_service is None: raise ValueError("No default chat service is set") - return self._default_chat_service + return self.default_chat_service return service_id - def get_text_embedding_generation_service_id( - self, service_id: Optional[str] = None - ) -> str: - if ( - service_id is None - or service_id not in self._text_embedding_generation_services - ): - if self._default_text_embedding_generation_service is None: + def get_text_embedding_generation_service_id(self, service_id: Optional[str] = None) -> str: + if service_id is None or service_id not in self.text_embedding_generation_services: + if self.default_text_embedding_generation_service is None: raise ValueError("No default embedding service is set") - return self._default_text_embedding_generation_service + return self.default_text_embedding_generation_service return service_id def remove_text_completion_service(self, service_id: str) -> "Kernel": - if service_id not in self._text_completion_services: - raise ValueError( - f"AI service with service_id '{service_id}' does not exist" - ) + if service_id not in self.text_completion_services: + raise ValueError(f"AI service with service_id '{service_id}' does not exist") - del self._text_completion_services[service_id] - if self._default_text_completion_service == service_id: - self._default_text_completion_service = next( - iter(self._text_completion_services), None - ) + del self.text_completion_services[service_id] + if self.default_text_completion_service == service_id: + self.default_text_completion_service = next(iter(self.text_completion_services), None) return self def remove_chat_service(self, service_id: str) -> "Kernel": - if service_id not in self._chat_services: - raise ValueError( - f"AI service with service_id '{service_id}' does not exist" - ) + if service_id not in self.chat_services: + raise ValueError(f"AI service with service_id '{service_id}' does 
not exist") - del self._chat_services[service_id] - if self._default_chat_service == service_id: - self._default_chat_service = next(iter(self._chat_services), None) + del self.chat_services[service_id] + if self.default_chat_service == service_id: + self.default_chat_service = next(iter(self.chat_services), None) return self def remove_text_embedding_generation_service(self, service_id: str) -> "Kernel": - if service_id not in self._text_embedding_generation_services: - raise ValueError( - f"AI service with service_id '{service_id}' does not exist" - ) + if service_id not in self.text_embedding_generation_services: + raise ValueError(f"AI service with service_id '{service_id}' does not exist") - del self._text_embedding_generation_services[service_id] - if self._default_text_embedding_generation_service == service_id: - self._default_text_embedding_generation_service = next( - iter(self._text_embedding_generation_services), None - ) + del self.text_embedding_generation_services[service_id] + if self.default_text_embedding_generation_service == service_id: + self.default_text_embedding_generation_service = next(iter(self.text_embedding_generation_services), None) return self def clear_all_text_completion_services(self) -> "Kernel": - self._text_completion_services = {} - self._default_text_completion_service = None + self.text_completion_services = {} + self.default_text_completion_service = None return self def clear_all_chat_services(self) -> "Kernel": - self._chat_services = {} - self._default_chat_service = None + self.chat_services = {} + self.default_chat_service = None return self def clear_all_text_embedding_generation_services(self) -> "Kernel": - self._text_embedding_generation_services = {} - self._default_text_embedding_generation_service = None + self.text_embedding_generation_services = {} + self.default_text_embedding_generation_service = None return self def clear_all_services(self) -> "Kernel": - self._text_completion_services = {} - self._chat_services = {} - self._text_embedding_generation_services = {} + self.text_completion_services = {} + self.chat_services = {} + self.text_embedding_generation_services = {} - self._default_text_completion_service = None - self._default_chat_service = None - self._default_text_embedding_generation_service = None + self.default_text_completion_service = None + self.default_chat_service = None + self.default_text_embedding_generation_service = None return self def _create_semantic_function( self, - skill_name: str, + plugin_name: str, function_name: str, function_config: SemanticFunctionConfig, - ) -> SKFunctionBase: + ) -> KernelFunction: function_type = function_config.prompt_template_config.type if not function_type == "completion": raise AIException( @@ -654,17 +719,10 @@ def _create_semantic_function( f"Function type not supported: {function_type}", ) - function = SKFunction.from_semantic_config( - skill_name, function_name, function_config + function = KernelFunction.from_semantic_config(plugin_name, function_name, function_config) + function.prompt_execution_settings.update_from_prompt_execution_settings( + function_config.prompt_template_config.execution_settings ) - function.request_settings.update_from_completion_config( - function_config.prompt_template_config.completion - ) - - # Connect the function to the current kernel skill - # collection, in case the function is invoked manually - # without a context and without a way to find other functions. 
- function.set_default_skill_collection(self.skills) if function_config.has_chat_prompt: service = self.get_ai_service( @@ -673,10 +731,11 @@ def _create_semantic_function( if len(function_config.prompt_template_config.default_services) > 0 else None, ) + req_settings_type = service.__closure__[0].cell_contents.get_prompt_execution_settings_class() function.set_chat_configuration( - ChatRequestSettings.from_completion_config( - function_config.prompt_template_config.completion + req_settings_type.from_prompt_execution_settings( + function_config.prompt_template_config.execution_settings ) ) @@ -698,10 +757,11 @@ def _create_semantic_function( if len(function_config.prompt_template_config.default_services) > 0 else None, ) + req_settings_type = service.__closure__[0].cell_contents.get_prompt_execution_settings_class() function.set_ai_configuration( - CompleteRequestSettings.from_completion_config( - function_config.prompt_template_config.completion + req_settings_type.from_prompt_execution_settings( + function_config.prompt_template_config.execution_settings ) ) @@ -719,60 +779,48 @@ def _create_semantic_function( return function - def import_native_skill_from_directory( - self, parent_directory: str, skill_directory_name: str - ) -> Dict[str, SKFunctionBase]: + def import_native_plugin_from_directory(self, parent_directory: str, plugin_directory_name: str) -> KernelPlugin: MODULE_NAME = "native_function" - validate_skill_name(skill_directory_name) + validate_plugin_name(plugin_directory_name) - skill_directory = os.path.abspath( - os.path.join(parent_directory, skill_directory_name) - ) - native_py_file_path = os.path.join(skill_directory, f"{MODULE_NAME}.py") + plugin_directory = os.path.abspath(os.path.join(parent_directory, plugin_directory_name)) + native_py_file_path = os.path.join(plugin_directory, f"{MODULE_NAME}.py") if not os.path.exists(native_py_file_path): - raise ValueError( - f"Native Skill Python File does not exist: {native_py_file_path}" - ) + raise ValueError(f"Native Plugin Python File does not exist: {native_py_file_path}") - skill_name = os.path.basename(skill_directory) + plugin_name = os.path.basename(plugin_directory) spec = importlib.util.spec_from_file_location(MODULE_NAME, native_py_file_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) class_name = next( - ( - name - for name, cls in inspect.getmembers(module, inspect.isclass) - if cls.__module__ == MODULE_NAME - ), + (name for name, cls in inspect.getmembers(module, inspect.isclass) if cls.__module__ == MODULE_NAME), None, ) if class_name: - skill_obj = getattr(module, class_name)() - return self.import_skill(skill_obj, skill_name) + plugin_obj = getattr(module, class_name)() + return self.import_plugin(plugin_obj, plugin_name) return {} - def import_semantic_skill_from_directory( - self, parent_directory: str, skill_directory_name: str - ) -> Dict[str, SKFunctionBase]: + def import_semantic_plugin_from_directory(self, parent_directory: str, plugin_directory_name: str) -> KernelPlugin: CONFIG_FILE = "config.json" PROMPT_FILE = "skprompt.txt" - validate_skill_name(skill_directory_name) + validate_plugin_name(plugin_directory_name) - skill_directory = os.path.join(parent_directory, skill_directory_name) - skill_directory = os.path.abspath(skill_directory) + plugin_directory = os.path.join(parent_directory, plugin_directory_name) + plugin_directory = os.path.abspath(plugin_directory) - if not os.path.exists(skill_directory): - raise ValueError(f"Skill directory does not exist: 
{skill_directory_name}") + if not os.path.exists(plugin_directory): + raise ValueError(f"Plugin directory does not exist: {plugin_directory_name}") - skill = {} + functions = [] - directories = glob.glob(skill_directory + "/*/") + directories = glob.glob(plugin_directory + "/*/") for directory in directories: dir_name = os.path.dirname(directory) function_name = os.path.basename(dir_name) @@ -782,71 +830,58 @@ def import_semantic_skill_from_directory( if not os.path.exists(prompt_path): continue - config = PromptTemplateConfig() config_path = os.path.join(directory, CONFIG_FILE) with open(config_path, "r") as config_file: - config = config.from_json(config_file.read()) + config = PromptTemplateConfig.from_json(config_file.read()) # Load Prompt Template with open(prompt_path, "r") as prompt_file: - template = PromptTemplate( - prompt_file.read(), self.prompt_template_engine, config - ) + template = PromptTemplate(prompt_file.read(), self.prompt_template_engine, config) # Prepare lambda wrapping AI logic function_config = SemanticFunctionConfig(config, template) - skill[function_name] = self.register_semantic_function( - skill_directory_name, function_name, function_config - ) + functions += [self.register_semantic_function(plugin_directory_name, function_name, function_config)] + + plugin = KernelPlugin(name=plugin_directory_name, functions=functions) - return skill + return plugin def create_semantic_function( self, prompt_template: str, function_name: Optional[str] = None, - skill_name: Optional[str] = None, + plugin_name: Optional[str] = None, description: Optional[str] = None, - max_tokens: int = 256, - temperature: float = 0.0, - top_p: float = 1.0, - presence_penalty: float = 0.0, - frequency_penalty: float = 0.0, - number_of_responses: int = 1, - stop_sequences: Optional[List[str]] = None, - ) -> "SKFunctionBase": - function_name = ( - function_name - if function_name is not None - else f"f_{str(uuid4()).replace('-', '_')}" - ) + **kwargs: Any, + ) -> "KernelFunction": + function_name = function_name if function_name is not None else f"f_{generate_random_ascii_name()}" config = PromptTemplateConfig( - description=( - description - if description is not None - else "Generic function, unknown purpose" - ), + description=(description if description is not None else "Generic function, unknown purpose"), type="completion", - completion=PromptTemplateConfig.CompletionConfig( - temperature, - top_p, - presence_penalty, - frequency_penalty, - max_tokens, - number_of_responses, - stop_sequences if stop_sequences is not None else [], - ), + execution_settings=PromptExecutionSettings(extension_data=kwargs), ) validate_function_name(function_name) - if skill_name is not None: - validate_skill_name(skill_name) + if plugin_name is not None: + validate_plugin_name(plugin_name) template = PromptTemplate(prompt_template, self.prompt_template_engine, config) function_config = SemanticFunctionConfig(config, template) - return self.register_semantic_function( - skill_name, function_name, function_config - ) + return self.register_semantic_function(plugin_name, function_name, function_config) + + def add_function_invoking_handler(self, handler: Callable) -> None: + self.function_invoking_handlers[id(handler)] = handler + + def add_function_invoked_handler(self, handler: Callable) -> None: + self.function_invoked_handlers[id(handler)] = handler + + def remove_function_invoking_handler(self, handler: Callable) -> None: + if id(handler) in self.function_invoking_handlers: + del 
self.function_invoking_handlers[id(handler)] + + def remove_function_invoked_handler(self, handler: Callable) -> None: + if id(handler) in self.function_invoked_handlers: + del self.function_invoked_handlers[id(handler)] diff --git a/python/semantic_kernel/kernel_exception.py b/python/semantic_kernel/kernel_exception.py index 910fce76ac4f..fb8b27542afa 100644 --- a/python/semantic_kernel/kernel_exception.py +++ b/python/semantic_kernel/kernel_exception.py @@ -22,12 +22,14 @@ class ErrorCodes(Enum): InvalidServiceConfiguration = 5 # Service not found. ServiceNotFound = 6 - # Skill collection not set. - SkillCollectionNotSet = 7 + # Plugin collection not set. + PluginCollectionNotSet = 7 # Represents an error that occurs when invoking a function. FunctionInvokeError = 8 # Ambiguous implementation. AmbiguousImplementation = 9 + # Invalid plugin name + InvalidPluginName = 10 # The error code. _error_code: ErrorCodes diff --git a/python/semantic_kernel/kernel_pydantic.py b/python/semantic_kernel/kernel_pydantic.py new file mode 100644 index 000000000000..f718e748f5bf --- /dev/null +++ b/python/semantic_kernel/kernel_pydantic.py @@ -0,0 +1,22 @@ +import sys + +if sys.version_info >= (3, 9): + from typing import Annotated +else: + from typing_extensions import Annotated + +from pydantic import BaseModel, ConfigDict, UrlConstraints +from pydantic.networks import Url + +HttpsUrl = Annotated[Url, UrlConstraints(max_length=2083, allowed_schemes=["https"])] + + +class KernelBaseModel(BaseModel): + """Base class for all pydantic models in the SK.""" + + model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True, validate_assignment=True) + + +# TODO: remove these aliases in SK v1 +PydanticField = KernelBaseModel +KernelGenericModel = KernelBaseModel diff --git a/python/semantic_kernel/memory/memory_record.py b/python/semantic_kernel/memory/memory_record.py index 7d674006c5fb..43a532345e04 100644 --- a/python/semantic_kernel/memory/memory_record.py +++ b/python/semantic_kernel/memory/memory_record.py @@ -53,10 +53,6 @@ def __init__( self._additional_metadata = additional_metadata self._embedding = embedding - @property - def embedding(self) -> ndarray: - return self._embedding - @staticmethod def reference_record( external_id: str, @@ -119,3 +115,27 @@ def local_record( timestamp=timestamp, embedding=embedding, ) + + @property + def id(self): + return self._id + + @property + def embedding(self) -> ndarray: + return self._embedding + + @property + def text(self): + return self._text + + @property + def additional_metadata(self): + return self._additional_metadata + + @property + def description(self): + return self._description + + @property + def timestamp(self): + return self._timestamp diff --git a/python/semantic_kernel/memory/memory_store_base.py b/python/semantic_kernel/memory/memory_store_base.py index 0b11b34b9358..aba2760c42e4 100644 --- a/python/semantic_kernel/memory/memory_store_base.py +++ b/python/semantic_kernel/memory/memory_store_base.py @@ -13,14 +13,14 @@ async def __aenter__(self): return self async def __aexit__(self, *args): - await self.close_async() + await self.close() - async def close_async(self): + async def close(self): """Async close connection, invoked by MemoryStoreBase.__aexit__()""" pass @abstractmethod - async def create_collection_async(self, collection_name: str) -> None: + async def create_collection(self, collection_name: str) -> None: """Creates a new collection in the data store. 
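A small sketch of the new KernelBaseModel and HttpsUrl from kernel_pydantic.py above; the ServiceSettings model is purely illustrative and not part of this change:

from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseModel

class ServiceSettings(KernelBaseModel):
    # validate_assignment=True in the shared model_config re-validates on every assignment
    name: str
    endpoint: HttpsUrl

settings = ServiceSettings(name="demo", endpoint="https://example.com")
settings.name = "renamed"  # re-validated; a non-https endpoint would be rejected by HttpsUrl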
Arguments: @@ -32,7 +32,7 @@ async def create_collection_async(self, collection_name: str) -> None: pass @abstractmethod - async def get_collections_async( + async def get_collections( self, ) -> List[str]: """Gets all collection names in the data store. @@ -43,7 +43,7 @@ async def get_collections_async( pass @abstractmethod - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: """Deletes a collection from the data store. Arguments: @@ -55,7 +55,7 @@ async def delete_collection_async(self, collection_name: str) -> None: pass @abstractmethod - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Determines if a collection exists in the data store. Arguments: @@ -68,7 +68,7 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: pass @abstractmethod - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upserts a memory record into the data store. Does not guarantee that the collection exists. If the record already exists, it will be updated. If the record does not exist, it will be created. @@ -83,9 +83,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: pass @abstractmethod - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: """Upserts a group of memory records into the data store. Does not guarantee that the collection exists. If the record already exists, it will be updated. If the record does not exist, it will be created. @@ -100,9 +98,7 @@ async def upsert_batch_async( pass @abstractmethod - async def get_async( - self, collection_name: str, key: str, with_embedding: bool - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord: """Gets a memory record from the data store. Does not guarantee that the collection exists. Arguments: @@ -116,9 +112,7 @@ async def get_async( pass @abstractmethod - async def get_batch_async( - self, collection_name: str, keys: List[str], with_embeddings: bool - ) -> List[MemoryRecord]: + async def get_batch(self, collection_name: str, keys: List[str], with_embeddings: bool) -> List[MemoryRecord]: """Gets a batch of memory records from the data store. Does not guarantee that the collection exists. Arguments: @@ -132,7 +126,7 @@ async def get_batch_async( pass @abstractmethod - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Removes a memory record from the data store. Does not guarantee that the collection exists. Arguments: @@ -145,7 +139,7 @@ async def remove_async(self, collection_name: str, key: str) -> None: pass @abstractmethod - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Removes a batch of memory records from the data store. Does not guarantee that the collection exists. 
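A hedged sketch of the renamed store interface: MemoryStoreBase is an async context manager whose __aexit__ now awaits close(), and VolatileMemoryStore (updated further down in this diff) stands in as the concrete store:

from semantic_kernel.memory.volatile_memory_store import VolatileMemoryStore

async def list_collections() -> list:
    async with VolatileMemoryStore() as store:
        await store.create_collection("docs")
        return await store.get_collections()  # ["docs"]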
Arguments: @@ -158,7 +152,7 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non pass @abstractmethod - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -182,7 +176,7 @@ async def get_nearest_matches_async( pass @abstractmethod - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, diff --git a/python/semantic_kernel/memory/null_memory.py b/python/semantic_kernel/memory/null_memory.py index 94c2ef0232cf..1c639156206d 100644 --- a/python/semantic_kernel/memory/null_memory.py +++ b/python/semantic_kernel/memory/null_memory.py @@ -7,7 +7,7 @@ class NullMemory(SemanticTextMemoryBase): - async def save_information_async( + async def save_information( self, collection: str, text: str, @@ -15,10 +15,10 @@ async def save_information_async( description: Optional[str] = None, additional_metadata: Optional[str] = None, ) -> None: - """Nullifies behavior of SemanticTextMemoryBase.save_information_async()""" + """Nullifies behavior of SemanticTextMemoryBase.save_information()""" return None - async def save_reference_async( + async def save_reference( self, collection: str, text: str, @@ -27,27 +27,25 @@ async def save_reference_async( description: Optional[str] = None, additional_metadata: Optional[str] = None, ) -> None: - """Nullifies behavior of SemanticTextMemoryBase.save_reference_async()""" + """Nullifies behavior of SemanticTextMemoryBase.save_reference()""" return None - async def get_async( - self, collection: str, query: str - ) -> Optional[MemoryQueryResult]: - """Nullifies behavior of SemanticTextMemoryBase.get_async()""" + async def get(self, collection: str, query: str) -> Optional[MemoryQueryResult]: + """Nullifies behavior of SemanticTextMemoryBase.get()""" return None - async def search_async( + async def search( self, collection: str, query: str, limit: int = 1, min_relevance_score: float = 0.7, ) -> List[MemoryQueryResult]: - """Nullifies behavior of SemanticTextMemoryBase.search_async()""" + """Nullifies behavior of SemanticTextMemoryBase.search()""" return [] - async def get_collections_async(self) -> List[str]: - """Nullifies behavior of SemanticTextMemoryBase.get_collections_async()""" + async def get_collections(self) -> List[str]: + """Nullifies behavior of SemanticTextMemoryBase.get_collections()""" return [] diff --git a/python/semantic_kernel/memory/semantic_text_memory.py b/python/semantic_kernel/memory/semantic_text_memory.py index fce46f34e01f..2ad3b025eff4 100644 --- a/python/semantic_kernel/memory/semantic_text_memory.py +++ b/python/semantic_kernel/memory/semantic_text_memory.py @@ -2,6 +2,8 @@ from typing import List, Optional +from pydantic import PrivateAttr + from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( EmbeddingGeneratorBase, ) @@ -12,12 +14,10 @@ class SemanticTextMemory(SemanticTextMemoryBase): - _storage: MemoryStoreBase - _embeddings_generator: EmbeddingGeneratorBase + _storage: MemoryStoreBase = PrivateAttr() + _embeddings_generator: EmbeddingGeneratorBase = PrivateAttr() - def __init__( - self, storage: MemoryStoreBase, embeddings_generator: EmbeddingGeneratorBase - ) -> None: + def __init__(self, storage: MemoryStoreBase, embeddings_generator: EmbeddingGeneratorBase) -> None: """Initialize a new instance of SemanticTextMemory. Arguments: @@ -28,10 +28,11 @@ def __init__( Returns: None -- None. 
""" + super().__init__() self._storage = storage self._embeddings_generator = embeddings_generator - async def save_information_async( + async def save_information( self, collection: str, text: str, @@ -51,14 +52,10 @@ async def save_information_async( None -- None. """ # TODO: not the best place to create collection, but will address this behavior together with .NET SK - if not await self._storage.does_collection_exist_async( - collection_name=collection - ): - await self._storage.create_collection_async(collection_name=collection) - - embedding = ( - await self._embeddings_generator.generate_embeddings_async([text]) - )[0] + if not await self._storage.does_collection_exist(collection_name=collection): + await self._storage.create_collection(collection_name=collection) + + embedding = (await self._embeddings_generator.generate_embeddings([text]))[0] data = MemoryRecord.local_record( id=id, text=text, @@ -67,9 +64,9 @@ async def save_information_async( embedding=embedding, ) - await self._storage.upsert_async(collection_name=collection, record=data) + await self._storage.upsert(collection_name=collection, record=data) - async def save_reference_async( + async def save_reference( self, collection: str, text: str, @@ -91,14 +88,10 @@ async def save_reference_async( None -- None. """ # TODO: not the best place to create collection, but will address this behavior together with .NET SK - if not await self._storage.does_collection_exist_async( - collection_name=collection - ): - await self._storage.create_collection_async(collection_name=collection) - - embedding = ( - await self._embeddings_generator.generate_embeddings_async([text]) - )[0] + if not await self._storage.does_collection_exist(collection_name=collection): + await self._storage.create_collection(collection_name=collection) + + embedding = (await self._embeddings_generator.generate_embeddings([text]))[0] data = MemoryRecord.reference_record( external_id=external_id, source_name=external_source_name, @@ -107,9 +100,9 @@ async def save_reference_async( embedding=embedding, ) - await self._storage.upsert_async(collection_name=collection, record=data) + await self._storage.upsert(collection_name=collection, record=data) - async def get_async( + async def get( self, collection: str, key: str, @@ -123,10 +116,10 @@ async def get_async( Returns: Optional[MemoryQueryResult] -- The MemoryQueryResult if found, None otherwise. """ - record = await self._storage.get_async(collection_name=collection, key=key) + record = await self._storage.get(collection_name=collection, key=key) return MemoryQueryResult.from_memory_record(record, 1.0) if record else None - async def search_async( + async def search( self, collection: str, query: str, @@ -146,10 +139,8 @@ async def search_async( Returns: List[MemoryQueryResult] -- The list of MemoryQueryResult found. """ - query_embedding = ( - await self._embeddings_generator.generate_embeddings_async([query]) - )[0] - results = await self._storage.get_nearest_matches_async( + query_embedding = (await self._embeddings_generator.generate_embeddings([query]))[0] + results = await self._storage.get_nearest_matches( collection_name=collection, embedding=query_embedding, limit=limit, @@ -159,10 +150,10 @@ async def search_async( return [MemoryQueryResult.from_memory_record(r[0], r[1]) for r in results] - async def get_collections_async(self) -> List[str]: + async def get_collections(self) -> List[str]: """Get the list of collections in the memory (calls the memory store's get_collections method). 
Returns: List[str] -- The list of all the memory collection names. """ - return await self._storage.get_collections_async() + return await self._storage.get_collections() diff --git a/python/semantic_kernel/memory/semantic_text_memory_base.py b/python/semantic_kernel/memory/semantic_text_memory_base.py index 2027b6813126..2b22cbe2ddef 100644 --- a/python/semantic_kernel/memory/semantic_text_memory_base.py +++ b/python/semantic_kernel/memory/semantic_text_memory_base.py @@ -3,15 +3,15 @@ from abc import abstractmethod from typing import List, Optional, TypeVar +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.memory.memory_query_result import MemoryQueryResult -from semantic_kernel.sk_pydantic import PydanticField SemanticTextMemoryT = TypeVar("SemanticTextMemoryT", bound="SemanticTextMemoryBase") -class SemanticTextMemoryBase(PydanticField): +class SemanticTextMemoryBase(KernelBaseModel): @abstractmethod - async def save_information_async( + async def save_information( self, collection: str, text: str, @@ -34,7 +34,7 @@ async def save_information_async( pass @abstractmethod - async def save_reference_async( + async def save_reference( self, collection: str, text: str, @@ -58,7 +58,7 @@ async def save_reference_async( pass @abstractmethod - async def get_async( + async def get( self, collection: str, query: str, @@ -76,7 +76,7 @@ async def get_async( pass @abstractmethod - async def search_async( + async def search( self, collection: str, query: str, @@ -99,7 +99,7 @@ async def search_async( pass @abstractmethod - async def get_collections_async(self) -> List[str]: + async def get_collections(self) -> List[str]: """Get the list of collections in the memory (calls the memory store's get_collections method). Returns: diff --git a/python/semantic_kernel/memory/volatile_memory_store.py b/python/semantic_kernel/memory/volatile_memory_store.py index 5f8f90477e7e..93fff1e6843f 100644 --- a/python/semantic_kernel/memory/volatile_memory_store.py +++ b/python/semantic_kernel/memory/volatile_memory_store.py @@ -1,30 +1,27 @@ # Copyright (c) Microsoft. All rights reserved. +import logging from copy import deepcopy -from logging import Logger -from typing import Dict, List, Optional, Tuple +from typing import Dict, List, Tuple from numpy import array, linalg, ndarray from semantic_kernel.memory.memory_record import MemoryRecord from semantic_kernel.memory.memory_store_base import MemoryStoreBase -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) class VolatileMemoryStore(MemoryStoreBase): _store: Dict[str, Dict[str, MemoryRecord]] - _logger: Logger - def __init__(self, logger: Optional[Logger] = None) -> None: + def __init__(self, **kwargs) -> None: + """Initializes a new instance of the VolatileMemoryStore class.""" + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") self._store = {} - self._logger = logger or NullLogger() - """Initializes a new instance of the VolatileMemoryStore class. - - Arguments: - logger {Optional[Logger]} -- The logger to use. (default: {None}) - """ - async def create_collection_async(self, collection_name: str) -> None: + async def create_collection(self, collection_name: str) -> None: """Creates a new collection if it does not exist. 
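A usage sketch of the renamed high-level memory API (no more `_async` suffixes); `embedding_generator` stands in for any EmbeddingGeneratorBase implementation and is an assumption, not something defined in this diff:

from semantic_kernel.memory.semantic_text_memory import SemanticTextMemory
from semantic_kernel.memory.volatile_memory_store import VolatileMemoryStore

async def remember(embedding_generator) -> list:
    memory = SemanticTextMemory(storage=VolatileMemoryStore(), embeddings_generator=embedding_generator)
    await memory.save_information(collection="notes", id="1", text="Skills are now called plugins.")
    return await memory.search(collection="notes", query="plugins", limit=1)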
Arguments: @@ -38,7 +35,7 @@ async def create_collection_async(self, collection_name: str) -> None: else: self._store[collection_name] = {} - async def get_collections_async( + async def get_collections( self, ) -> List[str]: """Gets the list of collections. @@ -48,7 +45,7 @@ async def get_collections_async( """ return list(self._store.keys()) - async def delete_collection_async(self, collection_name: str) -> None: + async def delete_collection(self, collection_name: str) -> None: """Deletes a collection. Arguments: @@ -60,7 +57,7 @@ async def delete_collection_async(self, collection_name: str) -> None: if collection_name in self._store: del self._store[collection_name] - async def does_collection_exist_async(self, collection_name: str) -> bool: + async def does_collection_exist(self, collection_name: str) -> bool: """Checks if a collection exists. Arguments: @@ -71,7 +68,7 @@ async def does_collection_exist_async(self, collection_name: str) -> bool: """ return collection_name in self._store - async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: + async def upsert(self, collection_name: str, record: MemoryRecord) -> str: """Upserts a record. Arguments: @@ -88,9 +85,7 @@ async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str: self._store[collection_name][record._key] = record return record._key - async def upsert_batch_async( - self, collection_name: str, records: List[MemoryRecord] - ) -> List[str]: + async def upsert_batch(self, collection_name: str, records: List[MemoryRecord]) -> List[str]: """Upserts a batch of records. Arguments: @@ -108,9 +103,7 @@ async def upsert_batch_async( self._store[collection_name][record._key] = record return [record._key for record in records] - async def get_async( - self, collection_name: str, key: str, with_embedding: bool = False - ) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool = False) -> MemoryRecord: """Gets a record. Arguments: @@ -135,7 +128,7 @@ async def get_async( result._embedding = None return result - async def get_batch_async( + async def get_batch( self, collection_name: str, keys: List[str], with_embeddings: bool = False ) -> List[MemoryRecord]: """Gets a batch of records. @@ -151,11 +144,7 @@ async def get_batch_async( if collection_name not in self._store: raise Exception(f"Collection '{collection_name}' does not exist") - results = [ - self._store[collection_name][key] - for key in keys - if key in self._store[collection_name] - ] + results = [self._store[collection_name][key] for key in keys if key in self._store[collection_name]] if not with_embeddings: # create copy of results without embeddings @@ -164,7 +153,7 @@ async def get_batch_async( result._embedding = None return results - async def remove_async(self, collection_name: str, key: str) -> None: + async def remove(self, collection_name: str, key: str) -> None: """Removes a record. Arguments: @@ -182,7 +171,7 @@ async def remove_async(self, collection_name: str, key: str) -> None: del self._store[collection_name][key] - async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None: + async def remove_batch(self, collection_name: str, keys: List[str]) -> None: """Removes a batch of records. 
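A short sketch of the constructor change shown a little above: the per-instance `logger` argument is deprecated in favor of the standard logging module, and passing it only emits a warning:

import logging

from semantic_kernel.memory.volatile_memory_store import VolatileMemoryStore

logging.basicConfig(level=logging.WARNING)
store = VolatileMemoryStore(logger=logging.getLogger("legacy"))  # warns that `logger` is deprecated
store = VolatileMemoryStore()                                    # preferred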
Arguments: @@ -199,7 +188,7 @@ async def remove_batch_async(self, collection_name: str, keys: List[str]) -> Non if key in self._store[collection_name]: del self._store[collection_name][key] - async def get_nearest_match_async( + async def get_nearest_match( self, collection_name: str, embedding: ndarray, @@ -217,7 +206,7 @@ async def get_nearest_match_async( Returns: Tuple[MemoryRecord, float] -- The record and the relevance score. """ - return self.get_nearest_matches_async( + return self.get_nearest_matches( collection_name=collection_name, embedding=embedding, limit=1, @@ -225,7 +214,7 @@ async def get_nearest_match_async( with_embeddings=with_embedding, ) - async def get_nearest_matches_async( + async def get_nearest_matches( self, collection_name: str, embedding: ndarray, @@ -286,9 +275,7 @@ async def get_nearest_matches_async( result[0]._embedding = None return top_results - def compute_similarity_scores( - self, embedding: ndarray, embedding_array: ndarray - ) -> ndarray: + def compute_similarity_scores(self, embedding: ndarray, embedding_array: ndarray) -> ndarray: """Computes the cosine similarity scores between a query embedding and a group of embeddings. Arguments: @@ -309,11 +296,11 @@ def compute_similarity_scores( similarity_scores = array([-1.0] * embedding_array.shape[0]) if valid_indices.any(): - similarity_scores[valid_indices] = embedding.dot( - embedding_array[valid_indices].T - ) / (query_norm * collection_norm[valid_indices]) + similarity_scores[valid_indices] = embedding.dot(embedding_array[valid_indices].T) / ( + query_norm * collection_norm[valid_indices] + ) if not valid_indices.all(): - self._logger.warning( + logger.warning( "Some vectors in the embedding collection are zero vectors." "Ignoring cosine similarity score computation for those vectors." ) diff --git a/python/semantic_kernel/models/chat/chat_message.py b/python/semantic_kernel/models/chat/chat_message.py index 1f132e730f6e..196fed793081 100644 --- a/python/semantic_kernel/models/chat/chat_message.py +++ b/python/semantic_kernel/models/chat/chat_message.py @@ -1,40 +1,39 @@ -"""Class to hold chat messages.""" +# Copyright (c) Microsoft. All rights reserved. from typing import TYPE_CHECKING, Dict, Optional from pydantic import Field +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.semantic_functions.prompt_template import PromptTemplate -from semantic_kernel.sk_pydantic import SKBaseModel if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext -class ChatMessage(SKBaseModel): +class ChatMessage(KernelBaseModel): """Class to hold chat messages.""" role: Optional[str] = "assistant" - fixed_content: Optional[str] = Field(default=None, init=False, alias="content") - content_template: Optional[PromptTemplate] = Field( - default=None, init=True, repr=False - ) + fixed_content: Optional[str] = Field(default=None, init_var=False, serialization_alias="content") + content_template: Optional[PromptTemplate] = Field(default=None, init_var=True, repr=False) @property def content(self) -> Optional[str]: """Return the content of the message.""" return self.fixed_content - async def render_message_async(self, context: "SKContext") -> None: + async def render_message(self, context: "KernelContext") -> None: """Render the message. The first time this is called for a given message, it will render the message with the context at that time. Subsequent calls will do nothing. 
""" if self.fixed_content is None: - self.fixed_content = await self.content_template.render_async(context) + if self.content_template is not None: + self.fixed_content = await self.content_template.render(context) def as_dict(self) -> Dict[str, str]: """Return the message as a dict. - Make sure to call render_message_async first to embed the context in the content. + Make sure to call render_message first to embed the context in the content. """ - return self.dict(exclude_none=True, by_alias=True, exclude={"content_template"}) + return self.model_dump(exclude_none=True, by_alias=True, exclude={"content_template"}) diff --git a/python/semantic_kernel/models/chat/chat_role.py b/python/semantic_kernel/models/chat/chat_role.py new file mode 100644 index 000000000000..c687f6eac2df --- /dev/null +++ b/python/semantic_kernel/models/chat/chat_role.py @@ -0,0 +1,11 @@ +# Copyright (c) Microsoft. All rights reserved. +from enum import Enum + + +class ChatRole(str, Enum): + """Chat role enum""" + + SYSTEM = "system" + USER = "user" + ASSISTANT = "assistant" + TOOL = "tool" diff --git a/python/semantic_kernel/models/chat/finish_reason.py b/python/semantic_kernel/models/chat/finish_reason.py new file mode 100644 index 000000000000..bc1292d7e079 --- /dev/null +++ b/python/semantic_kernel/models/chat/finish_reason.py @@ -0,0 +1,12 @@ +# Copyright (c) Microsoft. All rights reserved. +from enum import Enum + + +class FinishReason(str, Enum): + """Finish Reason enum""" + + STOP = "stop" + LENGTH = "length" + CONTENT_FILTER = "content_filter" + TOOL_CALLS = "tool_calls" + FUNCTION_CALL = "function_call" diff --git a/python/semantic_kernel/models/contents/__init__.py b/python/semantic_kernel/models/contents/__init__.py new file mode 100644 index 000000000000..8413dd826389 --- /dev/null +++ b/python/semantic_kernel/models/contents/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) Microsoft. All rights reserved. +from semantic_kernel.models.contents.chat_message_content import ChatMessageContent +from semantic_kernel.models.contents.kernel_content import KernelContent +from semantic_kernel.models.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.models.contents.streaming_kernel_content import StreamingKernelContent +from semantic_kernel.models.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.models.contents.text_content import TextContent + +__all__ = [ + "ChatMessageContent", + "KernelContent", + "TextContent", + "StreamingKernelContent", + "StreamingChatMessageContent", + "StreamingTextContent", +] diff --git a/python/semantic_kernel/models/contents/chat_message_content.py b/python/semantic_kernel/models/contents/chat_message_content.py new file mode 100644 index 000000000000..392f226bbb61 --- /dev/null +++ b/python/semantic_kernel/models/contents/chat_message_content.py @@ -0,0 +1,33 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import Optional + +from semantic_kernel.models.chat.chat_role import ChatRole +from semantic_kernel.models.contents.kernel_content import KernelContent + + +class ChatMessageContent(KernelContent): + """This is the base class for chat message response content. + + All Chat Completion Services should return a instance of this class as response. + Or they can implement their own subclass of this class and return an instance. 
+ + Args: + inner_content: Optional[Any] - The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer can leverage the full thing. + ai_model_id: Optional[str] - The id of the AI model that generated this response. + metadata: Dict[str, Any] - Any metadata that should be attached to the response. + role: ChatRole - The role of the chat message. + content: Optional[str] - The text of the response. + encoding: Optional[str] - The encoding of the text. + + Methods: + __str__: Returns the content of the response. + """ + + role: ChatRole + content: Optional[str] = None + encoding: Optional[str] = None + + def __str__(self) -> str: + return self.content diff --git a/python/semantic_kernel/models/contents/kernel_content.py b/python/semantic_kernel/models/contents/kernel_content.py new file mode 100644 index 000000000000..18efc2d5818c --- /dev/null +++ b/python/semantic_kernel/models/contents/kernel_content.py @@ -0,0 +1,19 @@ +# Copyright (c) Microsoft. All rights reserved. +from abc import ABC, abstractmethod +from typing import Any, Dict, Optional + +from pydantic import Field + +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class KernelContent(KernelBaseModel, ABC): + """Base class for all kernel contents.""" + + inner_content: Optional[Any] = None + ai_model_id: Optional[str] = None + metadata: Dict[str, Any] = Field(default_factory=dict) + + @abstractmethod + def __str__(self) -> str: + pass diff --git a/python/semantic_kernel/models/contents/streaming_chat_message_content.py b/python/semantic_kernel/models/contents/streaming_chat_message_content.py new file mode 100644 index 000000000000..1b68534c4c08 --- /dev/null +++ b/python/semantic_kernel/models/contents/streaming_chat_message_content.py @@ -0,0 +1,68 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import Optional + +from semantic_kernel.models.chat.chat_role import ChatRole +from semantic_kernel.models.chat.finish_reason import FinishReason +from semantic_kernel.models.contents.streaming_kernel_content import StreamingKernelContent + + +class StreamingChatMessageContent(StreamingKernelContent): + """This is the base class for streaming chat message response content. + + All Chat Completion Services should return an instance of this class as a streaming response, + where each part of the response, as it is streamed, is converted to an instance of this class; + the end user can either use each part directly or gather the parts and combine them into a + new instance. A service can implement its own subclass of this class and return instances of that. + + Args: + choice_index: int - The index of the choice that generated this response. + inner_content: Optional[Any] - The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer can leverage the full thing. + ai_model_id: Optional[str] - The id of the AI model that generated this response. + metadata: Dict[str, Any] - Any metadata that should be attached to the response. + role: Optional[ChatRole] - The role of the chat message, defaults to ASSISTANT. + content: Optional[str] - The text of the response. + encoding: Optional[str] - The encoding of the text. + + Methods: + __str__: Returns the content of the response. + __bytes__: Returns the content of the response encoded in the encoding. + __add__: Combines two StreamingChatMessageContent instances.
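A sketch of how the streamed chunks described above can be combined with `+`; the two chunks are illustrative, and choice_index, ai_model_id and encoding have to match, otherwise __add__ raises ValueError:

from semantic_kernel.models.chat.chat_role import ChatRole
from semantic_kernel.models.contents import StreamingChatMessageContent

first = StreamingChatMessageContent(choice_index=0, role=ChatRole.ASSISTANT, content="Hello, ")
second = StreamingChatMessageContent(choice_index=0, role=ChatRole.ASSISTANT, content="world!")
combined = first + second
assert str(combined) == "Hello, world!"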
+ """ + + role: Optional[ChatRole] = ChatRole.ASSISTANT + content: Optional[str] = None + encoding: Optional[str] = None + finish_reason: Optional[FinishReason] = None + + def __str__(self) -> str: + return self.content or "" + + def __bytes__(self) -> bytes: + return self.content.encode(self.encoding if self.encoding else "utf-8") if self.content else b"" + + def __add__(self, other: "StreamingChatMessageContent") -> "StreamingChatMessageContent": + """When combining two StreamingChatMessageContent instances, the content fields are combined. + + The inner_content of the first one is used, ai_model_id and encoding should be the same, + if role is set, they should be the same. + """ + if self.choice_index != other.choice_index: + raise ValueError("Cannot add StreamingChatMessageContent with different choice_index") + if self.ai_model_id != other.ai_model_id: + raise ValueError("Cannot add StreamingChatMessageContent from different ai_model_id") + if self.encoding != other.encoding: + raise ValueError("Cannot add StreamingChatMessageContent with different encoding") + if self.role and other.role and self.role != other.role: + raise ValueError("Cannot add StreamingChatMessageContent with different role") + return StreamingChatMessageContent( + choice_index=self.choice_index, + inner_content=self.inner_content, + ai_model_id=self.ai_model_id, + metadata=self.metadata, + role=self.role, + content=(self.content or "") + (other.content or ""), + encoding=self.encoding, + finish_reason=self.finish_reason or other.finish_reason, + ) diff --git a/python/semantic_kernel/models/contents/streaming_kernel_content.py b/python/semantic_kernel/models/contents/streaming_kernel_content.py new file mode 100644 index 000000000000..a1ad73790fe2 --- /dev/null +++ b/python/semantic_kernel/models/contents/streaming_kernel_content.py @@ -0,0 +1,28 @@ +# Copyright (c) Microsoft. All rights reserved. +from abc import ABC, abstractmethod +from typing import Any, Dict, Optional + +from pydantic import Field + +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class StreamingKernelContent(KernelBaseModel, ABC): + """Base class for all streaming kernel contents.""" + + choice_index: int + inner_content: Optional[Any] = None + ai_model_id: Optional[str] = None + metadata: Dict[str, Any] = Field(default_factory=dict) + + @abstractmethod + def __str__(self) -> str: + pass + + @abstractmethod + def __bytes__(self) -> bytes: + pass + + @abstractmethod + def __add__(self, other: "StreamingKernelContent") -> "StreamingKernelContent": + pass diff --git a/python/semantic_kernel/models/contents/streaming_text_content.py b/python/semantic_kernel/models/contents/streaming_text_content.py new file mode 100644 index 000000000000..0b9bfb29f933 --- /dev/null +++ b/python/semantic_kernel/models/contents/streaming_text_content.py @@ -0,0 +1,56 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import Optional + +from semantic_kernel.models.contents.streaming_kernel_content import StreamingKernelContent + + +class StreamingTextContent(StreamingKernelContent): + """This is the base class for streaming text response content. + + All Text Completion Services should return a instance of this class as streaming response. + Or they can implement their own subclass of this class and return an instance. + + Args: + choice_index: int - The index of the choice that generated this response. 
+ inner_content: Optional[Any] - The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer can leverage the full thing. + ai_model_id: Optional[str] - The id of the AI model that generated this response. + metadata: Dict[str, Any] - Any metadata that should be attached to the response. + text: Optional[str] - The text of the response. + encoding: Optional[str] - The encoding of the text. + + Methods: + __str__: Returns the text of the response. + __bytes__: Returns the content of the response encoded in the encoding. + __add__: Combines two StreamingTextContent instances. + """ + + text: Optional[str] = None + encoding: Optional[str] = None + + def __str__(self) -> str: + return self.text + + def __bytes__(self) -> bytes: + return self.text.encode(self.encoding if self.encoding else "utf-8") if self.text else b"" + + def __add__(self, other: "StreamingTextContent") -> "StreamingTextContent": + """When combining two StreamingTextContent instances, the text fields are combined. + + The inner_content of the first one is used, choice_index, ai_model_id and encoding should be the same. + """ + if self.choice_index != other.choice_index: + raise ValueError("Cannot add StreamingTextContent with different choice_index") + if self.ai_model_id != other.ai_model_id: + raise ValueError("Cannot add StreamingTextContent from different ai_model_id") + if self.encoding != other.encoding: + raise ValueError("Cannot add StreamingTextContent with different encoding") + return StreamingTextContent( + choice_index=self.choice_index, + inner_content=self.inner_content, + ai_model_id=self.ai_model_id, + metadata=self.metadata, + text=(self.text or "") + (other.text or ""), + encoding=self.encoding, + ) diff --git a/python/semantic_kernel/models/contents/text_content.py b/python/semantic_kernel/models/contents/text_content.py new file mode 100644 index 000000000000..03f555e34874 --- /dev/null +++ b/python/semantic_kernel/models/contents/text_content.py @@ -0,0 +1,30 @@ +# Copyright (c) Microsoft. All rights reserved. +from typing import Optional + +from semantic_kernel.models.contents.kernel_content import KernelContent + + +class TextContent(KernelContent): + """This is the base class for text response content. + + All Text Completion Services should return a instance of this class as response. + Or they can implement their own subclass of this class and return an instance. + + Args: + inner_content: Optional[Any] - The inner content of the response, + this should hold all the information from the response so even + when not creating a subclass a developer can leverage the full thing. + ai_model_id: Optional[str] - The id of the AI model that generated this response. + metadata: Dict[str, Any] - Any metadata that should be attached to the response. + text: Optional[str] - The text of the response. + encoding: Optional[str] - The encoding of the text. + + Methods: + __str__: Returns the text of the response. 
+ """ + + text: Optional[str] = None + encoding: Optional[str] = None + + def __str__(self) -> str: + return self.text diff --git a/python/semantic_kernel/orchestration/__init__.py b/python/semantic_kernel/orchestration/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/python/semantic_kernel/orchestration/context_variables.py b/python/semantic_kernel/orchestration/context_variables.py index 8f7767b5776f..da2690a59cca 100644 --- a/python/semantic_kernel/orchestration/context_variables.py +++ b/python/semantic_kernel/orchestration/context_variables.py @@ -3,10 +3,10 @@ import pydantic as pdt -from semantic_kernel.sk_pydantic import SKBaseModel +from semantic_kernel.kernel_pydantic import KernelBaseModel -class ContextVariables(SKBaseModel): +class ContextVariables(KernelBaseModel): """Class for the context variables, maintains a dict with keys and values for the variables. The keys are all converted to lower case, both in setting and in getting. @@ -25,9 +25,7 @@ class ContextVariables(SKBaseModel): variables: Dict[str, str] = pdt.Field(default_factory=dict) _main_key: str = pdt.PrivateAttr(default="input") - def __init__( - self, content: Optional[str] = None, variables: Optional[Dict[str, str]] = {} - ) -> None: + def __init__(self, content: Optional[str] = None, variables: Optional[Dict[str, str]] = {}) -> None: """ Initialize the ContextVariables instance with an optional content string. @@ -52,9 +50,7 @@ def update(self, content: str) -> "ContextVariables": self.variables[self._main_key] = content return self - def merge_or_overwrite( - self, new_vars: "ContextVariables", overwrite: bool = False - ) -> "ContextVariables": + def merge_or_overwrite(self, new_vars: "ContextVariables", overwrite: bool = False) -> "ContextVariables": """Merge or overwrite the current variables with the new variables. Arguments: diff --git a/python/semantic_kernel/orchestration/delegate_handlers.py b/python/semantic_kernel/orchestration/delegate_handlers.py index b745ee5eb874..de9910086108 100644 --- a/python/semantic_kernel/orchestration/delegate_handlers.py +++ b/python/semantic_kernel/orchestration/delegate_handlers.py @@ -1,9 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. 
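A small usage sketch of ContextVariables after its move to KernelBaseModel; values live in a plain dict keyed by lower-case names, with "input" as the main key, and update() overwrites that main value:

from semantic_kernel.orchestration.context_variables import ContextVariables

variables = ContextVariables(content="first draft")
variables.update("final draft")          # overwrites the main "input" value
assert variables.variables["input"] == "final draft"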
- from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.orchestration.delegate_types import DelegateTypes -from semantic_kernel.sk_pydantic import PydanticField def _handles(delegate_type): @@ -14,7 +13,7 @@ def decorator(function): return decorator -class DelegateHandlers(PydanticField): +class DelegateHandlers(KernelBaseModel): @staticmethod @_handles(DelegateTypes.Void) async def handle_void(function, context): @@ -34,28 +33,26 @@ async def handle_out_task_string(function, context): return context @staticmethod - @_handles(DelegateTypes.InSKContext) - async def handle_in_sk_context(function, context): + @_handles(DelegateTypes.InKernelContext) + async def handle_in_kernel_context(function, context): function(context) return context @staticmethod - @_handles(DelegateTypes.InSKContextOutString) - async def handle_in_sk_context_out_string(function, context): + @_handles(DelegateTypes.InKernelContextOutString) + async def handle_in_kernel_context_out_string(function, context): context.variables.update(function(context)) return context @staticmethod - @_handles(DelegateTypes.InSKContextOutTaskString) - async def handle_in_sk_context_out_task_string(function, context): + @_handles(DelegateTypes.InKernelContextOutTaskString) + async def handle_in_kernel_context_out_task_string(function, context): context.variables.update(await function(context)) return context @staticmethod - @_handles(DelegateTypes.ContextSwitchInSKContextOutTaskSKContext) - async def handle_context_switch_in_sk_context_out_task_sk_context( - function, context - ): + @_handles(DelegateTypes.ContextSwitchInKernelContextOutTaskKernelContext) + async def handle_context_switch_in_kernel_context_out_task_kernel_context(function, context): # Note: Context Switching: allows the function to replace with a # new context, e.g. to branch execution path context = await function(context) @@ -99,9 +96,7 @@ async def handle_in_string_and_context_out_task_string(function, context): @staticmethod @_handles(DelegateTypes.ContextSwitchInStringAndContextOutTaskContext) - async def handle_context_switch_in_string_and_context_out_task_context( - function, context - ): + async def handle_context_switch_in_string_and_context_out_task_context(function, context): # Note: Context Switching: allows the function to replace with a # new context, e.g. 
to branch execution path context = await function(context.variables.input, context) @@ -131,6 +126,30 @@ async def handle_out_task(function, context): await function() return context + @staticmethod + @_handles(DelegateTypes.OutAsyncGenerator) + async def handle_out_async_generator(function, context): + async for partial in function(): + yield partial + + @staticmethod + @_handles(DelegateTypes.InStringOutAsyncGenerator) + async def handle_in_string_out_async_generator(function, context): + async for partial in function(context.variables.input): + yield partial + + @staticmethod + @_handles(DelegateTypes.InContextOutAsyncGenerator) + async def handle_in_context_out_async_generator(function, context): + async for partial in function(context): + yield partial + + @staticmethod + @_handles(DelegateTypes.InStringAndContextOutAsyncGenerator) + async def handle_in_string_and_context_out_async_generator(function, context): + async for partial in function(context.variables.input, context): + yield partial + @staticmethod @_handles(DelegateTypes.Unknown) async def handle_unknown(function, context): diff --git a/python/semantic_kernel/orchestration/delegate_inference.py b/python/semantic_kernel/orchestration/delegate_inference.py index da59c50df21f..49ce6c05ff1d 100644 --- a/python/semantic_kernel/orchestration/delegate_inference.py +++ b/python/semantic_kernel/orchestration/delegate_inference.py @@ -1,11 +1,11 @@ # Copyright (c) Microsoft. All rights reserved. -from inspect import Signature, iscoroutinefunction, signature +from inspect import Signature, isasyncgenfunction, iscoroutinefunction, signature from typing import NoReturn from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.orchestration.delegate_types import DelegateTypes -from semantic_kernel.sk_pydantic import PydanticField def _infers(delegate_type): @@ -19,7 +19,7 @@ def decorator(function): def _is_annotation_of_type(annotation, type_to_match) -> bool: return (annotation is type_to_match) or ( # Handle cases where the annotation is provided as a string to avoid circular imports - # for example: `async def read_async(self, context: "SKContext"):` in file_io_skill.py + # for example: `async def read(self, context: "KernelContext"):` in file_io_plugin.py isinstance(annotation, str) and annotation == type_to_match.__name__ ) @@ -34,9 +34,9 @@ def _return_is_str(signature: Signature) -> bool: def _return_is_context(signature: Signature) -> bool: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext - return _is_annotation_of_type(signature.return_annotation, SKContext) + return _is_annotation_of_type(signature.return_annotation, KernelContext) def _return_is_none(signature: Signature) -> bool: @@ -47,9 +47,7 @@ def _no_return(signature: Signature) -> bool: return signature.return_annotation is Signature.empty -def _has_first_param_with_type( - signature: Signature, annotation, only: bool = True -) -> bool: +def _has_first_param_with_type(signature: Signature, annotation, only: bool = True) -> bool: if len(signature.parameters) < 1: return False if only and len(signature.parameters) != 1: @@ -60,12 +58,12 @@ def _has_first_param_with_type( def _has_two_params_second_is_context(signature: Signature) -> bool: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext if 
len(signature.parameters) < 2: return False second_param = list(signature.parameters.values())[1] - return _is_annotation_of_type(second_param.annotation, SKContext) + return _is_annotation_of_type(second_param.annotation, KernelContext) def _first_param_is_str(signature: Signature, only: bool = True) -> bool: @@ -73,66 +71,64 @@ def _first_param_is_str(signature: Signature, only: bool = True) -> bool: def _first_param_is_context(signature: Signature) -> bool: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext - return _has_first_param_with_type(signature, SKContext) + return _has_first_param_with_type(signature, KernelContext) -class DelegateInference(PydanticField): +class DelegateInference(KernelBaseModel): @staticmethod @_infers(DelegateTypes.Void) - def infer_void(signature: Signature, awaitable: bool) -> bool: + def infer_void(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _has_no_params(signature) matches = matches and _return_is_none(signature) - matches = matches and not awaitable + matches = matches and not awaitable and not is_asyncgenfunc return matches @staticmethod @_infers(DelegateTypes.OutString) - def infer_out_string(signature: Signature, awaitable: bool) -> bool: + def infer_out_string(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _has_no_params(signature) matches = matches and _return_is_str(signature) - matches = matches and not awaitable + matches = matches and not awaitable and not is_asyncgenfunc return matches @staticmethod @_infers(DelegateTypes.OutTaskString) - def infer_out_task_string(signature: Signature, awaitable: bool) -> bool: + def infer_out_task_string(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _has_no_params(signature) matches = matches and _return_is_str(signature) matches = matches and awaitable return matches @staticmethod - @_infers(DelegateTypes.InSKContext) - def infer_in_sk_context(signature: Signature, awaitable: bool) -> bool: + @_infers(DelegateTypes.InKernelContext) + def infer_in_kernel_context(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_context(signature) matches = matches and _return_is_none(signature) - matches = matches and not awaitable + matches = matches and not awaitable and not is_asyncgenfunc return matches @staticmethod - @_infers(DelegateTypes.InSKContextOutString) - def infer_in_sk_context_out_string(signature: Signature, awaitable: bool) -> bool: + @_infers(DelegateTypes.InKernelContextOutString) + def infer_in_kernel_context_out_string(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_context(signature) matches = matches and _return_is_str(signature) - matches = matches and not awaitable + matches = matches and not awaitable and not is_asyncgenfunc return matches @staticmethod - @_infers(DelegateTypes.InSKContextOutTaskString) - def infer_in_sk_context_out_task_string( - signature: Signature, awaitable: bool - ) -> bool: + @_infers(DelegateTypes.InKernelContextOutTaskString) + def infer_in_kernel_context_out_task_string(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_context(signature) matches = matches and _return_is_str(signature) matches = matches and awaitable return matches @staticmethod - @_infers(DelegateTypes.ContextSwitchInSKContextOutTaskSKContext) - def 
infer_context_switch_in_sk_context_out_task_sk_context( - signature: Signature, awaitable: bool + @_infers(DelegateTypes.ContextSwitchInKernelContextOutTaskKernelContext) + def infer_context_switch_in_kernel_context_out_task_kernel_context( + signature: Signature, awaitable: bool, is_asyncgenfunc: bool ) -> bool: matches = _first_param_is_context(signature) matches = matches and _return_is_context(signature) @@ -141,23 +137,23 @@ def infer_context_switch_in_sk_context_out_task_sk_context( @staticmethod @_infers(DelegateTypes.InString) - def infer_in_string(signature: Signature, awaitable: bool) -> bool: + def infer_in_string(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_str(signature) matches = matches and _return_is_none(signature) - matches = matches and not awaitable + matches = matches and not awaitable and not is_asyncgenfunc return matches @staticmethod @_infers(DelegateTypes.InStringOutString) - def infer_in_string_out_string(signature: Signature, awaitable: bool) -> bool: + def infer_in_string_out_string(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_str(signature) matches = matches and _return_is_str(signature) - matches = matches and not awaitable + matches = matches and not awaitable and not is_asyncgenfunc return matches @staticmethod @_infers(DelegateTypes.InStringOutTaskString) - def infer_in_string_out_task_string(signature: Signature, awaitable: bool) -> bool: + def infer_in_string_out_task_string(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_str(signature) matches = matches and _return_is_str(signature) matches = matches and awaitable @@ -165,28 +161,26 @@ def infer_in_string_out_task_string(signature: Signature, awaitable: bool) -> bo @staticmethod @_infers(DelegateTypes.InStringAndContext) - def infer_in_string_and_context(signature: Signature, awaitable: bool) -> bool: + def infer_in_string_and_context(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_str(signature, only=False) matches = matches and _has_two_params_second_is_context(signature) matches = matches and _return_is_none(signature) - matches = matches and not awaitable + matches = matches and not awaitable and not is_asyncgenfunc return matches @staticmethod @_infers(DelegateTypes.InStringAndContextOutString) - def infer_in_string_and_context_out_string( - signature: Signature, awaitable: bool - ) -> bool: + def infer_in_string_and_context_out_string(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_str(signature, only=False) matches = matches and _has_two_params_second_is_context(signature) matches = matches and _return_is_str(signature) - matches = matches and not awaitable + matches = matches and not awaitable and not is_asyncgenfunc return matches @staticmethod @_infers(DelegateTypes.InStringAndContextOutTaskString) def infer_in_string_and_context_out_task_string( - signature: Signature, awaitable: bool + signature: Signature, awaitable: bool, is_asyncgenfunc: bool ) -> bool: matches = _first_param_is_str(signature, only=False) matches = matches and _has_two_params_second_is_context(signature) @@ -197,7 +191,7 @@ def infer_in_string_and_context_out_task_string( @staticmethod @_infers(DelegateTypes.ContextSwitchInStringAndContextOutTaskContext) def infer_context_switch_in_string_and_context_out_task_context( - signature: Signature, awaitable: bool + signature: 
Signature, awaitable: bool, is_asyncgenfunc: bool ) -> bool: matches = _first_param_is_str(signature, only=False) matches = matches and _has_two_params_second_is_context(signature) @@ -207,7 +201,7 @@ def infer_context_switch_in_string_and_context_out_task_context( @staticmethod @_infers(DelegateTypes.InStringOutTask) - def infer_in_string_out_task(signature: Signature, awaitable: bool) -> bool: + def infer_in_string_out_task(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_str(signature) matches = matches and _return_is_none(signature) matches = matches and awaitable @@ -215,7 +209,7 @@ def infer_in_string_out_task(signature: Signature, awaitable: bool) -> bool: @staticmethod @_infers(DelegateTypes.InContextOutTask) - def infer_in_context_out_task(signature: Signature, awaitable: bool) -> bool: + def infer_in_context_out_task(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_context(signature) matches = matches and _return_is_none(signature) matches = matches and awaitable @@ -223,9 +217,7 @@ def infer_in_context_out_task(signature: Signature, awaitable: bool) -> bool: @staticmethod @_infers(DelegateTypes.InStringAndContextOutTask) - def infer_in_string_and_context_out_task( - signature: Signature, awaitable: bool - ) -> bool: + def infer_in_string_and_context_out_task(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _first_param_is_str(signature, only=False) matches = matches and _has_two_params_second_is_context(signature) matches = matches and _return_is_none(signature) @@ -234,18 +226,48 @@ def infer_in_string_and_context_out_task( @staticmethod @_infers(DelegateTypes.OutTask) - def infer_out_task(signature: Signature, awaitable: bool) -> bool: + def infer_out_task(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: matches = _has_no_params(signature) matches = matches and awaitable return matches + @staticmethod + @_infers(DelegateTypes.OutAsyncGenerator) + def infer_out_async_generator(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: + matches = _has_no_params(signature) + matches = matches and is_asyncgenfunc + return matches + + @staticmethod + @_infers(DelegateTypes.InStringOutAsyncGenerator) + def infer_in_string_out_async_generator(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: + matches = _first_param_is_str(signature) + matches = matches and is_asyncgenfunc + return matches + + @staticmethod + @_infers(DelegateTypes.InContextOutAsyncGenerator) + def infer_in_context_out_async_generator(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> bool: + matches = _first_param_is_context(signature) + matches = matches and is_asyncgenfunc + return matches + + @staticmethod + @_infers(DelegateTypes.InStringAndContextOutAsyncGenerator) + def infer_in_string_and_context_out_async_generator( + signature: Signature, awaitable: bool, is_asyncgenfunc: bool + ) -> bool: + matches = _first_param_is_str(signature, only=False) + matches = matches and _has_two_params_second_is_context(signature) + matches = matches and is_asyncgenfunc + return matches + @staticmethod @_infers(DelegateTypes.Unknown) - def infer_unknown(signature: Signature, awaitable: bool) -> NoReturn: + def infer_unknown(signature: Signature, awaitable: bool, is_asyncgenfunc: bool) -> NoReturn: raise KernelException( KernelException.ErrorCodes.FunctionTypeNotSupported, - "Invalid function type detected, unable to infer 
DelegateType." - + f" Function: {signature}", + "Invalid function type detected, unable to infer DelegateType." + f" Function: {signature}", ) @staticmethod @@ -260,13 +282,14 @@ def infer_delegate_type(function) -> DelegateTypes: ) awaitable = iscoroutinefunction(function) + is_asyncgenfunc = isasyncgenfunction(function) for name, value in DelegateInference.__dict__.items(): wrapped = getattr(value, "__wrapped__", getattr(value, "__func__", None)) if name.startswith("infer_") and hasattr(wrapped, "_delegate_type"): # Get the delegate type - if wrapped(function_signature, awaitable): + if wrapped(function_signature, awaitable, is_asyncgenfunc): return wrapped._delegate_type return DelegateTypes.Unknown diff --git a/python/semantic_kernel/orchestration/delegate_types.py b/python/semantic_kernel/orchestration/delegate_types.py index 367062ca71e4..b0f505483c98 100644 --- a/python/semantic_kernel/orchestration/delegate_types.py +++ b/python/semantic_kernel/orchestration/delegate_types.py @@ -8,10 +8,10 @@ class DelegateTypes(Enum): Void = 1 OutString = 2 OutTaskString = 3 - InSKContext = 4 - InSKContextOutString = 5 - InSKContextOutTaskString = 6 - ContextSwitchInSKContextOutTaskSKContext = 7 + InKernelContext = 4 + InKernelContextOutString = 5 + InKernelContextOutTaskString = 6 + ContextSwitchInKernelContextOutTaskKernelContext = 7 InString = 8 InStringOutString = 9 InStringOutTaskString = 10 @@ -23,3 +23,7 @@ class DelegateTypes(Enum): InContextOutTask = 16 InStringAndContextOutTask = 17 OutTask = 18 + OutAsyncGenerator = 19 + InStringOutAsyncGenerator = 20 + InContextOutAsyncGenerator = 21 + InStringAndContextOutAsyncGenerator = 22 diff --git a/python/semantic_kernel/orchestration/kernel_context.py b/python/semantic_kernel/orchestration/kernel_context.py new file mode 100644 index 000000000000..04c8bd114ded --- /dev/null +++ b/python/semantic_kernel/orchestration/kernel_context.py @@ -0,0 +1,170 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +from typing import Any, Dict, Generic, Optional, Union + +from pydantic import Field, PrivateAttr + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.memory.semantic_text_memory_base import ( + SemanticTextMemoryBase, + SemanticTextMemoryT, +) +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.plugin_definition.kernel_plugin_collection import KernelPluginCollection + +logger: logging.Logger = logging.getLogger(__name__) + + +class KernelContext(KernelBaseModel, Generic[SemanticTextMemoryT]): + """Semantic Kernel context.""" + + memory: SemanticTextMemoryT + variables: ContextVariables + # This field can be used to hold anything that is not a string + plugins: KernelPluginCollection = Field(default_factory=KernelPluginCollection) + _objects: Dict[str, Any] = PrivateAttr(default_factory=dict) + _error_occurred: bool = PrivateAttr(False) + _last_exception: Optional[Exception] = PrivateAttr(None) + _last_error_description: str = PrivateAttr("") + + def __init__( + self, + variables: ContextVariables, + memory: SemanticTextMemoryBase, + plugins: Union[KernelPluginCollection, None], + **kwargs, + # TODO: cancellation token? + ) -> None: + """ + Initializes a new instance of the KernelContext class. + + Arguments: + variables {ContextVariables} -- The context variables. + memory {SemanticTextMemoryBase} -- The semantic text memory. + plugins {KernelPluginCollection} -- The kernel plugin collection. 
+ """ + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") + + if plugins is None: + plugins = KernelPluginCollection() + + super().__init__(variables=variables, memory=memory, plugins=plugins) + + def fail(self, error_description: str, exception: Optional[Exception] = None): + """ + Call this method to signal that an error occurred. + In the usual scenarios, this is also how execution is stopped + e.g., to inform the user or take necessary steps. + + Arguments: + error_description {str} -- The error description. + + Keyword Arguments: + exception {Exception} -- The exception (default: {None}). + """ + self._error_occurred = True + self._last_error_description = error_description + self._last_exception = exception + + @property + def result(self) -> str: + """ + Print the processed input, aka the current data + after any processing that has occurred. + + Returns: + str -- Processed input, aka result. + """ + return str(self.variables) + + @property + def error_occurred(self) -> bool: + """ + Whether an error occurred while executing functions in the pipeline. + + Returns: + bool -- Whether an error occurred. + """ + return self._error_occurred + + @property + def last_error_description(self) -> str: + """ + The last error description. + + Returns: + str -- The last error description. + """ + return self._last_error_description + + @property + def last_exception(self) -> Optional[Exception]: + """ + When an error occurs, this is the most recent exception. + + Returns: + Exception -- The most recent exception. + """ + return self._last_exception + + @property + def objects(self) -> Dict[str, Any]: + """ + The objects dictionary. + + Returns: + Dict[str, Any] -- The objects dictionary. + """ + return self._objects + + def __setitem__(self, key: str, value: Any) -> None: + """ + Sets a context variable. + + Arguments: + key {str} -- The variable name. + value {Any} -- The variable value. + """ + self.variables[key] = value + + def __getitem__(self, key: str) -> Any: + """ + Gets a context variable. + + Arguments: + key {str} -- The variable name. + + Returns: + Any -- The variable value. + """ + return self.variables[key] + + def func(self, plugin_name: str, function_name: str): + """ + Access registered functions by plugin + name. Not case sensitive. + The function might be native or semantic, it's up to the caller + handling it. + + Arguments: + plugin_name {str} -- The plugin name. + function_name {str} -- The function name. + + Returns: + KernelFunction -- The function. + """ + if self.plugins is None: + raise ValueError("The plugin collection hasn't been set") + assert self.plugins is not None # for type checker + + if self.plugins[plugin_name][function_name].is_native: + return self.plugins.get_native_function(plugin_name, function_name) + + return self.plugins[plugin_name][function_name] + + def __str__(self) -> str: + if self._error_occurred: + return f"Error: {self._last_error_description}" + + return self.result diff --git a/python/semantic_kernel/orchestration/kernel_function.py b/python/semantic_kernel/orchestration/kernel_function.py new file mode 100644 index 000000000000..83aed99061c2 --- /dev/null +++ b/python/semantic_kernel/orchestration/kernel_function.py @@ -0,0 +1,540 @@ +# Copyright (c) Microsoft. All rights reserved. 
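For reference, a minimal sketch of how the extended delegate-type inference above behaves once is_asyncgenfunc is threaded through the matchers; the sample function below is illustrative and not part of the patch, only DelegateInference and DelegateTypes are taken from it:

from inspect import isasyncgenfunction
from typing import AsyncIterable

from semantic_kernel.orchestration.delegate_inference import DelegateInference
from semantic_kernel.orchestration.delegate_types import DelegateTypes

# An illustrative native method written as an async generator: a single string
# parameter, yielding strings.
async def stream_words(input: str) -> AsyncIterable[str]:
    for word in input.split():
        yield word

assert isasyncgenfunction(stream_words)
# iscoroutinefunction() is False for async generator functions, so the old
# matchers would have fallen through to Unknown; with the new is_asyncgenfunc
# flag the function is classified as a streaming delegate instead.
assert DelegateInference.infer_delegate_type(stream_words) == DelegateTypes.InStringOutAsyncGenerator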
+ +import asyncio +import logging +import platform +import sys +from enum import Enum +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union + +from pydantic import Field, StringConstraints + +if sys.version_info >= (3, 9): + from typing import Annotated +else: + from typing_extensions import Annotated + +from semantic_kernel.connectors.ai.chat_completion_client_base import ( + ChatCompletionClientBase, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.connectors.ai.text_completion_client_base import ( + TextCompletionClientBase, +) +from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.memory.null_memory import NullMemory +from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase +from semantic_kernel.models.contents.chat_message_content import ChatMessageContent +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.orchestration.delegate_handlers import DelegateHandlers +from semantic_kernel.orchestration.delegate_inference import DelegateInference +from semantic_kernel.orchestration.delegate_types import DelegateTypes +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.parameter_view import ParameterView +from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate +from semantic_kernel.semantic_functions.semantic_function_config import ( + SemanticFunctionConfig, +) + +if TYPE_CHECKING: + from semantic_kernel.orchestration.kernel_context import KernelContext + from semantic_kernel.plugin_definition.kernel_plugin_collection import KernelPluginCollection + +# TODO: is this needed anymore after sync code removal? +if platform.system() == "Windows" and sys.version_info >= (3, 8, 0): + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + +logger: logging.Logger = logging.getLogger(__name__) + + +def store_results(chat_prompt: ChatPromptTemplate, results: List["ChatMessageContent"]): + """Stores specific results in the context and chat prompt.""" + if hasattr(results[0], "tool_message") and results[0].tool_message is not None: + chat_prompt.add_message(role="tool", message=results[0].tool_message) + chat_prompt.add_message( + "assistant", + message=results[0].content, + function_call=results[0].function_call if hasattr(results[0], "function_call") else None, + tool_calls=results[0].tool_calls if hasattr(results[0], "tool_calls") else None, + ) + return chat_prompt + + +class KernelFunction(KernelBaseModel): + """ + Semantic Kernel function. + + Attributes: + plugin_name (str): The name of the plugin that contains this function. Must be upper/lower + case letters and underscores with a minimum length of 1. + description (Optional[str]): The description of the function. + name (str): The name of the function. Must be upper/lower case letters and + underscores with a minimum length of 1. + is_semantic (bool): Whether the function is semantic. + stream_function (Optional[Callable[..., Any]]): The stream function for the function. + parameters (List[ParameterView]): The parameters for the function. + delegate_type (DelegateTypes): The delegate type for the function. + function (Callable[..., Any]): The function to call. + plugins (Optional[KernelPluginCollection]): The collection of plugins. 
+ ai_service (Optional[Union[TextCompletionClientBase, ChatCompletionClientBase]]): The AI service. + ai_prompt_execution_settings (PromptExecutionSettings): The AI prompt execution settings. + chat_prompt_template (Optional[ChatPromptTemplate]): The chat prompt template. + """ + + plugin_name: Annotated[str, StringConstraints(pattern=r"^[A-Za-z_]+$", min_length=1)] + description: Optional[str] = Field(default=None) + name: Annotated[str, StringConstraints(pattern=r"^[A-Za-z_]+$", min_length=1)] + is_semantic: bool = Field(...) + stream_function: Optional[Callable[..., Any]] = Field(default=None) + parameters: List[ParameterView] = Field(...) + delegate_type: DelegateTypes = Field(...) + function: Callable[..., Any] = Field(...) + plugins: Optional["KernelPluginCollection"] = Field(default=None) + ai_service: Optional[Union[TextCompletionClientBase, ChatCompletionClientBase]] = Field(default=None) + prompt_execution_settings: PromptExecutionSettings = Field(default_factory=PromptExecutionSettings) + chat_prompt_template: Optional[ChatPromptTemplate] = Field(default=None) + + def __init__( + self, + delegate_type: DelegateTypes, + delegate_function: Callable[..., Any], + parameters: List[ParameterView], + description: str, + plugin_name: str, + function_name: str, + is_semantic: bool, + delegate_stream_function: Optional[Callable[..., Any]] = None, + **kwargs: Dict[str, Any], + ) -> None: + """ + Initializes a new instance of the KernelFunction class + + Args: + delegate_type (DelegateTypes): The delegate type for the function + delegate_function (Callable[..., Any]): The delegate function for the function + parameters (List[ParameterView]): The parameters for the function + description (str): The description for the function + plugin_name (str): The name of the plugin + name (str): The name of the function + is_semantic (bool): Whether the function is semantic + delegate_stream_function (Optional[Callable[..., Any]]): The delegate stream function for the function + kwargs (Dict[str, Any]): Additional keyword arguments + """ + chat_prompt_template = kwargs.pop("chat_prompt_template", None) + + super().__init__( + delegate_type=delegate_type, + function=delegate_function, + parameters=parameters, + description=description, + plugin_name=plugin_name, + name=function_name, + is_semantic=is_semantic, + stream_function=delegate_stream_function, + chat_prompt_template=chat_prompt_template, + **kwargs, + ) + + @staticmethod + def from_native_method(method: Callable[..., Any], plugin_name: str) -> "KernelFunction": + """ + Create a KernelFunction from a native method. 
+ + Args: + method (Callable[..., Any]): The method to create the function from + plugin_name (str): The name of the plugin + + Returns: + KernelFunction: The kernel function + """ + if method is None: + raise ValueError("Method cannot be `None`") + + assert method.__kernel_function__ is not None, "Method is not a Kernel function" + assert method.__kernel_function_name__ is not None, "Method name is empty" + + parameters = [] + # kernel_function_context_parameters are optionals + if hasattr(method, "__kernel_function_context_parameters__"): + for param in method.__kernel_function_context_parameters__: + assert "name" in param, "Parameter name is empty" + assert "description" in param, "Parameter description is empty" + assert "default_value" in param, "Parameter default value is empty" + + parameters.append( + ParameterView( + name=param["name"], + description=param["description"], + default_value=param["default_value"], + type=param.get("type", "string"), + required=param.get("required", False), + ) + ) + + if ( + hasattr(method, "__kernel_function_input_description__") + and method.__kernel_function_input_description__ is not None + and method.__kernel_function_input_description__ != "" + ): + input_param = ParameterView( + name="input", + description=method.__kernel_function_input_description__, + default_value=method.__kernel_function_input_default_value__, + type="string", + required=False, + ) + parameters = [input_param] + parameters + + return KernelFunction( + delegate_type=DelegateInference.infer_delegate_type(method), + delegate_function=method, + delegate_stream_function=method, + parameters=parameters, + description=method.__kernel_function_description__, + plugin_name=plugin_name, + function_name=method.__kernel_function_name__, + is_semantic=False, + ) + + @staticmethod + def from_semantic_config( + plugin_name: str, + function_name: str, + function_config: SemanticFunctionConfig, + ) -> "KernelFunction": + """ + Create a KernelFunction from a semantic configuration. + + Args: + plugin_name (str): The name of the plugin + function_name (str): The name of the function + function_config (SemanticFunctionConfig): The function configuration + + Returns: + KernelFunction: The kernel function + """ + if function_config is None: + raise ValueError("Function configuration cannot be `None`") + + async def _local_func(client, prompt_execution_settings, context: "KernelContext", **kwargs): + if client is None: + raise ValueError("AI LLM service cannot be `None`") + + if not function_config.has_chat_prompt: + try: + prompt = await function_config.prompt_template.render(context) + results = await client.complete(prompt, prompt_execution_settings) + context.objects["results"] = results + context.variables.update(str(results[0])) + except Exception as e: + # TODO: "critical exceptions" + context.fail(str(e), e) + finally: + return context + + try: + chat_prompt = function_config.prompt_template + # Similar to non-chat, render prompt (which renders to a + # dict of messages) + messages = await chat_prompt.render_messages(context) + results = await client.complete_chat(messages, prompt_execution_settings) + context.objects["results"] = results + if results[0].content is not None: + context.variables.update(str(results[0])) + # TODO: most of this will be deleted once context is gone, just AIResponse object is then returned. 
+ chat_prompt = store_results(chat_prompt, results) + except Exception as exc: + # TODO: "critical exceptions" + context.fail(str(exc), exc) + finally: + return context + + async def _local_stream_func(client, prompt_execution_settings, context): + if client is None: + raise ValueError("AI LLM service cannot be `None`") + + if not function_config.has_chat_prompt: + try: + prompt = await function_config.prompt_template.render(context) + result = client.complete_stream(prompt, prompt_execution_settings) + async for chunk in result: + yield chunk + except Exception as e: + # TODO: "critical exceptions" + context.fail(str(e), e) + return + + try: + chat_prompt = function_config.prompt_template + # Similar to non-chat, render prompt (which renders to a + # list of messages) + messages = await chat_prompt.render_messages(context) + result = client.complete_chat_stream(messages=messages, settings=prompt_execution_settings) + # context.objects["response_object"] = result + # TODO: most of this will be deleted once context is gone, just AIResponse object is then returned. + async for chunk in result: + yield chunk + # context, chat_prompt = store_results(context, result, chat_prompt) + except Exception as e: + # TODO: "critical exceptions" + logger.error(f"Error occurred while invoking stream function: {str(e)}") + context.fail(str(e), e) + + return KernelFunction( + delegate_type=DelegateTypes.ContextSwitchInKernelContextOutTaskKernelContext, + delegate_function=_local_func, + delegate_stream_function=_local_stream_func, + parameters=function_config.prompt_template.get_parameters(), + description=function_config.prompt_template_config.description, + plugin_name=plugin_name, + function_name=function_name, + is_semantic=True, + chat_prompt_template=function_config.prompt_template if function_config.has_chat_prompt else None, + ) + + def set_default_plugin_collection(self, plugins: "KernelPluginCollection") -> "KernelFunction": + self.plugins = plugins + return self + + def set_ai_service(self, ai_service: Callable[[], TextCompletionClientBase]) -> "KernelFunction": + if ai_service is None: + raise ValueError("AI LLM service factory cannot be `None`") + self._verify_is_semantic() + self.ai_service = ai_service() + return self + + def set_chat_service(self, chat_service: Callable[[], ChatCompletionClientBase]) -> "KernelFunction": + if chat_service is None: + raise ValueError("Chat LLM service factory cannot be `None`") + self._verify_is_semantic() + self.ai_service = chat_service() + return self + + def set_ai_configuration(self, settings: PromptExecutionSettings) -> "KernelFunction": + if settings is None: + raise ValueError("AI LLM request settings cannot be `None`") + self._verify_is_semantic() + self.prompt_execution_settings = settings + return self + + def set_chat_configuration(self, settings: PromptExecutionSettings) -> "KernelFunction": + if settings is None: + raise ValueError("Chat LLM request settings cannot be `None`") + self._verify_is_semantic() + self.prompt_execution_settings = settings + return self + + def describe(self) -> FunctionView: + return FunctionView( + name=self.name, + plugin_name=self.plugin_name, + description=self.description, + is_semantic=self.is_semantic, + parameters=self.parameters, + ) + + async def __call__( + self, + input: Optional[str] = None, + variables: ContextVariables = None, + context: Optional["KernelContext"] = None, + memory: Optional[SemanticTextMemoryBase] = None, + settings: Optional[PromptExecutionSettings] = None, + log: Optional[Any] = None, + 
) -> "KernelContext": + """ + Override the call operator to allow calling the function directly + This operator is run asynchronously. + + Arguments: + input {Optional[str]} -- The input to the function + variables {ContextVariables} -- The variables for the function + context {Optional[KernelContext]} -- The context for the function + memory {Optional[SemanticTextMemoryBase]} -- The memory for the function + settings {Optional[PromptExecutionSettings]} -- The settings for the function + log {Optional[Any]} -- A logger to use for logging. (Optional) + + Returns: + KernelContext -- The context for the function + + Raises: + KernelException -- If the function is not semantic + """ + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + return await self.invoke( + input=input, + variables=variables, + context=context, + memory=memory, + settings=settings, + ) + + async def invoke( + self, + input: Optional[str] = None, + variables: ContextVariables = None, + context: Optional["KernelContext"] = None, + memory: Optional[SemanticTextMemoryBase] = None, + settings: Optional[PromptExecutionSettings] = None, + **kwargs: Dict[str, Any], + ) -> "KernelContext": + """ + Invoke the function asynchronously + + Arguments: + input {Optional[str]} -- The input to the function + variables {ContextVariables} -- The variables for the function + context {Optional[KernelContext]} -- The context for the function + memory {Optional[SemanticTextMemoryBase]} -- The memory for the function + settings {Optional[PromptExecutionSettings]} -- The settings for the function + kwargs {Dict[str, Any]} -- Additional keyword arguments + + Returns: + KernelContext -- The context for the function + + Raises: + KernelException -- If there is a problem invoking the function + """ + from semantic_kernel.orchestration.kernel_context import KernelContext + + if context is None: + context = KernelContext( + variables=ContextVariables("") if variables is None else variables, + memory=memory if memory is not None else NullMemory.instance, + plugins=self.plugins, + ) + else: + # If context is passed, we need to merge the variables + if variables is not None: + context.variables = variables.merge_or_overwrite(new_vars=context.variables, overwrite=False) + if memory is not None: + context.memory = memory + + if input is not None: + context.variables.update(input) + + try: + if self.is_semantic: + return await self._invoke_semantic(context, settings, **kwargs) + else: + return await self._invoke_native(context, **kwargs) + except Exception as e: + context.fail(str(e), e) + return context + + async def _invoke_semantic(self, context: "KernelContext", settings: PromptExecutionSettings, **kwargs): + self._verify_is_semantic() + self._ensure_context_has_plugins(context) + new_context = await self.function(self.ai_service, settings or self.prompt_execution_settings, context) + context.variables.merge_or_overwrite(new_context.variables) + return context + + async def _invoke_native(self, context): + self._verify_is_native() + + self._ensure_context_has_plugins(context) + + delegate = DelegateHandlers.get_handler(self.delegate_type) + # for python3.9 compatibility (staticmethod is not callable) + if not hasattr(delegate, "__call__"): + delegate = delegate.__func__ + new_context = await delegate(self.function, context) + + return new_context + + def _verify_is_semantic(self) -> None: + if self.is_semantic: + return + + logger.error("The function is not semantic") + raise KernelException( + 
KernelException.ErrorCodes.InvalidFunctionType, + "Invalid operation, the method requires a semantic function", + ) + + def _verify_is_native(self) -> None: + if not self.is_semantic: + return + + logger.error("The function is not native") + raise KernelException( + KernelException.ErrorCodes.InvalidFunctionType, + "Invalid operation, the method requires a native function", + ) + + async def invoke_stream( + self, + input: Optional[str] = None, + variables: ContextVariables = None, + context: Optional["KernelContext"] = None, + memory: Optional[SemanticTextMemoryBase] = None, + settings: Optional[PromptExecutionSettings] = None, + ): + from semantic_kernel.orchestration.kernel_context import KernelContext + + if context is None: + context = KernelContext( + variables=ContextVariables("") if variables is None else variables, + memory=memory if memory is not None else NullMemory.instance, + plugins=self.plugins, + ) + else: + # If context is passed, we need to merge the variables + if variables is not None: + context.variables = variables.merge_or_overwrite(new_vars=context.variables, overwrite=False) + if memory is not None: + context._memory = memory + + if input is not None: + context.variables.update(input) + + try: + if self.is_semantic: + async for stream_msg in self._invoke_semantic_stream(context, settings): + yield stream_msg + else: + async for stream_msg in self._invoke_native_stream(context): + yield stream_msg + except Exception as e: + logger.error(f"Error occurred while invoking stream function: {str(e)}") + context.fail(str(e), e) + raise KernelException( + KernelException.ErrorCodes.FunctionInvokeError, + "Error occurred while invoking stream function", + ) + + async def _invoke_semantic_stream(self, context, settings): + self._verify_is_semantic() + self._ensure_context_has_plugins(context) + async for stream_msg in self.stream_function( + self.ai_service, settings or self.prompt_execution_settings, context + ): + yield stream_msg + + async def _invoke_native_stream(self, context): + self._verify_is_native() + + self._ensure_context_has_plugins(context) + + delegate = DelegateHandlers.get_handler(self._delegate_type) + # for python3.9 compatibility (staticmethod is not callable) + if not hasattr(delegate, "__call__"): + delegate = delegate.__func__ + + completion = "" + async for partial in delegate(self.function, context): + completion += partial + yield partial + + context.variables.update(completion) + + def _ensure_context_has_plugins(self, context) -> None: + if context.plugins is not None: + return + + context.plugins = self.plugins + + def _trace_function_type_Call(self, type: Enum) -> None: + logger.debug(f"Executing function type {type}: {type.name}") diff --git a/python/semantic_kernel/orchestration/sk_context.py b/python/semantic_kernel/orchestration/sk_context.py deleted file mode 100644 index d8a0e5018fbb..000000000000 --- a/python/semantic_kernel/orchestration/sk_context.py +++ /dev/null @@ -1,253 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
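A rough caller-side sketch of the new KernelFunction wrapping a native method. The plugin class and the kernel_function decorator arguments below are assumptions for illustration; only from_native_method and invoke come from this patch, and the printed result assumes the delegate handler writes the return value back into the context:

import asyncio

from semantic_kernel.orchestration.kernel_function import KernelFunction
from semantic_kernel.plugin_definition import kernel_function

class TextPlugin:
    # Decorator arguments are assumed; the attributes it sets
    # (__kernel_function__, __kernel_function_name__, ...) are what
    # from_native_method reads above.
    @kernel_function(description="Upper-cases the input", name="uppercase")
    def uppercase(self, input: str) -> str:
        return input.upper()

func = KernelFunction.from_native_method(TextPlugin().uppercase, plugin_name="text")

# invoke() is async-only; when no context is passed it builds a KernelContext
# and routes the call through the inferred delegate handler.
context = asyncio.run(func.invoke(input="hello"))
print(context.result)  # expected "HELLO" once the handler updates the context variables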
- -from logging import Logger -from typing import Any, Dict, Generic, Literal, Optional, Tuple, Union - -import pydantic as pdt - -from semantic_kernel.kernel_exception import KernelException -from semantic_kernel.memory.semantic_text_memory_base import ( - SemanticTextMemoryBase, - SemanticTextMemoryT, -) -from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.sk_pydantic import SKGenericModel -from semantic_kernel.skill_definition.read_only_skill_collection import ( - ReadOnlySkillCollection, -) -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, -) - - -class SKContext(SKGenericModel, Generic[SemanticTextMemoryT]): - """Semantic Kernel context.""" - - memory: SemanticTextMemoryT - variables: ContextVariables - # This field can be used to hold anything that is not a string - skill_collection: ReadOnlySkillCollection = pdt.Field( - default_factory=ReadOnlySkillCollection - ) - _objects: Dict[str, Any] = pdt.PrivateAttr(default_factory=dict) - _error_occurred: bool = pdt.PrivateAttr(False) - _last_exception: Optional[Exception] = pdt.PrivateAttr(None) - _last_error_description: str = pdt.PrivateAttr("") - _logger: Logger = pdt.PrivateAttr() - - def __init__( - self, - variables: ContextVariables, - memory: SemanticTextMemoryBase, - skill_collection: Union[ReadOnlySkillCollection, None], - logger: Optional[Logger] = None, - # TODO: cancellation token? - ) -> None: - """ - Initializes a new instance of the SKContext class. - - Arguments: - variables {ContextVariables} -- The context variables. - memory {SemanticTextMemoryBase} -- The semantic text memory. - skill_collection {ReadOnlySkillCollectionBase} -- The skill collection. - logger {Logger} -- The logger. - """ - # Local import to avoid circular dependency - from semantic_kernel import NullLogger - - if skill_collection is None: - skill_collection = ReadOnlySkillCollection() - - super().__init__( - variables=variables, memory=memory, skill_collection=skill_collection - ) - self._logger = logger or NullLogger() - - def fail(self, error_description: str, exception: Optional[Exception] = None): - """ - Call this method to signal that an error occurred. - In the usual scenarios, this is also how execution is stopped - e.g., to inform the user or take necessary steps. - - Arguments: - error_description {str} -- The error description. - - Keyword Arguments: - exception {Exception} -- The exception (default: {None}). - """ - self._error_occurred = True - self._last_error_description = error_description - self._last_exception = exception - - @property - def result(self) -> str: - """ - Print the processed input, aka the current data - after any processing that has occurred. - - Returns: - str -- Processed input, aka result. - """ - return str(self.variables) - - @property - def error_occurred(self) -> bool: - """ - Whether an error occurred while executing functions in the pipeline. - - Returns: - bool -- Whether an error occurred. - """ - return self._error_occurred - - @property - def last_error_description(self) -> str: - """ - The last error description. - - Returns: - str -- The last error description. - """ - return self._last_error_description - - @property - def last_exception(self) -> Optional[Exception]: - """ - When an error occurs, this is the most recent exception. - - Returns: - Exception -- The most recent exception. 
- """ - return self._last_exception - - @property - def objects(self) -> Dict[str, Any]: - """ - The objects dictionary. - - Returns: - Dict[str, Any] -- The objects dictionary. - """ - return self._objects - - @property - def skills(self) -> ReadOnlySkillCollectionBase: - """ - Read only skills collection. - - Returns: - ReadOnlySkillCollectionBase -- The skills collection. - """ - return self.skill_collection - - @skills.setter - def skills(self, value: ReadOnlySkillCollectionBase) -> None: - """ - Set the value of skills collection - """ - self.skill_collection = value - - @property - def log(self) -> Logger: - """ - The logger. - - Returns: - Logger -- The logger. - """ - return self._logger - - def __setitem__(self, key: str, value: Any) -> None: - """ - Sets a context variable. - - Arguments: - key {str} -- The variable name. - value {Any} -- The variable value. - """ - self.variables[key] = value - - def __getitem__(self, key: str) -> Any: - """ - Gets a context variable. - - Arguments: - key {str} -- The variable name. - - Returns: - Any -- The variable value. - """ - return self.variables[key] - - def func(self, skill_name: str, function_name: str): - """ - Access registered functions by skill + name. Not case sensitive. - The function might be native or semantic, it's up to the caller - handling it. - - Arguments: - skill_name {str} -- The skill name. - function_name {str} -- The function name. - - Returns: - SKFunctionBase -- The function. - """ - if self.skill_collection is None: - raise ValueError("The skill collection hasn't been set") - assert self.skill_collection is not None # for type checker - - if self.skill_collection.has_native_function(skill_name, function_name): - return self.skill_collection.get_native_function(skill_name, function_name) - - return self.skill_collection.get_semantic_function(skill_name, function_name) - - def __str__(self) -> str: - if self._error_occurred: - return f"Error: {self._last_error_description}" - - return self.result - - def throw_if_skill_collection_not_set(self) -> None: - """ - Throws an exception if the skill collection hasn't been set. - """ - if self.skill_collection is None: - raise KernelException( - KernelException.ErrorCodes.SkillCollectionNotSet, - "Skill collection not found in the context", - ) - - def is_function_registered( - self, skill_name: str, function_name: str - ) -> Union[Tuple[Literal[True], Any], Tuple[Literal[False], None]]: - """ - Checks whether a function is registered in this context. - - Arguments: - skill_name {str} -- The skill name. - function_name {str} -- The function name. - - Returns: - Tuple[bool, SKFunctionBase] -- A tuple with a boolean indicating - whether the function is registered and the function itself (or None). 
- """ - self.throw_if_skill_collection_not_set() - assert self.skill_collection is not None # for type checker - - if self.skill_collection.has_native_function(skill_name, function_name): - the_func = self.skill_collection.get_native_function( - skill_name, function_name - ) - return True, the_func - - if self.skill_collection.has_native_function(None, function_name): - the_func = self.skill_collection.get_native_function(None, function_name) - return True, the_func - - if self.skill_collection.has_semantic_function(skill_name, function_name): - the_func = self.skill_collection.get_semantic_function( - skill_name, function_name - ) - return True, the_func - - return False, None diff --git a/python/semantic_kernel/orchestration/sk_function.py b/python/semantic_kernel/orchestration/sk_function.py deleted file mode 100644 index c39d558d0b6f..000000000000 --- a/python/semantic_kernel/orchestration/sk_function.py +++ /dev/null @@ -1,599 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import asyncio -import platform -import sys -import threading -from enum import Enum -from logging import Logger -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional - -from semantic_kernel.connectors.ai.chat_completion_client_base import ( - ChatCompletionClientBase, -) -from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, -) -from semantic_kernel.connectors.ai.text_completion_client_base import ( - TextCompletionClientBase, -) -from semantic_kernel.kernel_exception import KernelException -from semantic_kernel.memory.null_memory import NullMemory -from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase -from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.delegate_handlers import DelegateHandlers -from semantic_kernel.orchestration.delegate_inference import DelegateInference -from semantic_kernel.orchestration.delegate_types import DelegateTypes -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase -from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate -from semantic_kernel.semantic_functions.semantic_function_config import ( - SemanticFunctionConfig, -) -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.parameter_view import ParameterView -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, -) -from semantic_kernel.utils.null_logger import NullLogger - -if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext - -if platform.system() == "Windows" and sys.version_info >= (3, 8, 0): - asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) - - -class SKFunction(SKFunctionBase): - """ - Semantic Kernel function. 
- """ - - _parameters: List[ParameterView] - _delegate_type: DelegateTypes - _function: Callable[..., Any] - _skill_collection: Optional[ReadOnlySkillCollectionBase] - _log: Logger - _ai_service: Optional[TextCompletionClientBase] - _ai_request_settings: CompleteRequestSettings - _chat_service: Optional[ChatCompletionClientBase] - _chat_request_settings: ChatRequestSettings - _chat_prompt_template: ChatPromptTemplate - - @staticmethod - def from_native_method(method, skill_name="", log=None) -> "SKFunction": - if method is None: - raise ValueError("Method cannot be `None`") - - assert method.__sk_function__ is not None, "Method is not a SK function" - assert method.__sk_function_name__ is not None, "Method name is empty" - - parameters = [] - # sk_function_context_parameters are optionals - if hasattr(method, "__sk_function_context_parameters__"): - for param in method.__sk_function_context_parameters__: - assert "name" in param, "Parameter name is empty" - assert "description" in param, "Parameter description is empty" - assert "default_value" in param, "Parameter default value is empty" - - parameters.append( - ParameterView( - name=param["name"], - description=param["description"], - default_value=param["default_value"], - type=param.get("type", "string"), - required=param.get("required", False), - ) - ) - - if ( - hasattr(method, "__sk_function_input_description__") - and method.__sk_function_input_description__ is not None - and method.__sk_function_input_description__ != "" - ): - input_param = ParameterView( - name="input", - description=method.__sk_function_input_description__, - default_value=method.__sk_function_input_default_value__, - type="string", - required=False, - ) - parameters = [input_param] + parameters - - return SKFunction( - delegate_type=DelegateInference.infer_delegate_type(method), - delegate_function=method, - delegate_stream_function=method, - parameters=parameters, - description=method.__sk_function_description__, - skill_name=skill_name, - function_name=method.__sk_function_name__, - is_semantic=False, - log=log, - ) - - @staticmethod - def from_semantic_config( - skill_name: str, - function_name: str, - function_config: SemanticFunctionConfig, - log: Optional[Logger] = None, - ) -> "SKFunction": - if function_config is None: - raise ValueError("Function configuration cannot be `None`") - - async def _local_func(client, request_settings, context: "SKContext", **kwargs): - if client is None: - raise ValueError("AI LLM service cannot be `None`") - - try: - if not function_config.has_chat_prompt: - prompt = await function_config.prompt_template.render_async(context) - completion = await client.complete_async(prompt, request_settings) - context.variables.update(completion) - return context - except Exception as e: - # TODO: "critical exceptions" - context.fail(str(e), e) - return context - - as_chat_prompt = function_config.prompt_template - # Similar to non-chat, render prompt (which renders to a - # dict of messages) - messages = await as_chat_prompt.render_messages_async(context) - - functions = ( - kwargs.get("functions") - if request_settings.function_call is not None - else None - ) - if request_settings.function_call is not None and functions is None: - log.warning("Function call is not None, but functions is None") - try: - if functions and hasattr(client, "complete_chat_with_functions_async"): - ( - completion, - function_call, - ) = await client.complete_chat_with_functions_async( - messages, functions, request_settings - ) - as_chat_prompt.add_message( 
- "assistant", message=completion, function_call=function_call - ) - if completion is not None: - context.variables.update(completion) - if function_call is not None: - context.objects["function_call"] = function_call - else: - completion = await client.complete_chat_async( - messages, request_settings - ) - as_chat_prompt.add_assistant_message(completion) - context.variables.update(completion) - except Exception as exc: - # TODO: "critical exceptions" - context.fail(str(exc), exc) - finally: - return context - - async def _local_stream_func(client, request_settings, context): - if client is None: - raise ValueError("AI LLM service cannot be `None`") - - try: - if function_config.has_chat_prompt: - as_chat_prompt = function_config.prompt_template - - # Similar to non-chat, render prompt (which renders to a - # list of messages) - completion = "" - messages = await as_chat_prompt.render_messages_async(context) - async for steam_message in client.complete_chat_stream_async( - messages, request_settings - ): - completion += steam_message - yield steam_message - - # Add the last message from the rendered chat prompt - # (which will be the user message) and the response - # from the model (the assistant message) - _, content = messages[-1] - as_chat_prompt.add_user_message(content) - as_chat_prompt.add_assistant_message(completion) - - # Update context - context.variables.update(completion) - else: - prompt = await function_config.prompt_template.render_async(context) - - completion = "" - async for stream_message in client.complete_stream_async( - prompt, request_settings - ): - completion += stream_message - yield stream_message - context.variables.update(completion) - except Exception as e: - # TODO: "critical exceptions" - context.fail(str(e), e) - - return SKFunction( - delegate_type=DelegateTypes.ContextSwitchInSKContextOutTaskSKContext, - delegate_function=_local_func, - delegate_stream_function=_local_stream_func, - parameters=function_config.prompt_template.get_parameters(), - description=function_config.prompt_template_config.description, - skill_name=skill_name, - function_name=function_name, - is_semantic=True, - log=log, - chat_prompt_template=function_config.prompt_template - if function_config.has_chat_prompt - else None, - ) - - @property - def name(self) -> str: - return self._name - - @property - def skill_name(self) -> str: - return self._skill_name - - @property - def description(self) -> str: - return self._description - - @property - def parameters(self) -> List[ParameterView]: - return self._parameters - - @property - def is_semantic(self) -> bool: - return self._is_semantic - - @property - def is_native(self) -> bool: - return not self._is_semantic - - @property - def request_settings(self) -> CompleteRequestSettings: - return self._ai_request_settings - - def __init__( - self, - delegate_type: DelegateTypes, - delegate_function: Callable[..., Any], - parameters: List[ParameterView], - description: str, - skill_name: str, - function_name: str, - is_semantic: bool, - log: Optional[Logger] = None, - delegate_stream_function: Optional[Callable[..., Any]] = None, - **kwargs: Dict[str, Any], - ) -> None: - self._delegate_type = delegate_type - self._function = delegate_function - self._parameters = parameters - self._description = description - self._skill_name = skill_name - self._name = function_name - self._is_semantic = is_semantic - self._log = log if log is not None else NullLogger() - self._stream_function = delegate_stream_function - self._skill_collection = None - 
self._ai_service = None - self._ai_request_settings = CompleteRequestSettings() - self._chat_service = None - self._chat_request_settings = ChatRequestSettings() - self._chat_prompt_template = kwargs.get("chat_prompt_template", None) - - def set_default_skill_collection( - self, skills: ReadOnlySkillCollectionBase - ) -> "SKFunction": - self._skill_collection = skills - return self - - def set_ai_service( - self, ai_service: Callable[[], TextCompletionClientBase] - ) -> "SKFunction": - if ai_service is None: - raise ValueError("AI LLM service factory cannot be `None`") - self._verify_is_semantic() - self._ai_service = ai_service() - return self - - def set_chat_service( - self, chat_service: Callable[[], ChatCompletionClientBase] - ) -> "SKFunction": - if chat_service is None: - raise ValueError("Chat LLM service factory cannot be `None`") - self._verify_is_semantic() - self._chat_service = chat_service() - return self - - def set_ai_configuration(self, settings: CompleteRequestSettings) -> "SKFunction": - if settings is None: - raise ValueError("AI LLM request settings cannot be `None`") - self._verify_is_semantic() - self._ai_request_settings = settings - return self - - def set_chat_configuration(self, settings: ChatRequestSettings) -> "SKFunction": - if settings is None: - raise ValueError("Chat LLM request settings cannot be `None`") - self._verify_is_semantic() - self._chat_request_settings = settings - return self - - def describe(self) -> FunctionView: - return FunctionView( - name=self.name, - skill_name=self.skill_name, - description=self.description, - is_semantic=self.is_semantic, - parameters=self._parameters, - ) - - def __call__( - self, - input: Optional[str] = None, - variables: ContextVariables = None, - context: Optional["SKContext"] = None, - memory: Optional[SemanticTextMemoryBase] = None, - settings: Optional[CompleteRequestSettings] = None, - log: Optional[Logger] = None, - ) -> "SKContext": - return self.invoke( - input=input, - variables=variables, - context=context, - memory=memory, - settings=settings, - log=log, - ) - - def invoke( - self, - input: Optional[str] = None, - variables: ContextVariables = None, - context: Optional["SKContext"] = None, - memory: Optional[SemanticTextMemoryBase] = None, - settings: Optional[CompleteRequestSettings] = None, - log: Optional[Logger] = None, - ) -> "SKContext": - from semantic_kernel.orchestration.sk_context import SKContext - - if context is None: - context = SKContext( - variables=ContextVariables("") if variables is None else variables, - skill_collection=self._skill_collection, - memory=memory if memory is not None else NullMemory.instance, - logger=log if log is not None else self._log, - ) - else: - # If context is passed, we need to merge the variables - if variables is not None: - context.variables = variables.merge_or_overwrite( - new_vars=context.variables, overwrite=False - ) - if memory is not None: - context.memory = memory - - if input is not None: - context.variables.update(input) - - # Check if there is an event loop - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = None - - # Handle "asyncio.run() cannot be called from a running event loop" - if loop and loop.is_running(): - if self.is_semantic: - return self._runThread(self._invoke_semantic_async(context, settings)) - else: - return self._runThread(self._invoke_native_async(context)) - else: - if self.is_semantic: - return asyncio.run(self._invoke_semantic_async(context, settings)) - else: - return 
asyncio.run(self._invoke_native_async(context)) - - async def invoke_async( - self, - input: Optional[str] = None, - variables: ContextVariables = None, - context: Optional["SKContext"] = None, - memory: Optional[SemanticTextMemoryBase] = None, - settings: Optional[CompleteRequestSettings] = None, - log: Optional[Logger] = None, - **kwargs: Dict[str, Any], - ) -> "SKContext": - from semantic_kernel.orchestration.sk_context import SKContext - - if context is None: - context = SKContext( - variables=ContextVariables("") if variables is None else variables, - skill_collection=self._skill_collection, - memory=memory if memory is not None else NullMemory.instance, - logger=log if log is not None else self._log, - ) - else: - # If context is passed, we need to merge the variables - if variables is not None: - context.variables = variables.merge_or_overwrite( - new_vars=context.variables, overwrite=False - ) - if memory is not None: - context.memory = memory - - if input is not None: - context.variables.update(input) - - try: - if self.is_semantic: - return await self._invoke_semantic_async(context, settings, **kwargs) - else: - return await self._invoke_native_async(context, **kwargs) - except Exception as e: - context.fail(str(e), e) - return context - - async def _invoke_semantic_async(self, context: "SKContext", settings, **kwargs): - self._verify_is_semantic() - - self._ensure_context_has_skills(context) - - if settings is None: - if self._ai_service is not None: - settings = self._ai_request_settings - elif self._chat_service is not None: - settings = self._chat_request_settings - else: - raise KernelException( - KernelException.ErrorCodes.UnknownError, - "Semantic functions must have either an AI service or Chat service", - ) - - service = ( - self._ai_service if self._ai_service is not None else self._chat_service - ) - new_context = await self._function( - service, settings, context, functions=kwargs.get("functions", None) - ) - context.variables.merge_or_overwrite(new_context.variables) - return context - - async def _invoke_native_async(self, context): - self._verify_is_native() - - self._ensure_context_has_skills(context) - - delegate = DelegateHandlers.get_handler(self._delegate_type) - # for python3.9 compatibility (staticmethod is not callable) - if not hasattr(delegate, "__call__"): - delegate = delegate.__func__ - new_context = await delegate(self._function, context) - - return new_context - - def _verify_is_semantic(self) -> None: - if self._is_semantic: - return - - self._log.error("The function is not semantic") - raise KernelException( - KernelException.ErrorCodes.InvalidFunctionType, - "Invalid operation, the method requires a semantic function", - ) - - def _verify_is_native(self) -> None: - if not self._is_semantic: - return - - self._log.error("The function is not native") - raise KernelException( - KernelException.ErrorCodes.InvalidFunctionType, - "Invalid operation, the method requires a native function", - ) - - async def invoke_stream_async( - self, - input: Optional[str] = None, - variables: ContextVariables = None, - context: Optional["SKContext"] = None, - memory: Optional[SemanticTextMemoryBase] = None, - settings: Optional[CompleteRequestSettings] = None, - log: Optional[Logger] = None, - ): - from semantic_kernel.orchestration.sk_context import SKContext - - if context is None: - context = SKContext( - variables=ContextVariables("") if variables is None else variables, - skill_collection=self._skill_collection, - memory=memory if memory is not None else 
NullMemory.instance, - logger=log if log is not None else self._log, - ) - else: - # If context is passed, we need to merge the variables - if variables is not None: - context.variables = variables.merge_or_overwrite( - new_vars=context.variables, overwrite=False - ) - if memory is not None: - context._memory = memory - - if input is not None: - context.variables.update(input) - - try: - if self.is_semantic: - async for stream_msg in self._invoke_semantic_stream_async( - context, settings - ): - yield stream_msg - else: - async for stream_msg in self._invoke_native_stream_async(context): - yield stream_msg - except Exception as e: - context.fail(str(e), e) - raise KernelException( - KernelException.ErrorCodes.FunctionInvokeError, - "Error occurred while invoking stream function", - ) - - async def _invoke_semantic_stream_async(self, context, settings): - self._verify_is_semantic() - - self._ensure_context_has_skills(context) - - if settings is None: - if self._ai_service is not None: - settings = self._ai_request_settings - elif self._chat_service is not None: - settings = self._chat_request_settings - else: - raise KernelException( - KernelException.ErrorCodes.UnknownError, - "Semantic functions must have either an AI service or Chat service", - ) - - service = ( - self._ai_service if self._ai_service is not None else self._chat_service - ) - - async for stream_msg in self._stream_function(service, settings, context): - yield stream_msg - - async def _invoke_native_stream_async(self, context): - result = await self._invoke_native_async(context) - - yield result - - def _ensure_context_has_skills(self, context) -> None: - if context.skills is not None: - return - - context.skills = self._skill_collection - - def _trace_function_type_Call(self, type: Enum, log: Logger) -> None: - log.debug(f"Executing function type {type}: {type.name}") - - """ - Async code wrapper to allow running async code inside external - event loops such as Jupyter notebooks. - """ - - def _runThread(self, code: Callable): - result = [] - thread = threading.Thread(target=self._runCode, args=(code, result)) - thread.start() - thread.join() - return result[0] - - def _runCode(self, code: Callable, result: List[Any]) -> None: - result.append(asyncio.run(code)) diff --git a/python/semantic_kernel/orchestration/sk_function_base.py b/python/semantic_kernel/orchestration/sk_function_base.py deleted file mode 100644 index 1e0f0e411dd0..000000000000 --- a/python/semantic_kernel/orchestration/sk_function_base.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
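Caller-side migration sketch: the removed SKFunction above exposed a synchronous invoke() alongside invoke_async() and invoke_stream_async(), while the replacement KernelFunction earlier in this patch is async-only. Names other than the kernel APIs (run, some_kernel_function) are illustrative:

import asyncio

async def run(func, user_input: str) -> str:
    # was: func.invoke(...) or await func.invoke_async(...)
    context = await func.invoke(input=user_input)
    # was: async for chunk in func.invoke_stream_async(...)
    async for chunk in func.invoke_stream(input=user_input):
        print(chunk, end="")
    return context.result

# asyncio.run(run(some_kernel_function, "hello world"))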
- -from abc import abstractmethod -from logging import Logger -from typing import TYPE_CHECKING, Any, Callable, Dict, Optional - -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, -) -from semantic_kernel.connectors.ai.text_completion_client_base import ( - TextCompletionClientBase, -) -from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase -from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition.function_view import FunctionView - -if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext - from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, - ) - - -class SKFunctionBase(PydanticField): - FUNCTION_PARAM_NAME_REGEX = r"^[0-9A-Za-z_]*$" - FUNCTION_NAME_REGEX = r"^[0-9A-Za-z_]*$" - SKILL_NAME_REGEX = r"^[0-9A-Za-z_]*$" - - @property - @abstractmethod - def name(self) -> str: - """ - Name of the function. - - The name is used by the skill collection and in - prompt templates; e.g., {{skillName.functionName}} - """ - pass - - @property - @abstractmethod - def skill_name(self) -> str: - """ - Name of the skill that contains this function. - - The name is used by the skill collection and in - prompt templates; e.g., {{skillName.functionName}}""" - pass - - @property - @abstractmethod - def description(self) -> str: - """ - Function description. - - The description is used in combination with embeddings - when searching for relevant functions.""" - pass - - @property - @abstractmethod - def is_semantic(self) -> bool: - """ - Whether the function is semantic. - - IMPORTANT: native functions might use semantic functions - internally, so when this property is False, executing - the function might still involve AI calls. - """ - pass - - @property - @abstractmethod - def is_native(self) -> bool: - """ - Whether the function is native. - - IMPORTANT: native functions might use semantic functions - internally, so when this property is True, executing - the function might still involve AI calls. - """ - pass - - @property - @abstractmethod - def request_settings(self) -> CompleteRequestSettings: - """AI service settings""" - pass - - @abstractmethod - def describe() -> FunctionView: - """ - Returns a description of the function, - including its parameters - - Returns: - FunctionView -- The function description. - """ - pass - - @abstractmethod - def invoke( - self, - input: Optional[str] = None, - variables: ContextVariables = None, - context: Optional["SKContext"] = None, - memory: Optional[SemanticTextMemoryBase] = None, - settings: Optional[CompleteRequestSettings] = None, - log: Optional[Logger] = None, - ) -> "SKContext": - """ - Invokes the function with an explicit string input - Keyword Arguments: - input {str} -- The explicit string input (default: {None}) - variables {ContextVariables} -- The custom input - context {SKContext} -- The context to use - memory: {SemanticTextMemoryBase} -- The memory to use - settings {CompleteRequestSettings} -- LLM completion settings - log {Logger} -- Application logger - Returns: - SKContext -- The updated context, potentially a new one if - context switching is implemented. 
- """ - pass - - @abstractmethod - async def invoke_async( - self, - input: Optional[str] = None, - variables: ContextVariables = None, - context: Optional["SKContext"] = None, - memory: Optional[SemanticTextMemoryBase] = None, - settings: Optional[CompleteRequestSettings] = None, - log: Optional[Logger] = None, - **kwargs: Dict[str, Any], - ) -> "SKContext": - """ - Invokes the function with an explicit string input - Keyword Arguments: - input {str} -- The explicit string input (default: {None}) - variables {ContextVariables} -- The custom input - context {SKContext} -- The context to use - memory: {SemanticTextMemoryBase} -- The memory to use - settings {CompleteRequestSettings} -- LLM completion settings - log {Logger} -- Application logger - Returns: - SKContext -- The updated context, potentially a new one if - context switching is implemented. - """ - pass - - @abstractmethod - def set_default_skill_collection( - self, - skills: "ReadOnlySkillCollectionBase", - ) -> "SKFunctionBase": - """ - Sets the skill collection to use when the function is - invoked without a context or with a context that doesn't have - a skill collection - - Arguments: - skills {ReadOnlySkillCollectionBase} -- Kernel's skill collection - - Returns: - SKFunctionBase -- The function instance - """ - pass - - @abstractmethod - def set_ai_service( - self, service_factory: Callable[[], TextCompletionClientBase] - ) -> "SKFunctionBase": - """ - Sets the AI service used by the semantic function, passing in a factory - method. The factory allows us to lazily instantiate the client and to - properly handle its disposal - - Arguments: - service_factory -- AI service factory - - Returns: - SKFunctionBase -- The function instance - """ - pass - - @abstractmethod - def set_ai_configuration( - self, settings: CompleteRequestSettings - ) -> "SKFunctionBase": - """ - Sets the AI completion settings used with LLM requests - - Arguments: - settings {CompleteRequestSettings} -- LLM completion settings - - Returns: - SKFunctionBase -- The function instance - """ - pass diff --git a/python/semantic_kernel/planning/action_planner/action_planner.py b/python/semantic_kernel/planning/action_planner/action_planner.py index 078f2f8639dc..337dd551f164 100644 --- a/python/semantic_kernel/planning/action_planner/action_planner.py +++ b/python/semantic_kernel/planning/action_planner/action_planner.py @@ -2,25 +2,26 @@ import itertools import json +import logging import os -from logging import Logger from textwrap import dedent from typing import List, Optional import regex from semantic_kernel import Kernel -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction from semantic_kernel.planning.action_planner.action_planner_config import ( ActionPlannerConfig, ) from semantic_kernel.planning.plan import Plan from semantic_kernel.planning.planning_exception import PlanningException -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.parameter_view import ParameterView -from semantic_kernel.utils.null_logger import NullLogger +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter +from 
semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.parameter_view import ParameterView + +logger: logging.Logger = logging.getLogger(__name__) class ActionPlanner: @@ -32,30 +33,30 @@ class ActionPlanner: "no function" if nothing relevant is available. """ - RESTRICTED_SKILL_NAME = "ActionPlanner_Excluded" + RESTRICTED_PLUGIN_NAME = "ActionPlanner_Excluded" config: ActionPlannerConfig _stop_sequence: str = "#END-OF-PLAN" - _planner_function: SKFunctionBase + _planner_function: KernelFunction _kernel: Kernel _prompt_template: str - _logger: Logger def __init__( self, kernel: Kernel, config: Optional[ActionPlannerConfig] = None, prompt: Optional[str] = None, - logger: Optional[Logger] = None, + **kwargs, ) -> None: + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") if kernel is None: raise PlanningException( PlanningException.ErrorCodes.InvalidConfiguration, "Kernel cannot be `None`.", ) - self._logger = logger if logger else NullLogger() self.config = config or ActionPlannerConfig() __cur_dir = os.path.dirname(os.path.abspath(__file__)) @@ -64,51 +65,47 @@ def __init__( self._prompt_template = prompt if prompt else open(__prompt_file, "r").read() self._planner_function = kernel.create_semantic_function( - skill_name=self.RESTRICTED_SKILL_NAME, + plugin_name=self.RESTRICTED_PLUGIN_NAME, prompt_template=self._prompt_template, max_tokens=self.config.max_tokens, stop_sequences=[self._stop_sequence], ) - kernel.import_skill(self, self.RESTRICTED_SKILL_NAME) + kernel.import_plugin(self, self.RESTRICTED_PLUGIN_NAME) self._kernel = kernel self._context = kernel.create_new_context() - async def create_plan_async(self, goal: str) -> Plan: + async def create_plan(self, goal: str) -> Plan: """ :param goal: The input to the planner based on which the plan is made :return: a Plan object """ if goal is None: - raise PlanningException( - PlanningException.ErrorCodes.InvalidGoal, "Goal cannot be `None`." 
- ) + raise PlanningException(PlanningException.ErrorCodes.InvalidGoal, "Goal cannot be `None`.") - self._logger.info(f"Finding the best function for achieving the goal: {goal}") + logger.info(f"Finding the best function for achieving the goal: {goal}") self._context.variables.update(goal) - generated_plan_raw = await self._planner_function.invoke_async( - context=self._context - ) + generated_plan_raw = await self._planner_function.invoke(context=self._context) generated_plan_raw_str = str(generated_plan_raw) if not generated_plan_raw or not generated_plan_raw_str: - self._logger.error("No plan has been generated.") + logger.error("No plan has been generated.") raise PlanningException( PlanningException.ErrorCodes.CreatePlanError, "No plan has been generated.", ) - self._logger.info(f"Plan generated by ActionPlanner:\n{generated_plan_raw_str}") + logger.info(f"Plan generated by ActionPlanner:\n{generated_plan_raw_str}") # Ignore additional text around JSON recursively json_regex = r"\{(?:[^{}]|(?R))*\}" generated_plan_str = regex.search(json_regex, generated_plan_raw_str) if not generated_plan_str: - self._logger.error("No valid plan has been generated.") + logger.error("No valid plan has been generated.") raise PlanningException( PlanningException.ErrorCodes.InvalidPlan, "No valid plan has been generated.", @@ -121,19 +118,17 @@ async def create_plan_async(self, goal: str) -> Plan: try: generated_plan = json.loads(generated_plan_str) except json.decoder.JSONDecodeError as e: - self._logger.error("Encountered an error while parsing Plan JSON.") - self._logger.error(e) + logger.error("Encountered an error while parsing Plan JSON.") + logger.error(e) raise PlanningException( PlanningException.ErrorCodes.InvalidPlan, "Encountered an error while parsing Plan JSON.", ) - self._logger.info( - f"Python dictionary of plan generated by ActionPlanner:\n{generated_plan}" - ) + logger.info(f"Python dictionary of plan generated by ActionPlanner:\n{generated_plan}") if not generated_plan["plan"]: - self._logger.error("Suitable plan not generated by ActionPlanner.") + logger.error("Suitable plan not generated by ActionPlanner.") raise PlanningException( PlanningException.ErrorCodes.CreatePlanError, "Suitable plan not generated by ActionPlanner.", @@ -141,23 +136,20 @@ async def create_plan_async(self, goal: str) -> Plan: if not generated_plan["plan"]["function"]: # no suitable function identified, returning plan with no steps - self._logger.warn( - "No suitable function has been identified by ActionPlanner." - ) + logger.warn("No suitable function has been identified by ActionPlanner.") plan = Plan(description=goal) elif "." in generated_plan["plan"]["function"]: - skill, fun = generated_plan["plan"]["function"].split(".") - function_ref = self._context.skills.get_function(skill, fun) - self._logger.info( - f"ActionPlanner has picked {skill}.{fun}. Reference to this function" + plugin, fun = generated_plan["plan"]["function"].split(".") + function_ref = self._context.plugins[plugin][fun] + logger.info( + f"ActionPlanner has picked {plugin}.{fun}. Reference to this function" f" found in context: {function_ref}" ) plan = Plan(description=goal, function=function_ref) else: - function_ref = self._context.skills.get_function( - generated_plan["plan"]["function"] - ) - self._logger.info( + plugin, func = generated_plan["plan"]["function"] + function_ref = self._context.plugins[plugin][func] + logger.info( f"ActionPlanner has picked {generated_plan['plan']['function']}. 
" " Reference to this function found in context:" f" {function_ref}" @@ -165,42 +157,38 @@ async def create_plan_async(self, goal: str) -> Plan: plan = Plan(description=goal, function=function_ref) for key, val in generated_plan["plan"]["parameters"].items(): - self._logger.info(f"Parameter {key}: {val}") + logger.info(f"Parameter {key}: {val}") if val: plan.parameters[key] = str(val) plan.state[key] = str(val) return plan - @sk_function( - description="List a few good examples of plans to generate", name="GoodExamples" - ) - @sk_function_context_parameter( - name="goal", description="The current goal processed by the planner" - ) - def good_examples(self, goal: str, context: SKContext) -> str: + @kernel_function(description="List a few good examples of plans to generate", name="GoodExamples") + @kernel_function_context_parameter(name="goal", description="The current goal processed by the planner") + def good_examples(self, goal: str, context: KernelContext) -> str: return dedent( """ [EXAMPLE] - List of functions: // Read a file. - FileIOSkill.ReadAsync + FileIOPlugin.ReadAsync Parameter ""path"": Source file. // Write a file. - FileIOSkill.WriteAsync + FileIOPlugin.WriteAsync Parameter ""path"": Destination file. (default value: sample.txt) Parameter ""content"": File content. // Get the current time. - TimeSkill.Time + TimePlugin.Time No parameters. // Makes a POST request to a uri. - HttpSkill.PostAsync + HttpPlugin.PostAsync Parameter ""body"": The body of the request. - End list of functions. Goal: create a file called ""something.txt"". {""plan"":{ ""rationale"": ""the list contains a function that allows to create files"", - ""function"": ""FileIOSkill.WriteAsync"", + ""function"": ""FileIOPlugin.WriteAsync"", ""parameters"": { ""path"": ""something.txt"", ""content"": null @@ -209,30 +197,28 @@ def good_examples(self, goal: str, context: SKContext) -> str: """ ) - @sk_function( + @kernel_function( description="List a few edge case examples of plans to handle", name="EdgeCaseExamples", ) - @sk_function_context_parameter( - name="goal", description="The current goal processed by the planner" - ) - def edge_case_examples(self, goal: str, context: SKContext) -> str: + @kernel_function_context_parameter(name="goal", description="The current goal processed by the planner") + def edge_case_examples(self, goal: str, context: KernelContext) -> str: return dedent( ''' [EXAMPLE] - List of functions: // Get the current time. - TimeSkill.Time + TimePlugin.Time No parameters. // Write a file. - FileIOSkill.WriteAsync + FileIOPlugin.WriteAsync Parameter ""path"": Destination file. (default value: sample.txt) Parameter ""content"": File content. // Makes a POST request to a uri. - HttpSkill.PostAsync + HttpPlugin.PostAsync Parameter ""body"": The body of the request. // Read a file. - FileIOSkill.ReadAsync + FileIOPlugin.ReadAsync Parameter ""path"": Source file. - End list of functions. Goal: tell me a joke. 
@@ -245,21 +231,17 @@ def edge_case_examples(self, goal: str, context: SKContext) -> str: ''' ) - @sk_function( - description="List all functions available in the kernel", name="ListOfFunctions" - ) - @sk_function_context_parameter( - name="goal", description="The current goal processed by the planner" - ) - def list_of_functions(self, goal: str, context: SKContext) -> str: - if context.skills is None: + @kernel_function(description="List all functions available in the kernel", name="ListOfFunctions") + @kernel_function_context_parameter(name="goal", description="The current goal processed by the planner") + def list_of_functions(self, goal: str, context: KernelContext) -> str: + if context.plugins is None: raise PlanningException( error_code=PlanningException.ErrorCodes.InvalidConfiguration, message="Suitable plan not generated by ActionPlanner.", inner_exception=ValueError("No plugins are available."), ) - functions_view = context.skills.get_functions_view() + functions_view = context.plugins.get_functions_view() available_functions: List[FunctionView] = [ *functions_view.semantic_functions.values(), @@ -271,15 +253,15 @@ def list_of_functions(self, goal: str, context: SKContext) -> str: self._create_function_string(func) for func in available_functions if ( - func.skill_name != self.RESTRICTED_SKILL_NAME - and func.skill_name not in self.config.excluded_skills + func.plugin_name != self.RESTRICTED_PLUGIN_NAME + and func.plugin_name not in self.config.excluded_plugins and func.name not in self.config.excluded_functions ) ] available_functions_str = "\n".join(available_functions) - self._logger.info(f"List of available functions:\n{available_functions_str}") + logger.info(f"List of available functions:\n{available_functions_str}") return available_functions_str @@ -288,7 +270,7 @@ def _create_function_string(self, function: FunctionView) -> str: Takes an instance of FunctionView and returns a string that consists of function name, function description and parameters in the following format // <function.description> - <function.skill_name>.<function.name> + <function.plugin_name>.<function.name> Parameter ""<parameter.name>"": <parameter.description> (default value: `default_value`) ... @@ -297,10 +279,8 @@ def _create_function_string(self, function: FunctionView) -> str: if not function.description: - self._logger.warn( - f"{function.skill_name}.{function.name} is missing a description" - ) - description = f"// Function {function.skill_name}.{function.name}." + logger.warn(f"{function.plugin_name}.{function.name} is missing a description") + description = f"// Function {function.plugin_name}.{function.name}." else: description = f"// {function.description}" @@ -308,12 +288,10 @@ def _create_function_string(self, function: FunctionView) -> str: if description[-1] != ".": description = f"{description}." - name = f"{function.skill_name}.{function.name}" + name = f"{function.plugin_name}.{function.name}" parameters_list = [ - result - for x in function.parameters - if (result := self._create_parameter_string(x)) is not None + result for x in function.parameters if (result := self._create_parameter_string(x)) is not None ] if len(parameters_list) == 0: @@ -343,9 +321,7 @@ def _create_parameter_string(self, parameter: ParameterView) -> str: if description[-1] != ".": description = f"{description}."
- default_value = ( - f"(default value: {val})" if (val := parameter.default_value) else "" - ) + default_value = f"(default value: {val})" if (val := parameter.default_value) else "" param_str = f'Parameter ""{name}"": {description} {default_value}' diff --git a/python/semantic_kernel/planning/action_planner/action_planner_config.py b/python/semantic_kernel/planning/action_planner/action_planner_config.py index fdfe62cb1aec..d04a76a57db3 100644 --- a/python/semantic_kernel/planning/action_planner/action_planner_config.py +++ b/python/semantic_kernel/planning/action_planner/action_planner_config.py @@ -4,10 +4,10 @@ class ActionPlannerConfig: def __init__( self, - excluded_skills: List[str] = None, + excluded_plugins: List[str] = None, excluded_functions: List[str] = None, max_tokens: int = 1024, ): - self.excluded_skills: List[str] = excluded_skills or [] + self.excluded_plugins: List[str] = excluded_plugins or [] self.excluded_functions: List[str] = excluded_functions or [] self.max_tokens: int = max_tokens diff --git a/python/semantic_kernel/planning/basic_planner.py b/python/semantic_kernel/planning/basic_planner.py index fee7f2fdb80d..c33683b9ff18 100644 --- a/python/semantic_kernel/planning/basic_planner.py +++ b/python/semantic_kernel/planning/basic_planner.py @@ -40,24 +40,24 @@ def __repr__(self): args: - name: the name to look up -WriterSkill.EmailTo +WriterPlugin.EmailTo description: email the input text to a recipient args: - input: the text to email - recipient: the recipient's email address. Multiple addresses may be included if separated by ';'. -WriterSkill.Translate +WriterPlugin.Translate description: translate the input to another language args: - input: the text to translate - language: the language to translate to -WriterSkill.Summarize +WriterPlugin.Summarize description: summarize input text args: - input: the text to summarize -FunSkill.Joke +FunPlugin.Joke description: Generate a funny joke args: - input: the input to generate a joke about @@ -69,29 +69,29 @@ def __repr__(self): { "input": "cars", "subtasks": [ - {"function": "FunSkill.Joke"}, - {"function": "WriterSkill.Translate", "args": {"language": "Spanish"}} + {"function": "FunPlugin.Joke"}, + {"function": "WriterPlugin.Translate", "args": {"language": "Spanish"}} ] } [AVAILABLE FUNCTIONS] -WriterSkill.Brainstorm +WriterPlugin.Brainstorm description: Brainstorm ideas args: - input: the input to brainstorm about -EdgarAllenPoeSkill.Poe +EdgarAllenPoePlugin.Poe description: Write in the style of author Edgar Allen Poe args: - input: the input to write about -WriterSkill.EmailTo +WriterPlugin.EmailTo description: Write an email to a recipient args: - input: the input to write about - recipient: the recipient's email address. 
-WriterSkill.Translate +WriterPlugin.Translate description: translate the input to another language args: - input: the text to translate @@ -106,10 +106,10 @@ def __repr__(self): { "input": "Valentine's Day Date Ideas", "subtasks": [ - {"function": "WriterSkill.Brainstorm"}, - {"function": "EdgarAllenPoeSkill.Poe"}, - {"function": "WriterSkill.EmailTo", "args": {"recipient": "significant_other"}}, - {"function": "WriterSkill.Translate", "args": {"language": "French"}} + {"function": "WriterPlugin.Brainstorm"}, + {"function": "EdgarAllenPoePlugin.Poe"}, + {"function": "WriterPlugin.EmailTo", "args": {"recipient": "significant_other"}}, + {"function": "WriterPlugin.Translate", "args": {"language": "French"}} ] } @@ -133,21 +133,21 @@ def _create_available_functions_string(self, kernel: Kernel) -> str: Given an instance of the Kernel, create the [AVAILABLE FUNCTIONS] string for the prompt. """ - # Get a dictionary of skill names to all native and semantic functions - native_functions = kernel.skills.get_functions_view().native_functions - semantic_functions = kernel.skills.get_functions_view().semantic_functions + # Get a dictionary of plugin names to all native and semantic functions + native_functions = kernel.plugins.get_functions_view().native_functions + semantic_functions = kernel.plugins.get_functions_view().semantic_functions native_functions.update(semantic_functions) # Create a mapping between all function names and their descriptions # and also a mapping between function names and their parameters all_functions = native_functions - skill_names = list(all_functions.keys()) + plugin_names = list(all_functions.keys()) all_functions_descriptions_dict = {} all_functions_params_dict = {} - for skill_name in skill_names: - for func in all_functions[skill_name]: - key = skill_name + "." + func.name + for plugin_name in plugin_names: + for func in all_functions[plugin_name]: + key = plugin_name + "." + func.name all_functions_descriptions_dict[key] = func.description all_functions_params_dict[key] = func.parameters @@ -166,14 +166,12 @@ def _create_available_functions_string(self, kernel: Kernel) -> str: param_description = "" else: param_description = param.description - available_functions_string += ( - "- " + param.name + ": " + param_description + "\n" - ) + available_functions_string += "- " + param.name + ": " + param_description + "\n" available_functions_string += "\n" return available_functions_string - async def create_plan_async( + async def create_plan( self, goal: str, kernel: Kernel, @@ -185,9 +183,7 @@ async def create_plan_async( """ # Create the semantic function for the planner with the given prompt - planner = kernel.create_semantic_function( - prompt, max_tokens=1000, temperature=0.8 - ) + planner = kernel.create_semantic_function(prompt, max_tokens=1000, temperature=0.8) available_functions_string = self._create_available_functions_string(kernel) @@ -196,10 +192,10 @@ async def create_plan_async( # Add the goal to the context context["goal"] = goal context["available_functions"] = available_functions_string - generated_plan = await planner.invoke_async(variables=context) + generated_plan = await planner.invoke(variables=context) return Plan(prompt=prompt, goal=goal, plan=generated_plan) - async def execute_plan_async(self, plan: Plan, kernel: Kernel) -> str: + async def execute_plan(self, plan: Plan, kernel: Kernel) -> str: """ Given a plan, execute each of the functions within the plan from start to finish and output the result. 
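The same rename applies to the planner entry points in this file (`create_plan_async`/`execute_plan_async` become `create_plan`/`execute_plan`). A minimal usage sketch, assuming the planner class in this module is `BasicPlanner`, a kernel that already has a completion service and the relevant plugins imported, and a made-up goal:

    from semantic_kernel import Kernel
    from semantic_kernel.planning.basic_planner import BasicPlanner

    async def brainstorm_in_spanish(kernel: Kernel) -> str:
        planner = BasicPlanner()
        # formerly create_plan_async / execute_plan_async
        plan = await planner.create_plan("Brainstorm date ideas and translate them to Spanish", kernel)
        return await planner.execute_plan(plan, kernel)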
@@ -207,9 +203,7 @@ async def execute_plan_async(self, plan: Plan, kernel: Kernel) -> str: # Filter out good JSON from the result in case additional text is present json_regex = r"\{(?:[^{}]|(?R))*\}" - generated_plan_string = regex.search( - json_regex, plan.generated_plan.result - ).group() + generated_plan_string = regex.search(json_regex, plan.generated_plan.result).group() generated_plan = json.loads(generated_plan_string) context = ContextVariables() @@ -217,18 +211,18 @@ async def execute_plan_async(self, plan: Plan, kernel: Kernel) -> str: subtasks = generated_plan["subtasks"] for subtask in subtasks: - skill_name, function_name = subtask["function"].split(".") - sk_function = kernel.skills.get_function(skill_name, function_name) + plugin_name, function_name = subtask["function"].split(".") + kernel_function = kernel.plugins[plugin_name][function_name] # Get the arguments dictionary for the function args = subtask.get("args", None) if args: for key, value in args.items(): context[key] = value - output = await sk_function.invoke_async(variables=context) + output = await kernel_function.invoke(variables=context) else: - output = await sk_function.invoke_async(variables=context) + output = await kernel_function.invoke(variables=context) # Override the input context variable with the output of the function context["input"] = output.result diff --git a/python/semantic_kernel/planning/plan.py b/python/semantic_kernel/planning/plan.py index acbb3754a981..8d2221965c96 100644 --- a/python/semantic_kernel/planning/plan.py +++ b/python/semantic_kernel/planning/plan.py @@ -1,13 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. -import asyncio +import logging import re import threading -from logging import Logger -from typing import Any, Callable, List, Optional, Union +from typing import Any, Callable, ClassVar, List, Optional, Union + +from pydantic import PrivateAttr from semantic_kernel import Kernel -from semantic_kernel.connectors.ai import CompleteRequestSettings +from semantic_kernel.connectors.ai import PromptExecutionSettings from semantic_kernel.connectors.ai.text_completion_client_base import ( TextCompletionClientBase, ) @@ -15,32 +16,30 @@ from semantic_kernel.memory.null_memory import NullMemory from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.read_only_skill_collection import ( - ReadOnlySkillCollection, -) -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.kernel_plugin_collection import ( + KernelPluginCollection, ) -from semantic_kernel.utils.null_logger import NullLogger - - -class Plan(SKFunctionBase): - _state: ContextVariables - _steps: List["Plan"] - _function: SKFunctionBase - _parameters: ContextVariables - _outputs: List[str] - _has_next_step: bool - _next_step_index: int - _name: str - _skill_name: str - _description: str - _is_semantic: bool - _request_settings: CompleteRequestSettings - 
DEFAULT_RESULT_KEY = "PLAN.RESULT" + +logger: logging.Logger = logging.getLogger(__name__) + + +class Plan: + _state: ContextVariables = PrivateAttr() + _steps: List["Plan"] = PrivateAttr() + _function: KernelFunction = PrivateAttr() + _parameters: ContextVariables = PrivateAttr() + _outputs: List[str] = PrivateAttr() + _has_next_step: bool = PrivateAttr() + _next_step_index: int = PrivateAttr() + _name: str = PrivateAttr() + _plugin_name: str = PrivateAttr() + _description: str = PrivateAttr() + _is_semantic: bool = PrivateAttr() + _prompt_execution_settings: PromptExecutionSettings = PrivateAttr() + DEFAULT_RESULT_KEY: ClassVar[str] = "PLAN.RESULT" @property def name(self) -> str: @@ -51,8 +50,8 @@ def state(self) -> ContextVariables: return self._state @property - def skill_name(self) -> str: - return self._skill_name + def plugin_name(self) -> str: + return self._plugin_name @property def description(self) -> str: @@ -78,8 +77,8 @@ def is_native(self) -> bool: return not self._is_semantic @property - def request_settings(self) -> CompleteRequestSettings: - return self._request_settings + def prompt_execution_settings(self) -> PromptExecutionSettings: + return self._prompt_execution_settings @property def has_next_step(self) -> bool: @@ -92,17 +91,18 @@ def next_step_index(self) -> int: def __init__( self, name: Optional[str] = None, - skill_name: Optional[str] = None, + plugin_name: Optional[str] = None, description: Optional[str] = None, next_step_index: Optional[int] = None, state: Optional[ContextVariables] = None, parameters: Optional[ContextVariables] = None, outputs: Optional[List[str]] = None, steps: Optional[List["Plan"]] = None, - function: Optional[SKFunctionBase] = None, + function: Optional[KernelFunction] = None, ) -> None: + super().__init__() self._name = "" if name is None else name - self._skill_name = "" if skill_name is None else skill_name + self._plugin_name = "" if plugin_name is None else plugin_name self._description = "" if description is None else description self._next_step_index = 0 if next_step_index is None else next_step_index self._state = ContextVariables() if state is None else state @@ -112,49 +112,61 @@ def __init__( self._has_next_step = len(self._steps) > 0 self._is_semantic = None self._function = None if function is None else function - self._request_settings = None + self._prompt_execution_settings = None if function is not None: self.set_function(function) @classmethod def from_goal(cls, goal: str) -> "Plan": - return cls(description=goal, skill_name=cls.__name__) + return cls(description=goal, plugin_name=cls.__name__) @classmethod - def from_function(cls, function: SKFunctionBase) -> "Plan": + def from_function(cls, function: KernelFunction) -> "Plan": plan = cls() plan.set_function(function) return plan - async def invoke_async( + async def invoke( self, input: Optional[str] = None, - context: Optional[SKContext] = None, - settings: Optional[CompleteRequestSettings] = None, + context: Optional[KernelContext] = None, + settings: Optional[PromptExecutionSettings] = None, memory: Optional[SemanticTextMemoryBase] = None, - logger: Optional[Logger] = None, + **kwargs, # TODO: cancellation_token: CancellationToken, - ) -> SKContext: + ) -> KernelContext: + """ + Invoke the plan asynchronously. + + Args: + input (str, optional): The input to the plan. Defaults to None. + context (KernelContext, optional): The context to use. Defaults to None. + settings (PromptExecutionSettings, optional): The AI request settings to use. Defaults to None. 
+ memory (SemanticTextMemoryBase, optional): The memory to use. Defaults to None. + **kwargs: Additional keyword arguments. + + Returns: + KernelContext: The updated context. + """ + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") if input is not None and input != "": self._state.update(input) if context is None: - context = SKContext( + context = KernelContext( variables=self._state, - skill_collection=ReadOnlySkillCollection(), memory=memory or NullMemory(), - logger=logger if logger is not None else NullLogger(), + plugins=KernelPluginCollection(), ) if self._function is not None: - result = await self._function.invoke_async( - context=context, settings=settings - ) + result = await self._function.invoke(context=context, settings=settings) if result.error_occurred: - result.log.error( + logger.error( "Something went wrong in plan step {0}.{1}:'{2}'".format( - self._skill_name, self._name, result.last_error_description + self._plugin_name, self._name, result.last_error_description ) ) return result @@ -169,88 +181,32 @@ async def invoke_async( return context - def invoke( - self, - input: Optional[str] = None, - context: Optional[SKContext] = None, - settings: Optional[CompleteRequestSettings] = None, - memory: Optional[SemanticTextMemoryBase] = None, - logger: Optional[Logger] = None, - ) -> SKContext: - if input is not None and input != "": - self._state.update(input) - - if context is None: - context = SKContext( - variables=self._state, - skill_collection=ReadOnlySkillCollection(), - memory=memory or NullMemory(), - logger=logger, - ) - - if self._function is not None: - result = self._function.invoke(context=context, settings=settings) - if result.error_occurred: - result.log.error( - result.last_exception, - "Something went wrong in plan step {0}.{1}:'{2}'".format( - self.skill_name, self.name, context.last_error_description - ), - ) - return result - context.variables.update(result.result) - else: - # loop through steps until completion - while self.has_next_step: - # Check if there is an event loop - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = None - function_context = context - self.add_variables_to_context(self._state, function_context) - - # Handle "asyncio.run() cannot be called from a running event loop" - if loop and loop.is_running(): - self._runThread(self.invoke_next_step(function_context)) - else: - asyncio.run(self.invoke_next_step(function_context)) - self.update_context_with_outputs(context) - return context - def set_ai_configuration( self, - settings: CompleteRequestSettings, - ) -> SKFunctionBase: + settings: PromptExecutionSettings, + ) -> KernelFunction: if self._function is not None: self._function.set_ai_configuration(settings) - def set_ai_service( - self, service: Callable[[], TextCompletionClientBase] - ) -> SKFunctionBase: + def set_ai_service(self, service: Callable[[], TextCompletionClientBase]) -> KernelFunction: if self._function is not None: self._function.set_ai_service(service) - def set_default_skill_collection( - self, - skills: ReadOnlySkillCollectionBase, - ) -> SKFunctionBase: + def describe(self) -> Optional[FunctionView]: if self._function is not None: - self._function.set_default_skill_collection(skills) - - def describe(self) -> FunctionView: - return self._function.describe() + return self._function.describe() + return None - def set_available_functions(self, plan: "Plan", context: SKContext) -> "Plan": + def 
set_available_functions(self, plan: "Plan", context: KernelContext) -> "Plan": if len(plan.steps) == 0: - if context.skills is None: + if context.plugins is None: raise KernelException( - KernelException.ErrorCodes.SkillCollectionNotSet, - "Skill collection not found in the context", + KernelException.ErrorCodes.PluginCollectionNotSet, + "Plugin collection not found in the context", ) try: - skillFunction = context.skills.get_function(plan.skill_name, plan.name) - plan.set_function(skillFunction) + pluginFunction = context.plugins[plan.plugin_name][plan.name] + plan.set_function(pluginFunction) except Exception: pass else: @@ -259,14 +215,14 @@ def set_available_functions(self, plan: "Plan", context: SKContext) -> "Plan": return plan - def add_steps(self, steps: Union[List["Plan"], List[SKFunctionBase]]) -> None: + def add_steps(self, steps: Union[List["Plan"], List[KernelFunction]]) -> None: for step in steps: if type(step) is Plan: self._steps.append(step) else: new_step = Plan( name=step.name, - skill_name=step.skill_name, + plugin_name=step.plugin_name, description=step.description, next_step_index=0, state=ContextVariables(), @@ -277,15 +233,15 @@ def add_steps(self, steps: Union[List["Plan"], List[SKFunctionBase]]) -> None: new_step.set_function(step) self._steps.append(new_step) - def set_function(self, function: SKFunctionBase) -> None: + def set_function(self, function: KernelFunction) -> None: self._function = function self._name = function.name - self._skill_name = function.skill_name + self._plugin_name = function.plugin_name self._description = function.description self._is_semantic = function.is_semantic - self._request_settings = function.request_settings + self._prompt_execution_settings = function.prompt_execution_settings - async def run_next_step_async( + async def run_next_step( self, kernel: Kernel, variables: ContextVariables, @@ -293,7 +249,7 @@ async def run_next_step_async( context = kernel.create_new_context(variables) return await self.invoke_next_step(context) - async def invoke_next_step(self, context: SKContext) -> "Plan": + async def invoke_next_step(self, context: KernelContext) -> "Plan": if self.has_next_step: step = self._steps[self._next_step_index] @@ -301,20 +257,18 @@ async def invoke_next_step(self, context: SKContext) -> "Plan": variables = self.get_next_step_variables(context.variables, step) # Invoke the step - func_context = SKContext( + func_context = KernelContext( variables=variables, memory=context.memory, - skill_collection=context.skills, - logger=context.log, + plugins=context.plugins, ) - result = await step.invoke_async(context=func_context) + result = await step.invoke(context=func_context) result_value = result.result if result.error_occurred: raise KernelException( KernelException.ErrorCodes.FunctionInvokeError, - "Error occurred while running plan step: " - + result.last_error_description, + "Error occurred while running plan step: " + result.last_error_description, result.last_exception, ) @@ -326,9 +280,7 @@ async def invoke_next_step(self, context: SKContext) -> "Plan": current_plan_result = "" if Plan.DEFAULT_RESULT_KEY in self._state.variables: current_plan_result = self._state[Plan.DEFAULT_RESULT_KEY] - self._state.set( - Plan.DEFAULT_RESULT_KEY, current_plan_result.strip() + result_value - ) + self._state.set(Plan.DEFAULT_RESULT_KEY, current_plan_result.strip() + result_value) # Update state with outputs (if any) for output in step._outputs: @@ -342,14 +294,12 @@ async def invoke_next_step(self, context: SKContext) -> "Plan": 
return self - def add_variables_to_context( - self, variables: ContextVariables, context: SKContext - ) -> None: + def add_variables_to_context(self, variables: ContextVariables, context: KernelContext) -> None: for key in variables.variables: if key not in context.variables: context.variables.set(key, variables[key]) - def update_context_with_outputs(self, context: SKContext) -> None: + def update_context_with_outputs(self, context: KernelContext) -> None: result_string = "" if Plan.DEFAULT_RESULT_KEY in self._state.variables: result_string = self._state[Plan.DEFAULT_RESULT_KEY] @@ -366,12 +316,10 @@ def update_context_with_outputs(self, context: SKContext) -> None: return context - def get_next_step_variables( - self, variables: ContextVariables, step: "Plan" - ) -> ContextVariables: + def get_next_step_variables(self, variables: ContextVariables, step: "Plan") -> ContextVariables: # Priority for Input # - Parameters (expand from variables if needed) - # - SKContext.Variables + # - KernelContext.Variables # - Plan.State # - Empty if sending to another plan # - Plan.Description @@ -403,24 +351,20 @@ def get_next_step_variables( if param.name in variables: step_variables.set(param.name, variables[param.name]) - elif param.name in self._state and ( - self._state[param.name] is not None and self._state[param.name] != "" - ): + elif param.name in self._state and (self._state[param.name] is not None and self._state[param.name] != ""): step_variables.set(param.name, self._state[param.name]) - for param_var in step.parameters.variables: - if param_var in step_variables: + for param_name, param_val in step.parameters.variables.items(): + if param_name in step_variables: continue - expanded_value = self.expand_from_variables(variables, param_var) - if expanded_value.lower() == param_var.lower(): - step_variables.set(param_var, step.parameters.variables[param_var]) - elif param_var in variables: - step_variables.set(param_var, variables[param_var]) - elif param_var in self._state: - step_variables.set(param_var, self._state[param_var]) + if param_name in variables: + step_variables.set(param_name, param_val) + elif param_name in self._state: + step_variables.set(param_name, self._state[param_name]) else: - step_variables.set(param_var, expanded_value) + expanded_value = self.expand_from_variables(variables, param_val) + step_variables.set(param_name, expanded_value) for item in variables.variables: if item not in step_variables: @@ -428,15 +372,11 @@ def get_next_step_variables( return step_variables - def expand_from_variables( - self, variables: ContextVariables, input_string: str - ) -> str: + def expand_from_variables(self, variables: ContextVariables, input_string: str) -> str: result = input_string variables_regex = r"\$(?P\w+)" matches = [m for m in re.finditer(variables_regex, input_string)] - ordered_matches = sorted( - matches, key=lambda m: len(m.group("var")), reverse=True - ) + ordered_matches = sorted(matches, key=lambda m: len(m.group("var")), reverse=True) for match in ordered_matches: var_name = match.group("var") diff --git a/python/semantic_kernel/planning/sequential_planner/Plugins/SequentialPlanning/config.json b/python/semantic_kernel/planning/sequential_planner/Plugins/SequentialPlanning/config.json new file mode 100644 index 000000000000..a6abef524f3d --- /dev/null +++ b/python/semantic_kernel/planning/sequential_planner/Plugins/SequentialPlanning/config.json @@ -0,0 +1,25 @@ +{ + "schema": 1, + "description": "Given a request or command or goal generate a step by step plan 
to fulfill the request using functions. This ability is also known as decision making and function flow", + "type": "completion", + "completion": { + "max_tokens": 1024, + "temperature": 0, + "top_p": 0, + "presence_penalty": 0, + "frequency_penalty": 0, + "stop_sequences": [""] + }, + "input_variables": [ + { + "name": "input", + "description": "The question to answer", + "defaultValue": "" + }, + { + "name": "available_functions", + "description": "The list of the agent's available_functions", + "defaultValue": "" + } + ] +} diff --git a/python/semantic_kernel/planning/sequential_planner/Skills/SequentialPlanning/skprompt.txt b/python/semantic_kernel/planning/sequential_planner/Plugins/SequentialPlanning/skprompt.txt similarity index 100% rename from python/semantic_kernel/planning/sequential_planner/Skills/SequentialPlanning/skprompt.txt rename to python/semantic_kernel/planning/sequential_planner/Plugins/SequentialPlanning/skprompt.txt diff --git a/python/semantic_kernel/planning/sequential_planner/Skills/SequentialPlanning/config.json b/python/semantic_kernel/planning/sequential_planner/Skills/SequentialPlanning/config.json deleted file mode 100644 index 1309f85b5a1a..000000000000 --- a/python/semantic_kernel/planning/sequential_planner/Skills/SequentialPlanning/config.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "schema": 1, - "description": "Given a request or command or goal generate a step by step plan to fulfill the request using functions. This ability is also known as decision making and function flow", - "type": "completion", - "completion": { - "max_tokens": 1024, - "temperature": 0, - "top_p": 0, - "presence_penalty": 0, - "frequency_penalty": 0, - "stop_sequences": [""] - }, - "input": { - "parameters": [ - { - "name": "input", - "description": "The question to answer", - "defaultValue": "" - }, - { - "name": "available_functions", - "description": "The list of the agent's available_functions", - "defaultValue": "" - } - ] - } -} diff --git a/python/semantic_kernel/planning/sequential_planner/sequential_planner.py b/python/semantic_kernel/planning/sequential_planner/sequential_planner.py index edb850f88fdc..026507085cab 100644 --- a/python/semantic_kernel/planning/sequential_planner/sequential_planner.py +++ b/python/semantic_kernel/planning/sequential_planner/sequential_planner.py @@ -10,7 +10,7 @@ SequentialPlannerConfig, ) from semantic_kernel.planning.sequential_planner.sequential_planner_extensions import ( - SequentialPlannerSKContextExtension as SKContextExtension, + SequentialPlannerKernelContextExtension as KernelContextExtension, ) from semantic_kernel.planning.sequential_planner.sequential_planner_parser import ( SequentialPlanParser, @@ -24,8 +24,8 @@ ) if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext - from semantic_kernel.orchestration.sk_function_base import SKFunctionBase + from semantic_kernel.orchestration.kernel_context import KernelContext + from semantic_kernel.orchestration.kernel_function import KernelFunction SEQUENTIAL_PLANNER_DEFAULT_DESCRIPTION = ( "Given a request or command or goal generate a step by step plan to " @@ -33,10 +33,8 @@ ) CUR_DIR = os.path.dirname(os.path.realpath(__file__)) -PROMPT_CONFIG_FILE_PATH = os.path.join(CUR_DIR, "Skills/SequentialPlanning/config.json") -PROMPT_TEMPLATE_FILE_PATH = os.path.join( - CUR_DIR, "Skills/SequentialPlanning/skprompt.txt" -) +PROMPT_CONFIG_FILE_PATH = os.path.join(CUR_DIR, "Plugins/SequentialPlanning/config.json") +PROMPT_TEMPLATE_FILE_PATH = os.path.join(CUR_DIR, 
"Plugins/SequentialPlanning/skprompt.txt") def read_file(file_path: str) -> str: @@ -45,30 +43,26 @@ def read_file(file_path: str) -> str: class SequentialPlanner: - RESTRICTED_SKILL_NAME = "SequentialPlanner_Excluded" + RESTRICTED_PLUGIN_NAME = "SequentialPlanner_Excluded" config: SequentialPlannerConfig - _context: "SKContext" - _function_flow_function: "SKFunctionBase" + _context: "KernelContext" + _function_flow_function: "KernelFunction" - def __init__( - self, kernel: Kernel, config: SequentialPlannerConfig = None, prompt: str = None - ): + def __init__(self, kernel: Kernel, config: SequentialPlannerConfig = None, prompt: str = None): assert isinstance(kernel, Kernel) self.config = config or SequentialPlannerConfig() - self.config.excluded_skills.append(self.RESTRICTED_SKILL_NAME) + self.config.excluded_plugins.append(self.RESTRICTED_PLUGIN_NAME) self._function_flow_function = self._init_flow_function(prompt, kernel) self._context = kernel.create_new_context() def _init_flow_function(self, prompt: str, kernel: Kernel): - prompt_config = PromptTemplateConfig.from_json( - read_file(PROMPT_CONFIG_FILE_PATH) - ) + prompt_config = PromptTemplateConfig.from_json(read_file(PROMPT_CONFIG_FILE_PATH)) prompt_template = prompt or read_file(PROMPT_TEMPLATE_FILE_PATH) - prompt_config.completion.max_tokens = self.config.max_tokens + prompt_config.execution_settings.extension_data["max_tokens"] = self.config.max_tokens prompt_template = PromptTemplate( template=prompt_template, @@ -78,27 +72,21 @@ def _init_flow_function(self, prompt: str, kernel: Kernel): function_config = SemanticFunctionConfig(prompt_config, prompt_template) return kernel.register_semantic_function( - skill_name=self.RESTRICTED_SKILL_NAME, - function_name=self.RESTRICTED_SKILL_NAME, + plugin_name=self.RESTRICTED_PLUGIN_NAME, + function_name=self.RESTRICTED_PLUGIN_NAME, function_config=function_config, ) - async def create_plan_async(self, goal: str) -> Plan: + async def create_plan(self, goal: str) -> Plan: if len(goal) == 0: - raise PlanningException( - PlanningException.ErrorCodes.InvalidGoal, "The goal specified is empty" - ) + raise PlanningException(PlanningException.ErrorCodes.InvalidGoal, "The goal specified is empty") - relevant_function_manual = await SKContextExtension.get_functions_manual_async( - self._context, goal, self.config - ) + relevant_function_manual = await KernelContextExtension.get_functions_manual(self._context, goal, self.config) self._context.variables.set("available_functions", relevant_function_manual) self._context.variables.update(goal) - plan_result = await self._function_flow_function.invoke_async( - context=self._context - ) + plan_result = await self._function_flow_function.invoke(context=self._context) if plan_result.error_occurred: raise PlanningException( @@ -110,14 +98,13 @@ async def create_plan_async(self, goal: str) -> Plan: plan_result_string = plan_result.result.strip() try: - get_skill_function = ( - self.config.get_skill_function - or SequentialPlanParser.get_skill_function(self._context) + get_plugin_function = self.config.get_plugin_function or SequentialPlanParser.get_plugin_function( + self._context ) plan = SequentialPlanParser.to_plan_from_xml( plan_result_string, goal, - get_skill_function, + get_plugin_function, self.config.allow_missing_functions, ) diff --git a/python/semantic_kernel/planning/sequential_planner/sequential_planner_config.py b/python/semantic_kernel/planning/sequential_planner/sequential_planner_config.py index 5a223c4bdb8f..8078042321d0 100644 --- 
a/python/semantic_kernel/planning/sequential_planner/sequential_planner_config.py +++ b/python/semantic_kernel/planning/sequential_planner/sequential_planner_config.py @@ -8,20 +8,20 @@ def __init__( self, relevancy_threshold: Optional[float] = None, max_relevant_functions: int = 100, - excluded_skills: List[str] = None, + excluded_plugins: List[str] = None, excluded_functions: List[str] = None, included_functions: List[str] = None, max_tokens: int = 1024, allow_missing_functions: bool = False, - get_available_functions_async: Callable = None, - get_skill_function: Callable = None, + get_available_functions: Callable = None, + get_plugin_function: Callable = None, ): self.relevancy_threshold: float = relevancy_threshold self.max_relevant_functions: int = max_relevant_functions - self.excluded_skills: List[str] = excluded_skills or [] + self.excluded_plugins: List[str] = excluded_plugins or [] self.excluded_functions: List[str] = excluded_functions or [] self.included_functions: List[str] = included_functions or [] self.max_tokens: int = max_tokens self.allow_missing_functions: bool = allow_missing_functions - self.get_available_functions_async = get_available_functions_async - self.get_skill_function = get_skill_function + self.get_available_functions = get_available_functions + self.get_plugin_function = get_plugin_function diff --git a/python/semantic_kernel/planning/sequential_planner/sequential_planner_extensions.py b/python/semantic_kernel/planning/sequential_planner/sequential_planner_extensions.py index 8efa7c2e0352..2e3f47e918df 100644 --- a/python/semantic_kernel/planning/sequential_planner/sequential_planner_extensions.py +++ b/python/semantic_kernel/planning/sequential_planner/sequential_planner_extensions.py @@ -1,16 +1,19 @@ # Copyright (c) Microsoft. All rights reserved. 
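With the configuration fields renamed above (`excluded_skills` to `excluded_plugins`, `get_skill_function` to `get_plugin_function`), a sketch of how the sequential planner might be configured and invoked; the plugin name, threshold, and goal are illustrative assumptions, and the kernel is assumed to already have its services (and, for relevancy filtering, a memory store) registered:

    from semantic_kernel import Kernel
    from semantic_kernel.planning.sequential_planner.sequential_planner import SequentialPlanner
    from semantic_kernel.planning.sequential_planner.sequential_planner_config import (
        SequentialPlannerConfig,
    )

    async def build_plan(kernel: Kernel) -> None:
        config = SequentialPlannerConfig(
            excluded_plugins=["AdminPlugin"],  # formerly excluded_skills
            relevancy_threshold=0.75,          # only applied when a memory store is available
            max_tokens=1024,
        )
        planner = SequentialPlanner(kernel, config)
        plan = await planner.create_plan("Write a poem about John Doe, then translate it into Italian.")
        for step in plan.steps:
            print(f"{step.plugin_name}.{step.name}")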
import itertools +import logging from typing import AsyncIterable, List from semantic_kernel.kernel_exception import KernelException from semantic_kernel.memory.memory_query_result import MemoryQueryResult from semantic_kernel.memory.null_memory import NullMemory -from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.orchestration.kernel_context import KernelContext from semantic_kernel.planning.sequential_planner.sequential_planner_config import ( SequentialPlannerConfig, ) -from semantic_kernel.skill_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.function_view import FunctionView + +logger: logging.Logger = logging.getLogger(__name__) class SequentialPlannerFunctionViewExtension: @@ -18,89 +21,63 @@ class SequentialPlannerFunctionViewExtension: def to_manual_string(function: FunctionView): inputs = [ f" - {parameter.name}: {parameter.description}" - + ( - f" (default value: {parameter.default_value})" - if parameter.default_value - else "" - ) + + (f" (default value: {parameter.default_value})" if parameter.default_value else "") for parameter in function.parameters ] inputs = "\n".join(inputs) - qualified_name = SequentialPlannerFunctionViewExtension.to_fully_qualified_name( - function - ) + qualified_name = SequentialPlannerFunctionViewExtension.to_fully_qualified_name(function) - return ( - f"{qualified_name}:\n description: {function.description}\n inputs:\n " - f" {inputs}" - ) + return f"{qualified_name}:\n description: {function.description}\n inputs:\n " f" {inputs}" @staticmethod def to_fully_qualified_name(function: FunctionView): - return f"{function.skill_name}.{function.name}" + return f"{function.plugin_name}.{function.name}" @staticmethod def to_embedding_string(function: FunctionView): - inputs = "\n".join( - [ - f" - {parameter.name}: {parameter.description}" - for parameter in function.parameters - ] - ) - return ( - f"{function.name}:\n description: {function.description}\n " - f" inputs:\n{inputs}" - ) + inputs = "\n".join([f" - {parameter.name}: {parameter.description}" for parameter in function.parameters]) + return f"{function.name}:\n description: {function.description}\n " f" inputs:\n{inputs}" -class SequentialPlannerSKContextExtension: - PLANNER_MEMORY_COLLECTION_NAME = " Planning.SKFunctionManual" - PLAN_SK_FUNCTIONS_ARE_REMEMBERED = "Planning.SKFunctionsAreRemembered" +class SequentialPlannerKernelContextExtension: + PLANNER_MEMORY_COLLECTION_NAME = " Planning.KernelFunctionManual" + PLAN_KERNEL_FUNCTIONS_ARE_REMEMBERED = "Planning.KernelFunctionsAreRemembered" @staticmethod - async def get_functions_manual_async( - context: SKContext, + async def get_functions_manual( + context: KernelContext, semantic_query: str = None, config: SequentialPlannerConfig = None, ) -> str: config = config or SequentialPlannerConfig() - if config.get_available_functions_async is None: - functions = ( - await SequentialPlannerSKContextExtension.get_available_functions_async( - context, config, semantic_query - ) + if config.get_available_functions is None: + functions = await SequentialPlannerKernelContextExtension.get_available_functions( + context, config, semantic_query ) else: - functions = await config.get_available_functions_async( - config, semantic_query - ) + functions = await config.get_available_functions(config, semantic_query) - return "\n\n".join( - [ - SequentialPlannerFunctionViewExtension.to_manual_string(func) - for func in functions - ] - ) + return 
"\n\n".join([SequentialPlannerFunctionViewExtension.to_manual_string(func) for func in functions]) @staticmethod - async def get_available_functions_async( - context: SKContext, + async def get_available_functions( + context: KernelContext, config: SequentialPlannerConfig, semantic_query: str = None, ): - excluded_skills = config.excluded_skills or [] + excluded_plugins = config.excluded_plugins or [] excluded_functions = config.excluded_functions or [] included_functions = config.included_functions or [] - if context.skills is None: + if context.plugins is None: raise KernelException( - KernelException.ErrorCodes.SkillCollectionNotSet, - "Skill collection not found in the context", + KernelException.ErrorCodes.PluginCollectionNotSet, + "Plugin collection not found in the context", ) - functions_view = context.skills.get_functions_view() + functions_view = context.plugins.get_functions_view() available_functions: List[FunctionView] = [ *functions_view.semantic_functions.values(), @@ -111,57 +88,42 @@ async def get_available_functions_async( available_functions = [ func for func in available_functions - if ( - func.skill_name not in excluded_skills - and func.name not in excluded_functions - ) + if (func.plugin_name not in excluded_plugins and func.name not in excluded_functions) ] - if ( - semantic_query is None - or isinstance(context.memory, NullMemory) - or config.relevancy_threshold is None - ): + if semantic_query is None or isinstance(context.memory, NullMemory) or config.relevancy_threshold is None: # If no semantic query is provided, return all available functions. # If a Memory provider has not been registered, return all available functions. return available_functions # Remember functions in memory so that they can be searched. - await SequentialPlannerSKContextExtension.remember_functions_async( - context, available_functions - ) + await SequentialPlannerKernelContextExtension.remember_functions(context, available_functions) # Search for functions that match the semantic query. - memories = await context.memory.search_async( - SequentialPlannerSKContextExtension.PLANNER_MEMORY_COLLECTION_NAME, + memories = await context.memory.search( + SequentialPlannerKernelContextExtension.PLANNER_MEMORY_COLLECTION_NAME, semantic_query, config.max_relevant_functions, config.relevancy_threshold, ) # Add functions that were found in the search results. - relevant_functions = ( - await SequentialPlannerSKContextExtension.get_relevant_functions_async( - context, available_functions, memories - ) + relevant_functions = await SequentialPlannerKernelContextExtension.get_relevant_functions( + context, available_functions, memories ) # Add any missing functions that were included but not found in the search results. 
missing_functions = [ - func - for func in included_functions - if func not in [func.name for func in relevant_functions] + func for func in included_functions if func not in [func.name for func in relevant_functions] ] - relevant_functions += [ - func for func in available_functions if func.name in missing_functions - ] + relevant_functions += [func for func in available_functions if func.name in missing_functions] - return sorted(relevant_functions, key=lambda x: (x.skill_name, x.name)) + return sorted(relevant_functions, key=lambda x: (x.plugin_name, x.name)) @staticmethod - async def get_relevant_functions_async( - context: SKContext, + async def get_relevant_functions( + context: KernelContext, available_functions: List[FunctionView], memories: AsyncIterable[MemoryQueryResult], ) -> List[FunctionView]: @@ -172,20 +134,15 @@ async def get_relevant_functions_async( ( func for func in available_functions - if SequentialPlannerFunctionViewExtension.to_fully_qualified_name( - func - ) - == memory_entry.id + if SequentialPlannerFunctionViewExtension.to_fully_qualified_name(func) == memory_entry.id ), None, ) if function is not None: - context.log.debug( + logger.debug( "Found relevant function. Relevance Score: {0}, Function: {1}".format( memory_entry.relevance, - SequentialPlannerFunctionViewExtension.to_fully_qualified_name( - function - ), + SequentialPlannerFunctionViewExtension.to_fully_qualified_name(function), ) ) relevant_functions.append(function) @@ -193,29 +150,20 @@ async def get_relevant_functions_async( return relevant_functions @staticmethod - async def remember_functions_async( - context: SKContext, available_functions: List[FunctionView] - ): + async def remember_functions(context: KernelContext, available_functions: List[FunctionView]): # Check if the functions have already been saved to memory. - if ( - SequentialPlannerSKContextExtension.PLAN_SK_FUNCTIONS_ARE_REMEMBERED - in context.variables - ): + if SequentialPlannerKernelContextExtension.PLAN_KERNEL_FUNCTIONS_ARE_REMEMBERED in context.variables: return for function in available_functions: - function_name = ( - SequentialPlannerFunctionViewExtension.to_fully_qualified_name(function) - ) + function_name = SequentialPlannerFunctionViewExtension.to_fully_qualified_name(function) key = function_name description = function.description or function_name - text_to_embed = SequentialPlannerFunctionViewExtension.to_embedding_string( - function - ) + text_to_embed = SequentialPlannerFunctionViewExtension.to_embedding_string(function) # It'd be nice if there were a saveIfNotExists method on the memory interface - memory_entry = await context.memory.get_async( - collection=SequentialPlannerSKContextExtension.PLANNER_MEMORY_COLLECTION_NAME, + memory_entry = await context.memory.get( + collection=SequentialPlannerKernelContextExtension.PLANNER_MEMORY_COLLECTION_NAME, key=key, with_embedding=False, ) @@ -223,8 +171,8 @@ async def remember_functions_async( # TODO It'd be nice if the minRelevanceScore could be a parameter for each item that was saved to memory # As folks may want to tune their functions to be more or less relevant. # Memory now supports these such strategies. 
- await context.memory.save_information_async( - collection=SequentialPlannerSKContextExtension.PLANNER_MEMORY_COLLECTION_NAME, + await context.memory.save_information( + collection=SequentialPlannerKernelContextExtension.PLANNER_MEMORY_COLLECTION_NAME, text=text_to_embed, id=key, description=description, @@ -232,6 +180,4 @@ async def remember_functions_async( ) # Set a flag to indicate that the functions have been saved to memory. - context.variables.set( - SequentialPlannerSKContextExtension.PLAN_SK_FUNCTIONS_ARE_REMEMBERED, "true" - ) + context.variables.set(SequentialPlannerKernelContextExtension.PLAN_KERNEL_FUNCTIONS_ARE_REMEMBERED, "true") diff --git a/python/semantic_kernel/planning/sequential_planner/sequential_planner_parser.py b/python/semantic_kernel/planning/sequential_planner/sequential_planner_parser.py index cea7747521f3..2fcf4485dd01 100644 --- a/python/semantic_kernel/planning/sequential_planner/sequential_planner_parser.py +++ b/python/semantic_kernel/planning/sequential_planner/sequential_planner_parser.py @@ -6,8 +6,8 @@ from semantic_kernel.kernel_exception import KernelException from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction from semantic_kernel.planning.plan import Plan from semantic_kernel.planning.planning_exception import PlanningException @@ -21,12 +21,14 @@ class SequentialPlanParser: @staticmethod - def get_skill_function( - context: SKContext, - ) -> Callable[[str, str], Optional[SKFunctionBase]]: - def function(skill_name: str, function_name: str) -> Optional[SKFunctionBase]: + def get_plugin_function( + context: KernelContext, + ) -> Callable[[str, str], Optional[KernelFunction]]: + def function(plugin_name: str, function_name: str) -> Optional[KernelFunction]: try: - return context.skills.get_function(skill_name, function_name) + return context.plugins[plugin_name][function_name] + except KeyError: + return None except KernelException: return None @@ -36,7 +38,7 @@ def function(skill_name: str, function_name: str) -> Optional[SKFunctionBase]: def to_plan_from_xml( xml_string: str, goal: str, - get_skill_function: Callable[[str, str], Optional[SKFunctionBase]], + get_plugin_function: Callable[[str, str], Optional[KernelFunction]], allow_missing_functions: bool = False, ): xml_string = "" + xml_string + "" @@ -71,25 +73,23 @@ def to_plan_from_xml( continue if child_node.tag.startswith(FUNCTION_TAG): - skill_function_name = child_node.tag.split(FUNCTION_TAG)[1] + plugin_function_name = child_node.tag.split(FUNCTION_TAG)[1] ( - skill_name, + plugin_name, function_name, - ) = SequentialPlanParser.get_skill_function_names( - skill_function_name - ) + ) = SequentialPlanParser.get_plugin_function_names(plugin_function_name) if function_name: - skill_function = get_skill_function(skill_name, function_name) + plugin_function = get_plugin_function(plugin_name, function_name) - if skill_function is not None: - plan_step = Plan.from_function(skill_function) + if plugin_function is not None: + plan_step = Plan.from_function(plugin_function) function_variables = ContextVariables() function_outputs = [] function_results = [] - view = skill_function.describe() + view = plugin_function.describe() for p in view.parameters: function_variables.set(p.name, 
p.default_value) @@ -100,9 +100,7 @@ def to_plan_from_xml( function_outputs.append(child_node.attrib[attr]) function_results.append(child_node.attrib[attr]) else: - function_variables.set( - attr, child_node.attrib[attr] - ) + function_variables.set(attr, child_node.attrib[attr]) plan_step._outputs = function_outputs plan_step._parameters = function_variables @@ -112,24 +110,18 @@ def to_plan_from_xml( plan.add_steps([plan_step]) elif allow_missing_functions: - plan.add_steps([Plan.from_goal(skill_function_name)]) + plan.add_steps([Plan.from_goal(plugin_function_name)]) else: raise PlanningException( PlanningException.ErrorCodes.InvalidPlan, - f"Failed to find function '{skill_function_name}' in skill '{skill_name}'.", + f"Failed to find function '{plugin_function_name}' in plugin '{plugin_name}'.", ) return plan @staticmethod - def get_skill_function_names(skill_function_name: str) -> Tuple[str, str]: - skill_function_name_parts = skill_function_name.split(".") - skill_name = ( - skill_function_name_parts[0] if len(skill_function_name_parts) > 0 else "" - ) - function_name = ( - skill_function_name_parts[1] - if len(skill_function_name_parts) > 1 - else skill_function_name - ) - return skill_name, function_name + def get_plugin_function_names(plugin_function_name: str) -> Tuple[str, str]: + plugin_function_name_parts = plugin_function_name.split(".") + plugin_name = plugin_function_name_parts[0] if len(plugin_function_name_parts) > 0 else "" + function_name = plugin_function_name_parts[1] if len(plugin_function_name_parts) > 1 else plugin_function_name + return plugin_name, function_name diff --git a/python/semantic_kernel/planning/stepwise_planner/Plugins/StepwiseStep/config.json b/python/semantic_kernel/planning/stepwise_planner/Plugins/StepwiseStep/config.json new file mode 100644 index 000000000000..0706266c27fb --- /dev/null +++ b/python/semantic_kernel/planning/stepwise_planner/Plugins/StepwiseStep/config.json @@ -0,0 +1,30 @@ +{ + "schema": 1, + "description": "Given a request or command or goal generate multi-step plan to reach the goal. After each step LLM is called to perform the reasoning for the next step.", + "type": "completion", + "completion": { + "max_tokens": 1024, + "temperature": 0, + "top_p": 0, + "presence_penalty": 0, + "frequency_penalty": 0, + "stop_sequences": ["[OBSERVATION]", "\n[THOUGHT]"] + }, + "input_variables": [ + { + "name": "question", + "description": "The question to answer", + "defaultValue": "" + }, + { + "name": "agentScratchPad", + "description": "The agent's scratch pad", + "defaultValue": "" + }, + { + "name": "functionDescriptions", + "description": "The manual of the agent's functions", + "defaultValue": "" + } + ] + } \ No newline at end of file diff --git a/python/semantic_kernel/planning/stepwise_planner/Plugins/StepwiseStep/skprompt.txt b/python/semantic_kernel/planning/stepwise_planner/Plugins/StepwiseStep/skprompt.txt new file mode 100644 index 000000000000..894671d2804f --- /dev/null +++ b/python/semantic_kernel/planning/stepwise_planner/Plugins/StepwiseStep/skprompt.txt @@ -0,0 +1,67 @@ +[INSTRUCTION] +Answer the following questions as accurately as possible using the provided functions. + +[AVAILABLE FUNCTIONS] +The function definitions below are in the following format: +: + inputs: + - : + - ... + +{{$function_descriptions}} +[END AVAILABLE FUNCTIONS] + +[USAGE INSTRUCTIONS] +To use the functions, specify a JSON blob representing an action. 
The JSON blob should contain an "action" key with the name of the function to use, and an "action_variables" key with a JSON object of string values to use when calling the function. +Do not call functions directly; they must be invoked through an action. +The "action_variables" value should always include an "input" key, even if the input value is empty. Additional keys in the "action_variables" value should match the defined [PARAMETERS] of the named "action" in [AVAILABLE FUNCTIONS]. +Dictionary values in "action_variables" must be strings and represent the actual values to be passed to the function. +Ensure that the $JSON_BLOB contains only a SINGLE action; do NOT return multiple actions. +IMPORTANT: Use only the available functions listed in the [AVAILABLE FUNCTIONS] section. Do not attempt to use any other functions that are not specified. + +Here is an example of a valid $JSON_BLOB: +{ + "action": "functionName", + "action_variables": {"parameterName": "some value", ...} +} + +Here is another example of a valid $JSON_BLOB: +{ + "action": "_Function_.Name", + "action_variables": {"parameterName": "some value", ...} +} + +Here is another example of a valid $JSON_BLOB: +{ + "action": "FunctionName2", + "action_variables": {"parameterName": "some value", ...} +} + +The $JSON_BLOB must contain an "action_variables" key, with the {"parameterName": "some value", ...} value in the response. +[END USAGE INSTRUCTIONS] +[END INSTRUCTION] + +[THOUGHT PROCESS] +[QUESTION] +the input question I must answer +[THOUGHT] +To solve this problem, I should carefully analyze the given question and identify the necessary steps. Any facts I discover earlier in my thought process should be repeated here to keep them readily available. +[ACTION] +{ + "action": "functionName", + "action_variables": {"parameterName": "some value", ...} +} +[OBSERVATION] +The result of the action will be provided here. +... (These Thought/Action/Observation can repeat until the final answer is reached.) +[FINAL ANSWER] +Once I have gathered all the necessary observations and performed any required actions, I can provide the final answer in a clear and human-readable format. +[END THOUGHT PROCESS] + +Let's break down the problem step by step and think about the best approach. Questions and observations should be followed by a single thought and an optional single action to take. + +Begin! + +[QUESTION] +{{$question}} +{{$agent_scratch_pad}} \ No newline at end of file diff --git a/python/semantic_kernel/planning/stepwise_planner/Skills/StepwiseStep/config.json b/python/semantic_kernel/planning/stepwise_planner/Skills/StepwiseStep/config.json deleted file mode 100644 index 0802ff14375e..000000000000 --- a/python/semantic_kernel/planning/stepwise_planner/Skills/StepwiseStep/config.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "schema": 1, - "description": "Given a request or command or goal generate multi-step plan to reach the goal. 
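The [USAGE INSTRUCTIONS] above define the wire format the stepwise planner expects back from the model: a single [ACTION] block holding a JSON object with "action" and "action_variables" keys. Below is a minimal sketch of pulling that blob out of a raw response, reusing the S_ACTION_REGEX pattern that stepwise_planner.py introduces later in this diff; the plugin and argument names in the sample response are hypothetical.

import json
import re

# Same pattern as S_ACTION_REGEX in stepwise_planner.py below.
S_ACTION_REGEX = re.compile(r"\[ACTION\][^{}]*({(?:[^{}]*{[^{}]*})*[^{}]*})", re.DOTALL)

llm_response = """[THOUGHT]
I need to add the two numbers before I can answer.
[ACTION]
{
  "action": "MathPlugin.Add",
  "action_variables": {"input": "2", "number2": "3"}
}"""

match = S_ACTION_REGEX.search(llm_response)
if match:
    blob = json.loads(match.group(1))
    print(blob["action"])            # -> MathPlugin.Add
    print(blob["action_variables"])  # -> {'input': '2', 'number2': '3'}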
After each step LLM is called to perform the reasoning for the next step.", - "type": "completion", - "completion": { - "max_tokens": 1024, - "temperature": 0, - "top_p": 0, - "presence_penalty": 0, - "frequency_penalty": 0, - "stop_sequences": ["[OBSERVATION]", "\n[THOUGHT]"] - }, - "input": { - "parameters": [ - { - "name": "question", - "description": "The question to answer", - "defaultValue": "" - }, - { - "name": "agentScratchPad", - "description": "The agent's scratch pad", - "defaultValue": "" - }, - { - "name": "functionDescriptions", - "description": "The manual of the agent's functions", - "defaultValue": "" - } - ] - } - } \ No newline at end of file diff --git a/python/semantic_kernel/planning/stepwise_planner/Skills/StepwiseStep/skprompt.txt b/python/semantic_kernel/planning/stepwise_planner/Skills/StepwiseStep/skprompt.txt deleted file mode 100644 index 2c4576b30056..000000000000 --- a/python/semantic_kernel/planning/stepwise_planner/Skills/StepwiseStep/skprompt.txt +++ /dev/null @@ -1,53 +0,0 @@ -[INSTRUCTION] -Answer the following questions as accurately as possible using the provided functions. - -[AVAILABLE FUNCTIONS] -The function definitions below are in the following format: -: - inputs: - - : - - ... - -{{$function_descriptions}} -[END AVAILABLE FUNCTIONS] - -[USAGE INSTRUCTIONS] -To use the functions, specify a JSON blob representing an action. The JSON blob should contain an "action" key with the name of the function to use, and an "action_variables" key with a JSON object of string values to use when calling the function. -Do not call functions directly; they must be invoked through an action. -The "action_variables" value should always include an "input" key, even if the input value is empty. Additional keys in the "action_variables" value should match the defined [PARAMETERS] of the named "action" in [AVAILABLE FUNCTIONS]. -Dictionary values in "action_variables" must be strings and represent the actual values to be passed to the function. -Ensure that the $JSON_BLOB contains only a SINGLE action; do NOT return multiple actions. -IMPORTANT: Use only the available functions listed in the [AVAILABLE FUNCTIONS] section. Do not attempt to use any other functions that are not specified. - -Here is an example of a valid $JSON_BLOB: -{ - "action": "functionName", - "action_variables": {"parameterName": "some value", ...} -} -[END USAGE INSTRUCTIONS] -[END INSTRUCTION] - -[THOUGHT PROCESS] -[QUESTION] -the input question I must answer -[THOUGHT] -To solve this problem, I should carefully analyze the given question and identify the necessary steps. Any facts I discover earlier in my thought process should be repeated here to keep them readily available. -[ACTION] -{ - "action": "functionName", - "action_variables": {"parameterName": "some value", ...} -} -[OBSERVATION] -The result of the action will be provided here. -... (These Thought/Action/Observation can repeat until the final answer is reached.) -[FINAL ANSWER] -Once I have gathered all the necessary observations and performed any required actions, I can provide the final answer in a clear and human-readable format. -[END THOUGHT PROCESS] - -Let's break down the problem step by step and think about the best approach. Questions and observations should be followed by a single thought and an optional single action to take. - -Begin! 
- -[QUESTION] -{{$question}} -{{$agent_scratch_pad}} \ No newline at end of file diff --git a/python/semantic_kernel/planning/stepwise_planner/stepwise_planner.py b/python/semantic_kernel/planning/stepwise_planner/stepwise_planner.py index 660a95693309..b6c2f0060e8d 100644 --- a/python/semantic_kernel/planning/stepwise_planner/stepwise_planner.py +++ b/python/semantic_kernel/planning/stepwise_planner/stepwise_planner.py @@ -3,18 +3,24 @@ import asyncio import itertools import json +import logging import os import re from typing import TYPE_CHECKING, Dict, List from semantic_kernel.kernel import Kernel -from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.orchestration.kernel_context import KernelContext from semantic_kernel.planning.plan import Plan from semantic_kernel.planning.planning_exception import PlanningException from semantic_kernel.planning.stepwise_planner.stepwise_planner_config import ( StepwisePlannerConfig, ) from semantic_kernel.planning.stepwise_planner.system_step import SystemStep +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.kernel_function_context_parameter_decorator import ( + kernel_function_context_parameter, +) +from semantic_kernel.plugin_definition.kernel_function_decorator import kernel_function from semantic_kernel.semantic_functions.prompt_template import PromptTemplate from semantic_kernel.semantic_functions.prompt_template_config import ( PromptTemplateConfig, @@ -22,19 +28,15 @@ from semantic_kernel.semantic_functions.semantic_function_config import ( SemanticFunctionConfig, ) -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.sk_function_context_parameter_decorator import ( - sk_function_context_parameter, -) -from semantic_kernel.skill_definition.sk_function_decorator import sk_function if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_function_base import SKFunctionBase + from semantic_kernel.orchestration.kernel_function import KernelFunction +logger: logging.Logger = logging.getLogger(__name__) CUR_DIR = os.path.dirname(os.path.realpath(__file__)) -PROMPT_CONFIG_FILE_PATH = os.path.join(CUR_DIR, "Skills/StepwiseStep/config.json") -PROMPT_TEMPLATE_FILE_PATH = os.path.join(CUR_DIR, "Skills/StepwiseStep/skprompt.txt") +PROMPT_CONFIG_FILE_PATH = os.path.join(CUR_DIR, "Plugins/StepwiseStep/config.json") +PROMPT_TEMPLATE_FILE_PATH = os.path.join(CUR_DIR, "Plugins/StepwiseStep/skprompt.txt") def read_file(file_path: str) -> str: @@ -42,22 +44,17 @@ def read_file(file_path: str) -> str: return file.read() -# TODO: Original C# uses "StepwisePlanner_Excluded" for RESTRICTED_SKILL_NAME -RESTRICTED_SKILL_NAME = "StepwisePlanner" -S_FINAL_ANSWER_REGEX = re.compile( - r"\[FINAL[_\s\-]ANSWER\](?P.+)", re.DOTALL -) -S_THOUGHT_REGEX = re.compile( - r"(\[THOUGHT\])?(?P.+?)(?=\[ACTION\]|$)", re.DOTALL -) +# TODO: Original C# uses "StepwisePlanner_Excluded" for RESTRICTED_PLUGIN_NAME +RESTRICTED_PLUGIN_NAME = "StepwisePlanner" +S_FINAL_ANSWER_REGEX = re.compile(r"\[FINAL[_\s\-]ANSWER\](?P.+)", re.DOTALL) +S_THOUGHT_REGEX = re.compile(r"(\[THOUGHT\])?(?P.+?)(?=\[ACTION\]|$)", re.DOTALL) S_ACTION_REGEX = re.compile(r"\[ACTION\][^{}]*({(?:[^{}]*{[^{}]*})*[^{}]*})", re.DOTALL) ACTION = "[ACTION]" THOUGHT = "[THOUGHT]" OBSERVATION = "[OBSERVATION]" SCRATCH_PAD_PREFIX = ( - "This was my previous work (but they haven't seen any of it!" 
- " They only see what I return as final answer):" + "This was my previous work (but they haven't seen any of it!" " They only see what I return as final answer):" ) @@ -67,8 +64,8 @@ def is_null_or_empty(value: str) -> bool: class StepwisePlanner: config: StepwisePlannerConfig - _context: "SKContext" - _function_flow_function: "SKFunctionBase" + _context: "KernelContext" + _function_flow_function: "KernelFunction" def __init__( self, @@ -81,31 +78,26 @@ def __init__( self._kernel = kernel self.config = config or StepwisePlannerConfig() - self.config.excluded_skills.append(RESTRICTED_SKILL_NAME) + self.config.excluded_plugins.append(RESTRICTED_PLUGIN_NAME) prompt_config = prompt_user_config or PromptTemplateConfig() prompt_template = prompt or read_file(PROMPT_TEMPLATE_FILE_PATH) if prompt_user_config is None: - prompt_config = PromptTemplateConfig.from_json( - read_file(PROMPT_CONFIG_FILE_PATH) - ) + prompt_config = PromptTemplateConfig.from_json(read_file(PROMPT_CONFIG_FILE_PATH)) - prompt_config.completion.max_tokens = self.config.max_tokens + prompt_config.execution_settings.extension_data["max_tokens"] = self.config.max_tokens self._system_step_function = self.import_semantic_function( kernel, "StepwiseStep", prompt_template, prompt_config ) - self._native_functions = self._kernel.import_skill(self, RESTRICTED_SKILL_NAME) + self._native_functions = self._kernel.import_plugin(self, RESTRICTED_PLUGIN_NAME) self._context = kernel.create_new_context() - self._logger = self._kernel.logger def create_plan(self, goal: str) -> Plan: if is_null_or_empty(goal): - raise PlanningException( - PlanningException.ErrorCodes.InvalidGoal, "The goal specified is empty" - ) + raise PlanningException(PlanningException.ErrorCodes.InvalidGoal, "The goal specified is empty") function_descriptions = self.get_function_descriptions() @@ -115,7 +107,7 @@ def create_plan(self, goal: str) -> Plan: plan_step._outputs.append("agent_scratch_pad") plan_step._outputs.append("step_count") - plan_step._outputs.append("skill_count") + plan_step._outputs.append("plugin_count") plan_step._outputs.append("steps_taken") plan = Plan(goal) @@ -125,14 +117,10 @@ def create_plan(self, goal: str) -> Plan: return plan # TODO: sync C# with https://github.com/microsoft/semantic-kernel/pull/1195 - @sk_function(name="ExecutePlan", description="Execute a plan") - @sk_function_context_parameter( - name="question", description="The question to answer" - ) - @sk_function_context_parameter( - name="function_descriptions", description="List of tool descriptions" - ) - async def execute_plan_async(self, context: SKContext) -> SKContext: + @kernel_function(name="ExecutePlan", description="Execute a plan") + @kernel_function_context_parameter(name="question", description="The question to answer") + @kernel_function_context_parameter(name="function_descriptions", description="List of tool descriptions") + async def execute_plan(self, context: KernelContext) -> KernelContext: question = context["question"] steps_taken: List[SystemStep] = [] @@ -142,9 +130,7 @@ async def execute_plan_async(self, context: SKContext) -> SKContext: context.variables.set("agent_scratch_pad", scratch_pad) - llm_response = await self._system_step_function.invoke_async( - context=context - ) + llm_response = await self._system_step_function.invoke(context=context) if llm_response.error_occurred: raise PlanningException( @@ -154,13 +140,13 @@ async def execute_plan_async(self, context: SKContext) -> SKContext: ) action_text = llm_response.result.strip() - 
self._logger.debug(f"Response: {action_text}") + logger.debug(f"Response: {action_text}") next_step = self.parse_result(action_text) steps_taken.append(next_step) if not is_null_or_empty(next_step.final_answer): - self._logger.debug(f"Final Answer: {next_step.final_answer}") + logger.debug(f"Final Answer: {next_step.final_answer}") context.variables.update(next_step.final_answer) updated_scratch_pad = self.create_scratch_pad(question, steps_taken) @@ -171,19 +157,17 @@ async def execute_plan_async(self, context: SKContext) -> SKContext: return context - self._logger.debug("Thoughts: {next_step.thought}") + logger.debug(f"Thoughts: {next_step.thought}") if not is_null_or_empty(next_step.action): - self._logger.info(f"Action: {next_step.action}. Iteration: {i+1}.") - self._logger.debug( + logger.info(f"Action: {next_step.action}. Iteration: {i+1}.") + logger.debug( f"Action: {next_step.action}({next_step.action_variables}). Iteration: {i+1}.", ) try: await asyncio.sleep(self.config.min_iteration_time_ms / 1000) - result = await self.invoke_action_async( - next_step.action, next_step.action_variables - ) + result = await self.invoke_action(next_step.action, next_step.action_variables) if is_null_or_empty(result): next_step.observation = "Got no result from action" @@ -191,24 +175,18 @@ async def execute_plan_async(self, context: SKContext) -> SKContext: next_step.observation = result except Exception as e: - next_step.observation = ( - f"Error invoking action {next_step.action}: {str(e)}" - ) - self._logger.warning( - f"Error invoking action {next_step.action}" - ) - - self._logger.debug(f"Observation: {next_step.observation}") + next_step.observation = f"Error invoking action {next_step.action}: {str(e)}" + logger.warning(f"Error invoking action {next_step.action}") + + logger.debug(f"Observation: {next_step.observation}") else: - self._logger.info("Action: No action to take") + logger.info("Action: No action to take") # sleep 3 seconds await asyncio.sleep(self.config.min_iteration_time_ms / 1000) steps_taken_str = json.dumps([s.__dict__ for s in steps_taken], indent=4) - context.variables.update( - f"Result not found, review _steps_taken to see what happened.\n{steps_taken_str}" - ) + context.variables.update(f"Result not found, review _steps_taken to see what happened.\n{steps_taken_str}") else: context.variables.update("Question not found.") @@ -246,16 +224,12 @@ def parse_result(self, input: str): system_step_results = json.loads(action_json) if system_step_results is None or len(system_step_results) == 0: - result.observation = ( - f"System step parsing error, empty JSON: {action_json}" - ) + result.observation = f"System step parsing error, empty JSON: {action_json}" else: result.action = system_step_results["action"] result.action_variables = system_step_results["action_variables"] except Exception: - result.observation = ( - f"System step parsing error, invalid JSON: {action_json}" - ) + result.observation = f"System step parsing error, invalid JSON: {action_json}" if is_null_or_empty(result.thought) and is_null_or_empty(result.action): result.observation = ( @@ -265,13 +239,9 @@ def parse_result(self, input: str): return result - def add_execution_stats_to_context( - self, steps_taken: List[SystemStep], context: SKContext - ): + def add_execution_stats_to_context(self, steps_taken: List[SystemStep], context: KernelContext): context.variables.set("step_count", str(len(steps_taken))) - context.variables.set( - "steps_taken", json.dumps([s.__dict__ for s in steps_taken], indent=4) - ) 
+ context.variables.set("steps_taken", json.dumps([s.__dict__ for s in steps_taken], indent=4)) action_counts: Dict[str, int] = {} for step in steps_taken: @@ -281,15 +251,11 @@ def add_execution_stats_to_context( current_count = action_counts.get(step.action, 0) action_counts[step.action] = current_count + 1 - skill_call_list_with_counts = [ - f"{skill}({action_counts[skill]})" for skill in action_counts - ] - skill_call_list_with_counts = ", ".join(skill_call_list_with_counts) - skill_call_count_str = str(sum(action_counts.values())) + plugin_call_list_with_counts = [f"{plugin}({action_counts[plugin]})" for plugin in action_counts] + plugin_call_list_with_counts = ", ".join(plugin_call_list_with_counts) + plugin_call_count_str = str(sum(action_counts.values())) - context.variables.set( - "skill_count", f"{skill_call_count_str} ({skill_call_list_with_counts})" - ) + context.variables.set("plugin_count", f"{plugin_call_count_str} ({plugin_call_list_with_counts})") def create_scratch_pad(self, question: str, steps_taken: List[SystemStep]) -> str: if len(steps_taken) == 0: @@ -306,17 +272,13 @@ def create_scratch_pad(self, question: str, steps_taken: List[SystemStep]) -> st for i in reversed(range(len(steps_taken))): if len(scratch_pad_lines) / 4.0 > (self.config.max_tokens * 0.75): - self._logger.debug( - f"Scratchpad is too long, truncating. Skipping {i + 1} steps." - ) + logger.debug(f"Scratchpad is too long, truncating. Skipping {i + 1} steps.") break s = steps_taken[i] if not is_null_or_empty(s.observation): - scratch_pad_lines.insert( - insert_point, f"{OBSERVATION}\n{s.observation}" - ) + scratch_pad_lines.insert(insert_point, f"{OBSERVATION}\n{s.observation}") if not is_null_or_empty(s.action): scratch_pad_lines.insert( @@ -330,20 +292,14 @@ def create_scratch_pad(self, question: str, steps_taken: List[SystemStep]) -> st scratch_pad = "\n".join(scratch_pad_lines).strip() if not (is_null_or_empty(scratch_pad.strip())): - self._logger.debug(f"Scratchpad: {scratch_pad}") + logger.debug(f"Scratchpad: {scratch_pad}") return scratch_pad - async def invoke_action_async( - self, action_name: str, action_variables: Dict[str, str] - ) -> str: + async def invoke_action(self, action_name: str, action_variables: Dict[str, str]) -> str: available_functions = self.get_available_functions() target_function = next( - ( - f - for f in available_functions - if self.to_fully_qualified_name(f) == action_name - ), + (f for f in available_functions if self.to_fully_qualified_name(f) == action_name), None, ) @@ -354,34 +310,30 @@ async def invoke_action_async( ) try: - function = self._kernel.func( - target_function.skill_name, target_function.name - ) + function = self._kernel.func(target_function.plugin_name, target_function.name) action_context = self.create_action_context(action_variables) - result = await function.invoke_async(context=action_context) + result = await function.invoke(context=action_context) if result.error_occurred: - self._logger.error(f"Error occurred: {result.last_exception}") + logger.error(f"Error occurred: {result.last_exception}") return f"Error occurred: {result.last_exception}" - self._logger.debug( - f"Invoked {target_function.name}. Result: {result.result}" - ) + logger.debug(f"Invoked {target_function.name}. Result: {result.result}") return result.result except Exception as e: - self._logger.error( + logger.error( e, - f"Something went wrong in system step: {target_function.skill_name}.{target_function.name}. 
Error: {e}", + f"Something went wrong in system step: {target_function.plugin_name}.{target_function.name}. Error: {e}", # noqa: E501 ) return ( "Something went wrong in system step: ", - f"{target_function.skill_name}.{target_function.name}. Error: {e}", + f"{target_function.plugin_name}.{target_function.name}. Error: {e}", ) - def create_action_context(self, action_variables: Dict[str, str]) -> SKContext: + def create_action_context(self, action_variables: Dict[str, str]) -> KernelContext: action_context = self._kernel.create_new_context() if action_variables is not None: for k, v in action_variables.items(): @@ -390,9 +342,9 @@ def create_action_context(self, action_variables: Dict[str, str]) -> SKContext: return action_context def get_available_functions(self) -> List[FunctionView]: - functions_view = self._context.skills.get_functions_view() + functions_view = self._context.plugins.get_functions_view() - excluded_skills = self.config.excluded_skills or [] + excluded_plugins = self.config.excluded_plugins or [] excluded_functions = self.config.excluded_functions or [] available_functions: List[FunctionView] = [ @@ -403,23 +355,16 @@ def get_available_functions(self) -> List[FunctionView]: available_functions = [ func for func in available_functions - if ( - func.skill_name not in excluded_skills - and func.name not in excluded_functions - ) + if (func.plugin_name not in excluded_plugins and func.name not in excluded_functions) ] - available_functions = sorted( - available_functions, key=lambda x: (x.skill_name, x.name) - ) + available_functions = sorted(available_functions, key=lambda x: (x.plugin_name, x.name)) return available_functions def get_function_descriptions(self) -> str: available_functions = self.get_available_functions() - function_descriptions = "\n".join( - [self.to_manual_string(f) for f in available_functions] - ) + function_descriptions = "\n".join([self.to_manual_string(f) for f in available_functions]) return function_descriptions def import_semantic_function( @@ -428,24 +373,16 @@ def import_semantic_function( function_name: str, prompt_template: str, config: PromptTemplateConfig = None, - ) -> "SKFunctionBase": - template = PromptTemplate( - prompt_template, kernel.prompt_template_engine, config - ) + ) -> "KernelFunction": + template = PromptTemplate(prompt_template, kernel.prompt_template_engine, config) function_config = SemanticFunctionConfig(config, template) - return kernel.register_semantic_function( - RESTRICTED_SKILL_NAME, function_name, function_config - ) + return kernel.register_semantic_function(RESTRICTED_PLUGIN_NAME, function_name, function_config) def to_manual_string(self, function: FunctionView) -> str: inputs = [ f" - {parameter.name}: {parameter.description}" - + ( - f" (default value={parameter.default_value})" - if parameter.default_value - else "" - ) + + (f" (default value={parameter.default_value})" if parameter.default_value else "") for parameter in function.parameters ] inputs = "\n".join(inputs) @@ -458,4 +395,4 @@ def to_manual_string(self, function: FunctionView) -> str: return f"{self.to_fully_qualified_name(function)}: {function_description}\n inputs:\n{inputs}\n" def to_fully_qualified_name(self, function: FunctionView): - return f"{function.skill_name}.{function.name}" + return f"{function.plugin_name}.{function.name}" diff --git a/python/semantic_kernel/planning/stepwise_planner/stepwise_planner_config.py b/python/semantic_kernel/planning/stepwise_planner/stepwise_planner_config.py index 0654a829dd9c..eabf5abc324e 100644 --- 
a/python/semantic_kernel/planning/stepwise_planner/stepwise_planner_config.py +++ b/python/semantic_kernel/planning/stepwise_planner/stepwise_planner_config.py @@ -8,7 +8,7 @@ def __init__( self, relevancy_threshold: Optional[float] = None, max_relevant_functions: int = 100, - excluded_skills: List[str] = None, + excluded_plugins: List[str] = None, excluded_functions: List[str] = None, included_functions: List[str] = None, max_tokens: int = 1024, @@ -17,7 +17,7 @@ def __init__( ): self.relevancy_threshold: float = relevancy_threshold self.max_relevant_functions: int = max_relevant_functions - self.excluded_skills: List[str] = excluded_skills or [] + self.excluded_plugins: List[str] = excluded_plugins or [] self.excluded_functions: List[str] = excluded_functions or [] self.included_functions: List[str] = included_functions or [] self.max_tokens: int = max_tokens diff --git a/python/semantic_kernel/plugin_definition/__init__.py b/python/semantic_kernel/plugin_definition/__init__.py new file mode 100644 index 000000000000..7da6654d819a --- /dev/null +++ b/python/semantic_kernel/plugin_definition/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.plugin_definition.kernel_function_context_parameter_decorator import ( + kernel_function_context_parameter, +) +from semantic_kernel.plugin_definition.kernel_function_decorator import kernel_function + +__all__ = [ + "kernel_function", + "kernel_function_context_parameter", +] diff --git a/python/semantic_kernel/plugin_definition/function_view.py b/python/semantic_kernel/plugin_definition/function_view.py new file mode 100644 index 000000000000..7274711901d7 --- /dev/null +++ b/python/semantic_kernel/plugin_definition/function_view.py @@ -0,0 +1,57 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import List + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition.parameter_view import ParameterView +from semantic_kernel.utils.validation import validate_function_name + + +class FunctionView(KernelBaseModel): + name: str + plugin_name: str + description: str + is_semantic: bool + parameters: List[ParameterView] + is_asynchronous: bool = True + + def __init__( + self, + name: str, + plugin_name: str, + description: str, + parameters: List[ParameterView], + is_semantic: bool, + is_asynchronous: bool = True, + ) -> None: + validate_function_name(name) + super().__init__( + name=name, + plugin_name=plugin_name, + description=description, + parameters=parameters, + is_semantic=is_semantic, + is_asynchronous=is_asynchronous, + ) + + def __eq__(self, other): + """ + Compare to another FunctionView instance. + + Args: + other (FunctionView): The other FunctionView instance. + + Returns: + True if the two instances are equal, False otherwise. + """ + if not isinstance(other, FunctionView): + return False + + return ( + self.name == other.name + and self.plugin_name == other.plugin_name + and self.description == other.description + and self.parameters == other.parameters + and self.is_semantic == other.is_semantic + and self.is_asynchronous == other.is_asynchronous + ) diff --git a/python/semantic_kernel/plugin_definition/functions_view.py b/python/semantic_kernel/plugin_definition/functions_view.py new file mode 100644 index 000000000000..52dde0ad828f --- /dev/null +++ b/python/semantic_kernel/plugin_definition/functions_view.py @@ -0,0 +1,56 @@ +# Copyright (c) Microsoft. All rights reserved. 
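As a quick illustration of the view classes added above, the sketch below constructs a FunctionView by hand; the plugin, function, and parameter names are hypothetical, and ParameterView is the pydantic model introduced later in this diff (plugin_definition/parameter_view.py).

from semantic_kernel.plugin_definition.function_view import FunctionView
from semantic_kernel.plugin_definition.parameter_view import ParameterView

view = FunctionView(
    name="Translate",
    plugin_name="Writer",
    description="Translate the input text",
    parameters=[ParameterView(name="input", description="Text to translate", default_value="")],
    is_semantic=True,
)

# The planners build fully qualified names from these two fields.
print(f"{view.plugin_name}.{view.name}")  # -> Writer.Translate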
+ +from typing import Dict, List + +from pydantic import Field + +from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition.function_view import FunctionView + + +class FunctionsView(KernelBaseModel): + semantic_functions: Dict[str, List[FunctionView]] = Field(default_factory=dict) + native_functions: Dict[str, List[FunctionView]] = Field(default_factory=dict) + + def add_function(self, view: FunctionView) -> "FunctionsView": + if view.is_semantic: + if view.plugin_name not in self.semantic_functions: + self.semantic_functions[view.plugin_name] = [] + self.semantic_functions[view.plugin_name].append(view) + else: + if view.plugin_name not in self.native_functions: + self.native_functions[view.plugin_name] = [] + self.native_functions[view.plugin_name].append(view) + + return self + + def is_semantic(self, plugin_name: str, function_name: str) -> bool: + as_sf = self.semantic_functions.get(plugin_name, []) + as_sf = any(f.name == function_name for f in as_sf) + + as_nf = self.native_functions.get(plugin_name, []) + as_nf = any(f.name == function_name for f in as_nf) + + if as_sf and as_nf: + raise KernelException( + KernelException.ErrorCodes.AmbiguousImplementation, + (f"There are 2 functions with the same name: {function_name}." "One is native and the other semantic."), + ) + + return as_sf + + def is_native(self, plugin_name: str, function_name: str) -> bool: + as_sf = self.semantic_functions.get(plugin_name, []) + as_sf = any(f.name == function_name for f in as_sf) + + as_nf = self.native_functions.get(plugin_name, []) + as_nf = any(f.name == function_name for f in as_nf) + + if as_sf and as_nf: + raise KernelException( + KernelException.ErrorCodes.AmbiguousImplementation, + (f"There are 2 functions with the same name: {function_name}." "One is native and the other semantic."), + ) + + return as_nf diff --git a/python/semantic_kernel/plugin_definition/kernel_function_context_parameter_decorator.py b/python/semantic_kernel/plugin_definition/kernel_function_context_parameter_decorator.py new file mode 100644 index 000000000000..bc60e1a01b51 --- /dev/null +++ b/python/semantic_kernel/plugin_definition/kernel_function_context_parameter_decorator.py @@ -0,0 +1,34 @@ +# Copyright (c) Microsoft. All rights reserved. + + +def kernel_function_context_parameter( + *, name: str, description: str, default_value: str = "", type: str = "string", required: bool = False +): + """ + Decorator for kernel function context parameters. 
+ + Args: + name -- The name of the context parameter + description -- The description of the context parameter + default_value -- The default value of the context parameter + type -- The type of the context parameter, used for function calling + required -- Whether the context parameter is required + + """ + + def decorator(func): + if not hasattr(func, "__kernel_function_context_parameters__"): + func.__kernel_function_context_parameters__ = [] + + func.__kernel_function_context_parameters__.append( + { + "name": name, + "description": description, + "default_value": default_value, + "type": type, + "required": required, + } + ) + return func + + return decorator diff --git a/python/semantic_kernel/plugin_definition/kernel_function_decorator.py b/python/semantic_kernel/plugin_definition/kernel_function_decorator.py new file mode 100644 index 000000000000..949a07efb8e6 --- /dev/null +++ b/python/semantic_kernel/plugin_definition/kernel_function_decorator.py @@ -0,0 +1,29 @@ +# Copyright (c) Microsoft. All rights reserved. + + +def kernel_function( + *, + description: str = "", + name: str = "", + input_description: str = "", + input_default_value: str = "", +): + """ + Decorator for kernel functions. + + Args: + description -- The description of the function + name -- The name of the function + input_description -- The description of the input + input_default_value -- The default value of the input + """ + + def decorator(func): + func.__kernel_function__ = True + func.__kernel_function_description__ = description or "" + func.__kernel_function_name__ = name or func.__name__ + func.__kernel_function_input_description__ = input_description or "" + func.__kernel_function_input_default_value__ = input_default_value or "" + return func + + return decorator diff --git a/python/semantic_kernel/plugin_definition/kernel_plugin.py b/python/semantic_kernel/plugin_definition/kernel_plugin.py new file mode 100644 index 000000000000..19d483137419 --- /dev/null +++ b/python/semantic_kernel/plugin_definition/kernel_plugin.py @@ -0,0 +1,107 @@ +# Copyright (c) Microsoft. All rights reserved. + +import sys +from typing import Dict, List, Optional + +if sys.version_info >= (3, 9): + from typing import Annotated +else: + from typing_extensions import Annotated + +from pydantic import Field, StringConstraints + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.orchestration.kernel_function import KernelFunction + + +class KernelPlugin(KernelBaseModel): + """ + Represents a Kernel Plugin with functions. + + Attributes: + name (str): The name of the plugin. The name can be upper/lower + case letters and underscores. + description (str): The description of the plugin. + functions (Dict[str, KernelFunction]): The functions in the plugin, + indexed by their name. + """ + + name: Annotated[str, StringConstraints(pattern=r"^[A-Za-z_]+$", min_length=1)] + description: Optional[str] = Field(default=None) + functions: Optional[Dict[str, KernelFunction]] = Field(default_factory=dict) + + def __init__(self, name: str, description: Optional[str] = None, functions: Optional[List[KernelFunction]] = None): + """ + Initialize a new instance of the KernelPlugin class + + Args: + name (str): The name of the plugin. + description (Optional[str]): The description of the plugin. + functions (List[KernelFunction]): The functions in the plugin. + + Raises: + ValueError: If the functions list contains duplicate function names. 
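The two decorators above replace sk_function and sk_function_context_parameter. Here is a minimal sketch of applying them to a native plugin method, mirroring the usage on StepwisePlanner.execute_plan earlier in this diff; the GreeterPlugin class and its names are hypothetical.

from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter


class GreeterPlugin:
    @kernel_function(name="Greet", description="Greet a person by name")
    @kernel_function_context_parameter(name="name", description="Who to greet", default_value="world")
    def greet(self, context) -> str:
        # Context variables are read the same way execute_plan reads "question".
        return f"Hello, {context['name']}!"

# Registration then follows the same pattern the planner uses for itself:
# functions = kernel.import_plugin(GreeterPlugin(), "Greeter")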
+ """ + functions_dict = {} + if functions is not None: + for function in functions: + if function.name in functions_dict: + raise ValueError(f"Duplicate function name detected: {function.name}") + functions_dict[function.name] = function + super().__init__(name=name, description=description, functions=functions_dict) + + def __len__(self) -> int: + """ + Gets the number of functions in the plugin. + + Returns: + The number of functions in the plugin. + + """ + return len(self.functions) + + def __contains__(self, function_name: str) -> bool: + """ + Checks if the plugin contains a function with the specified name. + + Args: + function_name (str): The name of the function. + + Returns: + True if the plugin contains a function with the specified name, False otherwise. + """ + return function_name in self.functions.keys() + + def __getitem__(self, name: str) -> "KernelFunction": + """Define the [] operator for the plugin + + Args: + name (str): The name of the function to retrieve. + + Returns: + The function if it exists, None otherwise. + + Raises: + KeyError: If the function does not exist. + """ + if name not in self.functions: + raise KeyError(f"Function {name} not found.") + return self.functions[name] + + @classmethod + def from_functions( + cls, functions: List["KernelFunction"], plugin_name: str, description: Optional[str] = None + ) -> "KernelPlugin": + """ + Creates a KernelPlugin from a KernelFunction instance. + + Args: + functions (List[KernelFunction]): The functions to create the plugin from. + plugin_name (Optional[str]): The name of the plugin. If not specified, + the name of the function will be used. + description (Optional[str]): The description of the plugin. + + Returns: + A KernelPlugin instance. + """ + return cls(name=plugin_name, description=description, functions=functions) diff --git a/python/semantic_kernel/plugin_definition/kernel_plugin_collection.py b/python/semantic_kernel/plugin_definition/kernel_plugin_collection.py new file mode 100644 index 000000000000..c658b9f15a7d --- /dev/null +++ b/python/semantic_kernel/plugin_definition/kernel_plugin_collection.py @@ -0,0 +1,229 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Any, Dict, Iterable, List, Optional, TypeVar, Union + +from pydantic import Field + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.functions_view import FunctionsView +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin + +# To support Python 3.8, need to use TypeVar since Iterable is not scriptable +KernelPluginType = TypeVar("KernelPluginType", bound=KernelPlugin) + + +class KernelPluginCollection(KernelBaseModel): + """ + The Kernel Plugin Collection class. This class is used to store a collection of plugins. + + Attributes: + plugins (Dict[str, KernelPlugin]): The plugins in the collection, indexed by their name. + """ + + plugins: Optional[Dict[str, KernelPlugin]] = Field(default_factory=dict) + + def __init__(self, plugins: Union[None, "KernelPluginCollection", Iterable[KernelPluginType]] = None): + """ + Initialize a new instance of the KernelPluginCollection class + + Args: + plugins (Union[None, KernelPluginCollection, Iterable[KernelPlugin]]): The plugins to add + to the collection. If None, an empty collection is created. If a KernelPluginCollection, + the plugins are copied from the other collection. 
If an iterable of KernelPlugin, + the plugins are added to the collection. + + Raises: + ValueError: If the plugins is not None, a KernelPluginCollection, or an iterable of KernelPlugin. + """ + if plugins is None: + plugins = {} + elif isinstance(plugins, KernelPluginCollection): + # Extract plugins from another KernelPluginCollection instance + plugins = {plugin.name: plugin for plugin in plugins.plugins.values()} + elif isinstance(plugins, Iterable): + # Process an iterable of plugins + plugins = self._process_plugins_iterable(plugins) + else: + raise ValueError("Invalid type for plugins") + + super().__init__(plugins=plugins) + + @staticmethod + def _process_plugins_iterable(plugins_input: Iterable[KernelPlugin]) -> Dict[str, KernelPlugin]: + plugins_dict = {} + for plugin in plugins_input: + if plugin is None: + raise ValueError("Plugin and plugin.name must not be None") + if plugin.name in plugins_dict: + raise ValueError(f"Duplicate plugin name detected: {plugin.name}") + plugins_dict[plugin.name] = plugin + return plugins_dict + + def add(self, plugin: KernelPlugin) -> None: + """ + Add a single plugin to the collection + + Args: + plugin (KernelPlugin): The plugin to add to the collection. + + Raises: + ValueError: If the plugin or plugin.name is None. + """ + if plugin is None: + raise ValueError("Plugin must not be None") + if plugin.name in self.plugins: + raise ValueError(f"Plugin with name {plugin.name} already exists") + self.plugins[plugin.name] = plugin + + def add_plugin_from_functions(self, plugin_name: str, functions: List[KernelFunction]) -> None: + """ + Add a function to a new plugin in the collection + + Args: + plugin_name (str): The name of the plugin to create. + functions (List[KernelFunction]): The functions to add to the plugin. + + Raises: + ValueError: If the function or plugin_name is None or invalid. + """ + if not functions or not plugin_name: + raise ValueError("Functions or plugin_name must not be None or empty") + if plugin_name in self.plugins: + raise ValueError(f"Plugin with name {plugin_name} already exists") + + plugin = KernelPlugin.from_functions(plugin_name=plugin_name, functions=functions) + self.plugins[plugin_name] = plugin + + def add_functions_to_plugin(self, functions: List[KernelFunction], plugin_name: str) -> None: + """ + Add functions to a plugin in the collection + + Args: + functions (List[KernelFunction]): The function to add to the plugin. + plugin_name (str): The name of the plugin to add the function to. + + Raises: + ValueError: If the functions or plugin_name is None or invalid. + ValueError: if the function already exists in the plugin. + """ + if not functions or not plugin_name: + raise ValueError("Functions and plugin_name must not be None or empty") + + if plugin_name not in self.plugins: + self.plugins.add(KernelPlugin(name=plugin_name, functions=functions)) + return + + plugin = self.plugins[plugin_name] + for func in functions: + if func.name in plugin.functions: + raise ValueError(f"Function with name '{func.name}' already exists in plugin '{plugin_name}'") + plugin.functions[func.name] = func + + def add_list_of_plugins(self, plugins: List[KernelPlugin]) -> None: + """ + Add a list of plugins to the collection + + Args: + plugins (List[KernelPlugin]): The plugins to add to the collection. + + Raises: + ValueError: If the plugins list is None. 
+ """ + + if plugins is None: + raise ValueError("Plugins must not be None") + for plugin in plugins: + self.add(plugin) + + def remove(self, plugin: KernelPlugin) -> bool: + """ + Remove a plugin from the collection + + Args: + plugin (KernelPlugin): The plugin to remove from the collection. + + Returns: + True if the plugin was removed, False otherwise. + """ + if plugin is None or plugin.name is None: + return False + return self.plugins.pop(plugin.name, None) is not None + + def remove_by_name(self, plugin_name: str) -> bool: + """ + Remove a plugin from the collection by name + + Args: + plugin_name (str): The name of the plugin to remove from the collection. + + Returns: + True if the plugin was removed, False otherwise. + """ + if plugin_name is None: + return False + return self.plugins.pop(plugin_name, None) is not None + + def __getitem__(self, name): + """Define the [] operator for the collection + + Args: + name (str): The name of the plugin to retrieve. + + Returns: + The plugin if it exists, None otherwise. + + Raises: + KeyError: If the plugin does not exist. + """ + if name not in self.plugins: + raise KeyError(f"Plugin {name} not found.") + return self.plugins[name] + + def clear(self): + """Clear the collection of all plugins""" + self.plugins.clear() + + def get_functions_view(self, include_semantic: bool = True, include_native: bool = True) -> FunctionsView: + """ + Get a view of the functions in the collection + + Args: + include_semantic (bool): Whether to include semantic functions in the view. + include_native (bool): Whether to include native functions in the view. + + Returns: + A view of the functions in the collection. + """ + result = FunctionsView() + + for _, plugin in self.plugins.items(): + for _, function in plugin.functions.items(): + if include_semantic and function.is_semantic: + result.add_function(function.describe()) + elif include_native and not function.is_semantic: + result.add_function(function.describe()) + + return result + + def __iter__(self) -> Any: + """Define an iterator for the collection""" + return iter(self.plugins.values()) + + def __len__(self) -> int: + """Define the length of the collection""" + return len(self.plugins) + + def __contains__(self, plugin_name: str) -> bool: + """ + Check if the collection contains a plugin + + Args: + plugin_name (str): The name of the plugin to check for. + + Returns: + True if the collection contains the plugin, False otherwise. + """ + if not plugin_name: + return False + return self.plugins.get(plugin_name) is not None diff --git a/python/semantic_kernel/plugin_definition/parameter_view.py b/python/semantic_kernel/plugin_definition/parameter_view.py new file mode 100644 index 000000000000..1b3058bce588 --- /dev/null +++ b/python/semantic_kernel/plugin_definition/parameter_view.py @@ -0,0 +1,23 @@ +# Copyright (c) Microsoft. All rights reserved. 
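A small usage sketch for the collection class above. It assumes my_function is an existing KernelFunction instance (the names are hypothetical) and exercises add_plugin_from_functions plus the [] indexers, the same lookup path the sequential planner parser now takes via context.plugins[plugin_name][function_name].

from semantic_kernel.plugin_definition.kernel_plugin_collection import KernelPluginCollection

plugins = KernelPluginCollection()
plugins.add_plugin_from_functions("Writer", [my_function])  # my_function: an existing KernelFunction

assert "Writer" in plugins                  # __contains__ checks by plugin name
func = plugins["Writer"][my_function.name]  # collection[...] -> KernelPlugin, plugin[...] -> KernelFunction
print(len(plugins), plugins.get_functions_view())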
+ + +from typing import Optional + +from pydantic import Field, field_validator + +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.utils.validation import validate_function_param_name + + +class ParameterView(KernelBaseModel): + name: str + description: str + default_value: str + type_: Optional[str] = Field(default="string", alias="type") + required: Optional[bool] = False + + @field_validator("name") + @classmethod + def validate_name(cls, name: str): + validate_function_param_name(name) + return name diff --git a/python/semantic_kernel/reliability/pass_through_without_retry.py b/python/semantic_kernel/reliability/pass_through_without_retry.py index 8056cb87a257..f33d98a87bed 100644 --- a/python/semantic_kernel/reliability/pass_through_without_retry.py +++ b/python/semantic_kernel/reliability/pass_through_without_retry.py @@ -1,31 +1,32 @@ # Copyright (c) Microsoft. All rights reserved. import logging -from typing import Awaitable, Callable, TypeVar +from typing import Any, Awaitable, Callable, Optional, TypeVar +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.reliability.retry_mechanism_base import RetryMechanismBase -from semantic_kernel.sk_pydantic import PydanticField T = TypeVar("T") +logger: logging.Logger = logging.getLogger(__name__) -class PassThroughWithoutRetry(RetryMechanismBase, PydanticField): + +class PassThroughWithoutRetry(RetryMechanismBase, KernelBaseModel): """A retry mechanism that does not retry.""" - async def execute_with_retry_async( - self, action: Callable[[], Awaitable[T]], log: logging.Logger - ) -> Awaitable[T]: + async def execute_with_retry(self, action: Callable[[], Awaitable[T]], log: Optional[Any] = None) -> Awaitable[T]: """Executes the given action with retry logic. Arguments: action {Callable[[], Awaitable[T]]} -- The action to retry on exception. - log {logging.Logger} -- The logger to use. Returns: Awaitable[T] -- An awaitable that will return the result of the action. """ + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") try: await action() except Exception as e: - log.warning(e, "Error executing action, not retrying") + logger.warning(e, "Error executing action, not retrying") raise diff --git a/python/semantic_kernel/reliability/retry_mechanism_base.py b/python/semantic_kernel/reliability/retry_mechanism_base.py index 8e035d281300..e5381d3d490a 100644 --- a/python/semantic_kernel/reliability/retry_mechanism_base.py +++ b/python/semantic_kernel/reliability/retry_mechanism_base.py @@ -2,23 +2,24 @@ import logging from abc import ABC, abstractmethod -from typing import Awaitable, Callable, TypeVar +from typing import Any, Awaitable, Callable, Optional, TypeVar T = TypeVar("T") +logger: logging.Logger = logging.getLogger(__name__) + class RetryMechanismBase(ABC): @abstractmethod - async def execute_with_retry_async( - self, action: Callable[[], Awaitable[T]], log: logging.Logger - ) -> Awaitable[T]: + async def execute_with_retry(self, action: Callable[[], Awaitable[T]], log: Optional[Any] = None) -> Awaitable[T]: """Executes the given action with retry logic. Arguments: action {Callable[[], Awaitable[T]]} -- The action to retry on exception. - log {logging.Logger} -- The logger to use. Returns: Awaitable[T] -- An awaitable that will return the result of the action. """ + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") pass diff --git a/python/semantic_kernel/semantic_functions/chat_prompt_template.py b/python/semantic_kernel/semantic_functions/chat_prompt_template.py index 8aa999e948ec..3d805cb38108 100644 --- a/python/semantic_kernel/semantic_functions/chat_prompt_template.py +++ b/python/semantic_kernel/semantic_functions/chat_prompt_template.py @@ -1,9 +1,11 @@ # Copyright (c) Microsoft. All rights reserved. import asyncio -from logging import Logger +import logging from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, TypeVar +from pydantic import Field + from semantic_kernel.models.chat.chat_message import ChatMessage from semantic_kernel.semantic_functions.prompt_template import PromptTemplate from semantic_kernel.semantic_functions.prompt_template_config import ( @@ -14,31 +16,61 @@ ) if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext ChatMessageT = TypeVar("ChatMessageT", bound=ChatMessage) +logger: logging.Logger = logging.getLogger(__name__) + class ChatPromptTemplate(PromptTemplate, Generic[ChatMessageT]): - _messages: List[ChatMessageT] + messages: List[ChatMessageT] = Field(default_factory=list) def __init__( self, template: str, template_engine: PromptTemplatingEngine, prompt_config: PromptTemplateConfig, - log: Optional[Logger] = None, + parse_chat_system_prompt: bool = False, + parse_messages: bool = False, + **kwargs: Any, ) -> None: - super().__init__(template, template_engine, prompt_config, log) - self._messages = [] - if self._prompt_config.completion.chat_system_prompt: - self.add_system_message(self._prompt_config.completion.chat_system_prompt) - - async def render_async(self, context: "SKContext") -> str: - raise NotImplementedError( - "Can't call render_async on a ChatPromptTemplate.\n" - "Use render_messages_async instead." - ) + """Initialize a chat prompt template. + + if there is a field 'chat_system_prompt' in the prompt_config.execution_settings.extension_data, + that value is added to the messages list as a system message, + can be controlled by setting the parse_chat_system_prompt parameter to True. + + After that any messages that are in messages in the prompt_config.execution_settings + are added to the messages list. + Can be controlled by setting the parse_messages parameter to True. + + Arguments: + template {str} -- The template to use for the chat prompt. + template_engine {PromptTemplatingEngine} -- The templating engine to use. + prompt_config {PromptTemplateConfig} -- The prompt config to use. + parse_chat_system_prompt {bool} -- Whether to parse the chat_system_prompt from + the prompt_config.execution_settings.extension_data. + parse_messages {bool} -- Whether to parse the messages from the prompt_config.execution_settings. + + """ + super().__init__(template, template_engine, prompt_config) + if "log" in kwargs: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") + + if parse_chat_system_prompt and "chat_system_prompt" in self.prompt_config.execution_settings.extension_data: + self.add_system_message(self.prompt_config.execution_settings.extension_data["chat_system_prompt"]) + + if ( + parse_messages + and hasattr(self.prompt_config.execution_settings, "messages") + and self.prompt_config.execution_settings.messages + ): + for message in self.prompt_config.execution_settings.messages: + self.add_message(**message) + + async def render(self, context: "KernelContext") -> str: + raise NotImplementedError("Can't call render on a ChatPromptTemplate.\n" "Use render_messages instead.") def add_system_message(self, message: str) -> None: """Add a system message to the chat template.""" @@ -52,9 +84,7 @@ def add_assistant_message(self, message: str) -> None: """Add an assistant message to the chat template.""" self.add_message("assistant", message) - def add_message( - self, role: str, message: Optional[str] = None, **kwargs: Any - ) -> None: + def add_message(self, role: str, message: Optional[str] = None, **kwargs: Any) -> None: """Add a message to the chat template. Arguments: @@ -62,31 +92,39 @@ def add_message( message: The message to add, can include templating components. kwargs: can be used by inherited classes. """ - self._messages.append( - ChatMessage( + concrete_message = self.model_fields["messages"].annotation.__args__[0] + # When the type is not explicitly set, it is still the typevar, replace with generic ChatMessage + if isinstance(concrete_message, TypeVar): + concrete_message = ChatMessage + assert issubclass(concrete_message, ChatMessage) + if not message and "content" in kwargs: + message = kwargs["content"] + self.messages.append( + concrete_message( role=role, - content_template=PromptTemplate( - message, self._template_engine, self._prompt_config - ), + content_template=PromptTemplate(message, self.template_engine, self.prompt_config) if message else None, + **kwargs, ) ) - async def render_messages_async(self, context: "SKContext") -> List[Dict[str, str]]: + async def render_messages(self, context: "KernelContext") -> List[Dict[str, str]]: """Render the content of the message in the chat template, based on the context.""" - if len(self._messages) == 0 or self._messages[-1].role in [ + if len(self.messages) == 0 or self.messages[-1].role in [ "assistant", "system", ]: - self.add_user_message(message=self._template) - await asyncio.gather( - *[message.render_message_async(context) for message in self._messages] - ) - return [message.as_dict() for message in self._messages] - - @property - def messages(self) -> List[Dict[str, str]]: - """Return the messages as a list of dicts with role, content, name.""" - return [message.as_dict() for message in self._messages] + self.add_user_message(message=self.template) + await asyncio.gather(*[message.render_message(context) for message in self.messages]) + # Don't resend the assistant + tool_calls message as it will error + return [ + message.as_dict() + for message in self.messages + if not (message.role == "assistant" and hasattr(message, "tool_calls")) + ] + + def dump_messages(self) -> List[Dict[str, str]]: + """Return the messages as a list of dicts with role, content, name and function_call.""" + return [message.as_dict() for message in self.messages] @classmethod def restore( @@ -95,30 +133,35 @@ def restore( template: str, template_engine: PromptTemplatingEngine, prompt_config: PromptTemplateConfig, - log: Optional[Logger] = None, + 
parse_chat_system_prompt: bool = False, + parse_messages: bool = False, + **kwargs: Any, ) -> "ChatPromptTemplate": """Restore a ChatPromptTemplate from a list of role and message pairs. - If there is a chat_system_prompt in the prompt_config.completion settings, - that takes precedence over the first message in the list of messages, - if that is a system message. + The parse_messages and parse_chat_system_prompt parameters control whether + the messages and chat_system_prompt from the prompt_config.execution_settings + are parsed and added to the messages list; the messages passed in through the + messages parameter are always added. + + Arguments: + messages {List[Dict[str, str]]} -- The messages to restore, + the default format is [{"role": "user", "message": "Hi there"}]. + If the ChatPromptTemplate is created with a different message type, + the messages should contain any fields that are relevant to that message type, + for instance: ChatPromptTemplate[OpenAIChatMessage].restore can be used with a format: + [{"role": "assistant", "function_call": FunctionCall()}]. + template {str} -- The template to use for the chat prompt. + template_engine {PromptTemplatingEngine} -- The templating engine to use. + prompt_config {PromptTemplateConfig} -- The prompt config to use. + parse_chat_system_prompt {bool} -- Whether to parse the chat_system_prompt from the + prompt_config.execution_settings.extension_data. + parse_messages {bool} -- Whether to parse the messages from the prompt_config.execution_settings. + """ - chat_template = cls(template, template_engine, prompt_config, log) - if ( - prompt_config.completion.chat_system_prompt - and messages[0]["role"] == "system" - ): - existing_system_message = messages.pop(0) - if ( - existing_system_message["message"] - != prompt_config.completion.chat_system_prompt - ): - chat_template._log.info( - "Overriding system prompt with chat_system_prompt, old system message: %s, new system message: %s", - existing_system_message["message"], - prompt_config.completion.chat_system_prompt, - ) + chat_template = cls( - template, template_engine, prompt_config, parse_chat_system_prompt, parse_messages, **kwargs + ) for message in messages: - chat_template.add_message(message["role"], message["message"]) - + chat_template.add_message(**message) return chat_template diff --git a/python/semantic_kernel/semantic_functions/prompt_template.py b/python/semantic_kernel/semantic_functions/prompt_template.py index b8ead27d112e..f37a18c16b05 100644 --- a/python/semantic_kernel/semantic_functions/prompt_template.py +++ b/python/semantic_kernel/semantic_functions/prompt_template.py @@ -1,47 +1,50 @@ # Copyright (c) Microsoft. All rights reserved. 
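Taken together, the reworked ChatPromptTemplate surface above can be exercised roughly as follows. This is a minimal sketch, not part of the change itself: it assumes the PromptTemplateEngine and PromptTemplateConfig.from_execution_settings helpers introduced later in this diff, and the template text and messages are purely illustrative.

from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate
from semantic_kernel.semantic_functions.prompt_template_config import PromptTemplateConfig
from semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine

engine = PromptTemplateEngine()
# chat_system_prompt lands in execution_settings.extension_data, which is what
# parse_chat_system_prompt=True reads in __init__ above.
config = PromptTemplateConfig.from_execution_settings(chat_system_prompt="You are a helpful assistant.")

template = ChatPromptTemplate("{{$user_input}}", engine, config, parse_chat_system_prompt=True)
template.add_user_message("Hi there")
template.add_assistant_message("Hello! How can I help?")

# dump_messages() returns the raw messages as dicts; render_messages(context)
# additionally renders the templated content against a KernelContext.
print(template.dump_messages())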
-from logging import Logger -from typing import TYPE_CHECKING, List, Optional +import logging +from typing import TYPE_CHECKING, Any, List, Optional +from semantic_kernel.plugin_definition.parameter_view import ParameterView from semantic_kernel.semantic_functions.prompt_template_base import PromptTemplateBase from semantic_kernel.semantic_functions.prompt_template_config import ( PromptTemplateConfig, ) -from semantic_kernel.skill_definition.parameter_view import ParameterView from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.blocks.var_block import VarBlock from semantic_kernel.template_engine.protocols.prompt_templating_engine import ( PromptTemplatingEngine, ) -from semantic_kernel.utils.null_logger import NullLogger if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext + +logger: logging.Logger = logging.getLogger(__name__) class PromptTemplate(PromptTemplateBase): - _template: str - _template_engine: PromptTemplatingEngine - _log: Logger - _prompt_config: PromptTemplateConfig + template: str + template_engine: PromptTemplatingEngine + prompt_config: PromptTemplateConfig def __init__( self, template: str, template_engine: PromptTemplatingEngine, prompt_config: PromptTemplateConfig, - log: Optional[Logger] = None, + log: Optional[Any] = None, ) -> None: - self._template = template - self._template_engine = template_engine - self._prompt_config = prompt_config - self._log = log if log is not None else NullLogger() + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") + super().__init__( + template=template, + template_engine=template_engine, + prompt_config=prompt_config, + ) def get_parameters(self) -> List[ParameterView]: seen = set() result = [] - for param in self._prompt_config.input.parameters: + for param in self.prompt_config.parameters: if param is None: continue @@ -50,12 +53,14 @@ def get_parameters(self) -> List[ParameterView]: name=param.name, description=param.description, default_value=param.default_value, + type_=param.type_, + required=param.required, ) ) seen.add(param.name) - blocks = self._template_engine.extract_blocks(self._template) + blocks = self.template_engine.extract_blocks(self.template) for block in blocks: if block.type != BlockTypes.VARIABLE: continue @@ -66,13 +71,11 @@ def get_parameters(self) -> List[ParameterView]: if var_block.name in seen: continue - result.append( - ParameterView(name=var_block.name, description="", default_value="") - ) + result.append(ParameterView(name=var_block.name, description="", default_value="")) seen.add(var_block.name) return result - async def render_async(self, context: "SKContext") -> str: - return await self._template_engine.render_async(self._template, context) + async def render(self, context: "KernelContext") -> str: + return await self.template_engine.render(self.template, context) diff --git a/python/semantic_kernel/semantic_functions/prompt_template_base.py b/python/semantic_kernel/semantic_functions/prompt_template_base.py index 13fcb9dcc173..8f7d1483e17e 100644 --- a/python/semantic_kernel/semantic_functions/prompt_template_base.py +++ b/python/semantic_kernel/semantic_functions/prompt_template_base.py @@ -3,16 +3,18 @@ from abc import ABC, abstractmethod from typing import TYPE_CHECKING, List +from semantic_kernel.kernel_pydantic import KernelBaseModel + if TYPE_CHECKING: - from 
semantic_kernel.orchestration.sk_context import SKContext - from semantic_kernel.skill_definition.parameter_view import ParameterView + from semantic_kernel.orchestration.kernel_context import KernelContext + from semantic_kernel.plugin_definition.parameter_view import ParameterView -class PromptTemplateBase(ABC): +class PromptTemplateBase(KernelBaseModel, ABC): @abstractmethod def get_parameters(self) -> List["ParameterView"]: pass @abstractmethod - async def render_async(self, context: "SKContext") -> str: + async def render(self, context: "KernelContext") -> str: pass diff --git a/python/semantic_kernel/semantic_functions/prompt_template_config.py b/python/semantic_kernel/semantic_functions/prompt_template_config.py index 1d996d2054cd..c27863a0d17c 100644 --- a/python/semantic_kernel/semantic_functions/prompt_template_config.py +++ b/python/semantic_kernel/semantic_functions/prompt_template_config.py @@ -1,150 +1,83 @@ # Copyright (c) Microsoft. All rights reserved. +import json +from typing import Generic, List, TypeVar -from dataclasses import dataclass, field -from typing import Dict, List, Optional - - -@dataclass -class PromptTemplateConfig: - @dataclass - class CompletionConfig: - temperature: float = 0.0 - top_p: float = 1.0 - presence_penalty: float = 0.0 - frequency_penalty: float = 0.0 - max_tokens: int = 256 - number_of_responses: int = 1 - stop_sequences: List[str] = field(default_factory=list) - token_selection_biases: Dict[int, int] = field(default_factory=dict) - chat_system_prompt: str = None - # the function_call should be 'auto' or the name of a specific function in order to leverage function calling - # when not using auto, the format is 'SkillName-FunctionName', e.g. 'Weather-GetWeather' - function_call: Optional[str] = None - - @dataclass - class InputParameter: - name: str = "" - description: str = "" - default_value: str = "" - type_: str = "string" - required: bool = True - - @dataclass - class InputConfig: - parameters: List["PromptTemplateConfig.InputParameter"] = field( - default_factory=list - ) - - schema: int = 1 - type: str = "completion" - description: str = "" - completion: "PromptTemplateConfig.CompletionConfig" = field( - default_factory=CompletionConfig - ) - default_services: List[str] = field(default_factory=list) - input: "PromptTemplateConfig.InputConfig" = field(default_factory=InputConfig) - - @staticmethod - def from_dict(data: dict) -> "PromptTemplateConfig": - config = PromptTemplateConfig() - keys = ["schema", "type", "description"] - for key in keys: - if key in data: - setattr(config, key, data[key]) - - # Some skills may not have all completion parameters defined - config.completion = PromptTemplateConfig.CompletionConfig() - completion_dict = data["completion"] - completion_keys = [ - "temperature", - "top_p", - "presence_penalty", - "frequency_penalty", - "max_tokens", - "number_of_responses", - "stop_sequences", - "token_selection_biases", - "default_services", - "chat_system_prompt", - "function_call", - ] - for comp_key in completion_keys: - if comp_key in completion_dict: - setattr(config.completion, comp_key, completion_dict[comp_key]) - - # Some skills may not have input parameters defined - config.input = PromptTemplateConfig.InputConfig() - config.input.parameters = [] - if data.get("input") is not None: - for parameter in data["input"]["parameters"]: - if "name" in parameter: - name = parameter["name"] - else: - raise Exception( - f"The input parameter doesn't have a name (function: {config.description})" - ) +from pydantic 
import Field - if "description" in parameter: - description = parameter["description"] - else: - raise Exception( - f"Input parameter '{name}' doesn't have a description (function: {config.description})" - ) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.plugin_definition.parameter_view import ParameterView - if "defaultValue" in parameter: - defaultValue = parameter["defaultValue"] - else: - raise Exception( - f"Input parameter '{name}' doesn't have a default value (function: {config.description})" - ) +PromptExecutionSettingsT = TypeVar("PromptExecutionSettingsT", bound=PromptExecutionSettings) + +class PromptTemplateConfig(KernelBaseModel, Generic[PromptExecutionSettingsT]): + schema_: int = Field(default=1, alias="schema") + type: str = "completion" + description: str = "" + execution_settings: PromptExecutionSettingsT = Field( + default_factory=PromptExecutionSettings + ) # todo: this should be a dict + default_services: List[str] = Field(default_factory=list) + parameters: List[ParameterView] = Field(default_factory=list) + + @classmethod + def from_dict(cls, data: dict) -> "PromptTemplateConfig": + config = { + key: value for key, value in data.items() if key in ["schema", "type", "description", "default_services"] + } + config["parameters"] = [] + + config = cls._process_execution_settings(config, data) + + if "input_variables" in data: + for parameter in data["input_variables"]: + name = parameter.get("name", "") + description = parameter.get("description", "") + defaultValue = parameter.get("default", "") type_ = parameter.get("type") - required = parameter.get("required") - - config.input.parameters.append( - PromptTemplateConfig.InputParameter( - name, - description, - defaultValue, - type_, - required, + required = parameter.get("required", False) + + config["parameters"].append( + ParameterView( + name=name, + description=description, + default_value=defaultValue, + type_=type_, + required=required, ) ) - return config - @staticmethod - def from_json(json_str: str) -> "PromptTemplateConfig": - import json - - def keystoint(d): - return {int(k) if k.isdigit() else k: v for k, v in d.items()} - - return PromptTemplateConfig.from_dict( - json.loads(json_str, object_hook=keystoint) - ) - - @staticmethod - def from_completion_parameters( - temperature: float = 0.0, - top_p: float = 1.0, - presence_penalty: float = 0.0, - frequency_penalty: float = 0.0, - max_tokens: int = 256, - number_of_responses: int = 1, - stop_sequences: List[str] = [], - token_selection_biases: Dict[int, int] = {}, - chat_system_prompt: str = None, - function_call: Optional[str] = None, - ) -> "PromptTemplateConfig": - config = PromptTemplateConfig() - config.completion.temperature = temperature - config.completion.top_p = top_p - config.completion.presence_penalty = presence_penalty - config.completion.frequency_penalty = frequency_penalty - config.completion.max_tokens = max_tokens - config.completion.number_of_responses = number_of_responses - config.completion.stop_sequences = stop_sequences - config.completion.token_selection_biases = token_selection_biases - config.completion.chat_system_prompt = chat_system_prompt - config.completion.function_call = function_call + return cls(**config) + + @classmethod + def from_json(cls, json_str: str) -> "PromptTemplateConfig": + return cls.from_dict(json.loads(json_str)) + + @classmethod + def from_execution_settings(cls, **kwargs) -> 
"PromptTemplateConfig": + concrete_class = cls.model_fields["execution_settings"].annotation + if isinstance(concrete_class, TypeVar): + concrete_class = PromptExecutionSettings + return PromptTemplateConfig(execution_settings=concrete_class(extension_data=kwargs)) + + @classmethod + def _process_execution_settings(cls, config: dict, data: dict) -> dict: + exec_settings = data.get("execution_settings", {}) + + for service_id, settings in exec_settings.items(): + # Copy settings to avoid modifying the original data + settings = settings.copy() + + # Extract and remove 'service_id' if it exists + # service_id = settings.pop("service_id", service_id) + + # Determine the concrete type + concrete_type = cls.model_fields["execution_settings"].annotation + if isinstance(concrete_type, TypeVar): + concrete_type = PromptExecutionSettings + + # Initialize the concrete type with the service_id and remaining settings + config["execution_settings"] = concrete_type(service_id=service_id, extension_data=settings) + return config diff --git a/python/semantic_kernel/sk_pydantic.py b/python/semantic_kernel/sk_pydantic.py deleted file mode 100644 index 7f00badf4059..000000000000 --- a/python/semantic_kernel/sk_pydantic.py +++ /dev/null @@ -1,90 +0,0 @@ -import abc -import json -import typing as t - -import numpy as np -import pydantic as pdt -import typing_extensions as te -from pydantic.generics import GenericModel -from pydantic.parse import Protocol -from pydantic.types import StrBytes - - -class PydanticField(abc.ABC): - """Subclass this class to make your class a valid pydantic field type. - - This class is a no-op, but it's necessary to make pydantic recognize your class as - a valid field type. See https://pydantic-docs.helpmanual.io/usage/types/#custom-data-types - for more information. - - - If you want to add validation to your class, you can do so by implementing the - `__get_validators__` class method. See - https://pydantic-docs.helpmanual.io/usage/validators/ for more information. - - If you want to add serialization to your class, you can do so by implementing the - `json` and `parse_raw` methods. See - https://pydantic-docs.helpmanual.io/usage/exporting_models/#json for more information. 
- """ - - @classmethod - def __get_validators__(cls) -> t.Generator[t.Callable[..., t.Any], None, None]: - """Gets the validators for the class.""" - yield cls.no_op_validate - - @classmethod - def no_op_validate(cls, v: t.Any) -> t.Any: - """Does no validation, just returns the value.""" - if v is None: - v = cls() - if isinstance(v, str): - v = cls(**json.loads(v)) - return v - - def json(self) -> str: - """Serialize the model to JSON.""" - return "{}" - - @classmethod - def parse_raw( - cls: t.Type[te.Self], - b: StrBytes, - *, - content_type: str = None, - encoding: str = "utf8", - proto: Protocol = None, - allow_pickle: bool = False, - ) -> te.Self: - """Parse a raw byte string into a model.""" - return cls() - - def __eq__(self, other: t.Any) -> bool: - """Check if two instances are equal.""" - return isinstance(other, self.__class__) - - -_JSON_ENCODERS: t.Final[t.Dict[t.Type[t.Any], t.Callable[[t.Any], str]]] = { - PydanticField: lambda v: v.json(), - np.ndarray: lambda v: json.dumps(v.tolist()), -} - - -class SKBaseModel(pdt.BaseModel): - """Base class for all pydantic models in the SK.""" - - class Config: - """Pydantic configuration.""" - - json_encoders = _JSON_ENCODERS - # See the `allow_population_by_field_name` section of - # https://docs.pydantic.dev/latest/usage/model_config/#options - allow_population_by_field_name = True - arbitrary_types_allowed = True - - -class SKGenericModel(GenericModel): - """Base class for all pydantic `GenericModel`s in the SK.""" - - class Config: - """Pydantic configuration.""" - - json_encoders = _JSON_ENCODERS - arbitrary_types_allowed = True diff --git a/python/semantic_kernel/skill_definition/__init__.py b/python/semantic_kernel/skill_definition/__init__.py deleted file mode 100644 index 75307bc9a900..000000000000 --- a/python/semantic_kernel/skill_definition/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -from semantic_kernel.skill_definition.sk_function_context_parameter_decorator import ( - sk_function_context_parameter, -) -from semantic_kernel.skill_definition.sk_function_decorator import sk_function - -__all__ = [ - "sk_function", - "sk_function_context_parameter", -] diff --git a/python/semantic_kernel/skill_definition/constants.py b/python/semantic_kernel/skill_definition/constants.py deleted file mode 100644 index e9f919bdde32..000000000000 --- a/python/semantic_kernel/skill_definition/constants.py +++ /dev/null @@ -1,3 +0,0 @@ -import typing as t - -GLOBAL_SKILL: t.Final[str] = "_GLOBAL_FUNCTIONS_" diff --git a/python/semantic_kernel/skill_definition/function_view.py b/python/semantic_kernel/skill_definition/function_view.py deleted file mode 100644 index 19961026a71e..000000000000 --- a/python/semantic_kernel/skill_definition/function_view.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
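For context on the PromptTemplateConfig rewrite above: the config is now read from an execution_settings block plus an input_variables list instead of the old completion/input dataclasses. A rough usage sketch (the dict keys follow from_dict above; the concrete values are illustrative only):

from semantic_kernel.semantic_functions.prompt_template_config import PromptTemplateConfig

data = {
    "schema": 1,
    "type": "completion",
    "description": "Summarize the given text.",
    "input_variables": [
        {"name": "input", "description": "Text to summarize", "default": "", "type": "string", "required": True},
    ],
    # keyed by service_id; the settings themselves end up in extension_data
    "execution_settings": {
        "default": {"max_tokens": 256, "temperature": 0.0},
    },
}

config = PromptTemplateConfig.from_dict(data)
print(config.parameters[0].name)                               # "input"
print(config.execution_settings.extension_data["max_tokens"])  # 256

# Or build one directly from keyword arguments:
config2 = PromptTemplateConfig.from_execution_settings(temperature=0.7, max_tokens=512)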
- -from typing import List - -from semantic_kernel.sk_pydantic import SKBaseModel -from semantic_kernel.skill_definition.parameter_view import ParameterView -from semantic_kernel.utils.validation import validate_function_name - - -class FunctionView(SKBaseModel): - name: str - skill_name: str - description: str - is_semantic: bool - parameters: List[ParameterView] - is_asynchronous: bool = True - - def __init__( - self, - name: str, - skill_name: str, - description: str, - parameters: List[ParameterView], - is_semantic: bool, - is_asynchronous: bool = True, - ) -> None: - validate_function_name(name) - super().__init__( - name=name, - skill_name=skill_name, - description=description, - parameters=parameters, - is_semantic=is_semantic, - is_asynchronous=is_asynchronous, - ) diff --git a/python/semantic_kernel/skill_definition/functions_view.py b/python/semantic_kernel/skill_definition/functions_view.py deleted file mode 100644 index 62ff9cdadab1..000000000000 --- a/python/semantic_kernel/skill_definition/functions_view.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from typing import Dict, List - -import pydantic as pdt - -from semantic_kernel.kernel_exception import KernelException -from semantic_kernel.sk_pydantic import SKBaseModel -from semantic_kernel.skill_definition.function_view import FunctionView - - -class FunctionsView(SKBaseModel): - semantic_functions: Dict[str, List[FunctionView]] = pdt.Field(default_factory=dict) - native_functions: Dict[str, List[FunctionView]] = pdt.Field(default_factory=dict) - - def add_function(self, view: FunctionView) -> "FunctionsView": - if view.is_semantic: - if view.skill_name not in self.semantic_functions: - self.semantic_functions[view.skill_name] = [] - self.semantic_functions[view.skill_name].append(view) - else: - if view.skill_name not in self.native_functions: - self.native_functions[view.skill_name] = [] - self.native_functions[view.skill_name].append(view) - - return self - - def is_semantic(self, skill_name: str, function_name: str) -> bool: - as_sf = self.semantic_functions.get(skill_name, []) - as_sf = any(f.name == function_name for f in as_sf) - - as_nf = self.native_functions.get(skill_name, []) - as_nf = any(f.name == function_name for f in as_nf) - - if as_sf and as_nf: - raise KernelException( - KernelException.ErrorCodes.AmbiguousImplementation, - ( - f"There are 2 functions with the same name: {function_name}." - "One is native and the other semantic." - ), - ) - - return as_sf - - def is_native(self, skill_name: str, function_name: str) -> bool: - as_sf = self.semantic_functions.get(skill_name, []) - as_sf = any(f.name == function_name for f in as_sf) - - as_nf = self.native_functions.get(skill_name, []) - as_nf = any(f.name == function_name for f in as_nf) - - if as_sf and as_nf: - raise KernelException( - KernelException.ErrorCodes.AmbiguousImplementation, - ( - f"There are 2 functions with the same name: {function_name}." - "One is native and the other semantic." - ), - ) - - return as_nf diff --git a/python/semantic_kernel/skill_definition/parameter_view.py b/python/semantic_kernel/skill_definition/parameter_view.py deleted file mode 100644 index 38b3c794d730..000000000000 --- a/python/semantic_kernel/skill_definition/parameter_view.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
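The ParameterView referenced above now lives in plugin_definition and exposes type_ and required directly, matching how it is constructed in get_parameters and from_dict earlier in this diff. A tiny construction sketch with illustrative values:

from semantic_kernel.plugin_definition.parameter_view import ParameterView

param = ParameterView(
    name="city",
    description="City to look up",
    default_value="Seattle",
    type_="string",
    required=False,
)
print(param.name, param.type_)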
- - -from pydantic import Field, validator - -from semantic_kernel.sk_pydantic import SKBaseModel -from semantic_kernel.utils.validation import validate_function_param_name - - -class ParameterView(SKBaseModel): - name: str - description: str - default_value: str - type_: str = Field(default="string", alias="type") - required: bool = False - - @validator("name") - def validate_name(cls, name: str): - validate_function_param_name(name) - return name diff --git a/python/semantic_kernel/skill_definition/read_only_skill_collection.py b/python/semantic_kernel/skill_definition/read_only_skill_collection.py deleted file mode 100644 index 3a727fc1640a..000000000000 --- a/python/semantic_kernel/skill_definition/read_only_skill_collection.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from logging import Logger -from typing import TYPE_CHECKING, ClassVar, Dict, Optional, Tuple - -import pydantic as pdt - -from semantic_kernel.kernel_exception import KernelException -from semantic_kernel.orchestration.sk_function import SKFunction -from semantic_kernel.sk_pydantic import SKBaseModel -from semantic_kernel.skill_definition import constants -from semantic_kernel.skill_definition.functions_view import FunctionsView -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, -) -from semantic_kernel.utils.null_logger import NullLogger - -if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_function_base import SKFunctionBase - - -class ReadOnlySkillCollection(SKBaseModel, ReadOnlySkillCollectionBase): - GLOBAL_SKILL: ClassVar[str] = constants.GLOBAL_SKILL - data: Dict[str, Dict[str, SKFunction]] = pdt.Field(default_factory=dict) - _log: Logger = pdt.PrivateAttr() - - class Config: - allow_mutation = False - - def __init__( - self, - data: Dict[str, Dict[str, SKFunction]] = None, - log: Optional[Logger] = None, - ) -> None: - super().__init__(data=data or {}) - self._log = log or NullLogger() - - def has_function(self, skill_name: Optional[str], function_name: str) -> bool: - s_name, f_name = self._normalize_names(skill_name, function_name, True) - if s_name not in self.data: - return False - return f_name in self.data[s_name] - - def has_semantic_function(self, skill_name: str, function_name: str) -> bool: - s_name, f_name = self._normalize_names(skill_name, function_name) - if s_name not in self.data: - return False - if f_name not in self.data[s_name]: - return False - return self.data[s_name][f_name].is_semantic - - def has_native_function(self, skill_name: str, function_name: str) -> bool: - s_name, f_name = self._normalize_names(skill_name, function_name, True) - if s_name not in self.data: - return False - if f_name not in self.data[s_name]: - return False - return self.data[s_name][f_name].is_native - - def get_semantic_function( - self, skill_name: str, function_name: str - ) -> "SKFunctionBase": - s_name, f_name = self._normalize_names(skill_name, function_name) - if self.has_semantic_function(s_name, f_name): - return self.data[s_name][f_name] - - self._log.error(f"Function not available: {s_name}.{f_name}") - raise KernelException( - KernelException.ErrorCodes.FunctionNotAvailable, - f"Function not available: {s_name}.{f_name}", - ) - - def get_native_function( - self, skill_name: str, function_name: str - ) -> "SKFunctionBase": - s_name, f_name = self._normalize_names(skill_name, function_name, True) - if self.has_native_function(s_name, f_name): - return self.data[s_name][f_name] - - 
self._log.error(f"Function not available: {s_name}.{f_name}") - raise KernelException( - KernelException.ErrorCodes.FunctionNotAvailable, - f"Function not available: {s_name}.{f_name}", - ) - - def get_functions_view( - self, include_semantic: bool = True, include_native: bool = True - ) -> FunctionsView: - result = FunctionsView() - - for skill in self.data.values(): - for function in skill.values(): - if include_semantic and function.is_semantic: - result.add_function(function.describe()) - elif include_native and function.is_native: - result.add_function(function.describe()) - - return result - - def get_function( - self, skill_name: Optional[str], function_name: str - ) -> "SKFunctionBase": - s_name, f_name = self._normalize_names(skill_name, function_name, True) - if self.has_function(s_name, f_name): - return self.data[s_name][f_name] - - self._log.error(f"Function not available: {s_name}.{f_name}") - raise KernelException( - KernelException.ErrorCodes.FunctionNotAvailable, - f"Function not available: {s_name}.{f_name}", - ) - - def _normalize_names( - self, - skill_name: Optional[str], - function_name: str, - allow_substitution: bool = False, - ) -> Tuple[str, str]: - s_name, f_name = skill_name, function_name - if s_name is None and allow_substitution: - s_name = self.GLOBAL_SKILL - - if s_name is None: - raise ValueError("The skill name provided cannot be `None`") - - s_name, f_name = s_name.lower(), f_name.lower() - return s_name, f_name diff --git a/python/semantic_kernel/skill_definition/read_only_skill_collection_base.py b/python/semantic_kernel/skill_definition/read_only_skill_collection_base.py deleted file mode 100644 index 4218527a0a18..000000000000 --- a/python/semantic_kernel/skill_definition/read_only_skill_collection_base.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Optional - -from semantic_kernel.sk_pydantic import PydanticField - -if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_function_base import SKFunctionBase - from semantic_kernel.skill_definition.functions_view import FunctionsView - - -class ReadOnlySkillCollectionBase(PydanticField, ABC): - @abstractmethod - def has_function(self, skill_name: Optional[str], function_name: str) -> bool: - pass - - @abstractmethod - def has_semantic_function( - self, skill_name: Optional[str], function_name: str - ) -> bool: - pass - - @abstractmethod - def has_native_function( - self, skill_name: Optional[str], function_name: str - ) -> bool: - pass - - @abstractmethod - def get_semantic_function( - self, skill_name: Optional[str], function_name: str - ) -> "SKFunctionBase": - pass - - @abstractmethod - def get_native_function( - self, skill_name: Optional[str], function_name: str - ) -> "SKFunctionBase": - pass - - @abstractmethod - def get_functions_view( - self, include_semantic: bool = True, include_native: bool = True - ) -> "FunctionsView": - pass - - @abstractmethod - def get_function( - self, skill_name: Optional[str], function_name: str - ) -> "SKFunctionBase": - pass diff --git a/python/semantic_kernel/skill_definition/sk_function_context_parameter_decorator.py b/python/semantic_kernel/skill_definition/sk_function_context_parameter_decorator.py deleted file mode 100644 index c7eb2d670614..000000000000 --- a/python/semantic_kernel/skill_definition/sk_function_context_parameter_decorator.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- - -def sk_function_context_parameter( - *, - name: str, - description: str, - default_value: str = "", - type: str = "string", - required: bool = False -): - """ - Decorator for SK function context parameters. - - Args: - name -- The name of the context parameter - description -- The description of the context parameter - default_value -- The default value of the context parameter - type -- The type of the context parameter, used for function calling - required -- Whether the context parameter is required - - """ - - def decorator(func): - if not hasattr(func, "__sk_function_context_parameters__"): - func.__sk_function_context_parameters__ = [] - - func.__sk_function_context_parameters__.append( - { - "name": name, - "description": description, - "default_value": default_value, - "type": type, - "required": required, - } - ) - return func - - return decorator diff --git a/python/semantic_kernel/skill_definition/sk_function_decorator.py b/python/semantic_kernel/skill_definition/sk_function_decorator.py deleted file mode 100644 index 6c56fbf85bad..000000000000 --- a/python/semantic_kernel/skill_definition/sk_function_decorator.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - - -def sk_function( - *, - description: str = "", - name: str = "", - input_description: str = "", - input_default_value: str = "", -): - """ - Decorator for SK functions. - - Args: - description -- The description of the function - name -- The name of the function - input_description -- The description of the input - input_default_value -- The default value of the input - """ - - def decorator(func): - func.__sk_function__ = True - func.__sk_function_description__ = description or "" - func.__sk_function_name__ = name or func.__name__ - func.__sk_function_input_description__ = input_description or "" - func.__sk_function_input_default_value__ = input_default_value or "" - return func - - return decorator diff --git a/python/semantic_kernel/skill_definition/skill_collection.py b/python/semantic_kernel/skill_definition/skill_collection.py deleted file mode 100644 index 77458f7e9054..000000000000 --- a/python/semantic_kernel/skill_definition/skill_collection.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
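For reference, the skill_definition decorators deleted above were applied to native functions roughly like this. The sketch is reconstructed from the removed code only and targets the pre-change package; the plugin class, method, and values are made up for illustration.

from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter


class TextSkill:
    @sk_function(description="Shout the input", name="shout", input_description="Text to shout")
    @sk_function_context_parameter(name="suffix", description="Appended to the output", default_value="!")
    def shout(self, text: str) -> str:
        # The decorators only attach metadata; the body itself is ordinary Python.
        return text.upper()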
- -from logging import Logger -from typing import ( - TYPE_CHECKING, - ClassVar, - Dict, - Optional, - Union, -) - -import pydantic as pdt - -from semantic_kernel.orchestration.sk_function import SKFunction -from semantic_kernel.sk_pydantic import SKGenericModel -from semantic_kernel.skill_definition import constants -from semantic_kernel.skill_definition.functions_view import FunctionsView -from semantic_kernel.skill_definition.read_only_skill_collection import ( - ReadOnlySkillCollection, -) -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, -) -from semantic_kernel.skill_definition.skill_collection_base import SkillCollectionBase -from semantic_kernel.utils.null_logger import NullLogger - -if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_function_base import SKFunctionBase - - -class SkillCollection(SKGenericModel, SkillCollectionBase): - GLOBAL_SKILL: ClassVar[str] = constants.GLOBAL_SKILL - read_only_skill_collection_: ReadOnlySkillCollection = pdt.Field( - alias="read_only_skill_collection" - ) - _log: Logger = pdt.PrivateAttr() - - def __init__( - self, - log: Optional[Logger] = None, - skill_collection: Union[Dict[str, Dict[str, SKFunction]], None] = None, - read_only_skill_collection_: Optional[ReadOnlySkillCollection] = None, - ) -> None: - if skill_collection and read_only_skill_collection_: - raise ValueError( - "Only one of `skill_collection` and `read_only_skill_collection` can be" - " provided" - ) - elif not skill_collection and not read_only_skill_collection_: - read_only_skill_collection = ReadOnlySkillCollection({}) - elif not read_only_skill_collection_: - read_only_skill_collection = ReadOnlySkillCollection(skill_collection) - else: - read_only_skill_collection = read_only_skill_collection_ - super().__init__(read_only_skill_collection=read_only_skill_collection) - self._log = log if log is not None else NullLogger() - - @property - def read_only_skill_collection(self) -> ReadOnlySkillCollectionBase: - return self.read_only_skill_collection_ - - @property - def skill_collection(self): - return self.read_only_skill_collection_.data - - def add_semantic_function(self, function: "SKFunctionBase") -> None: - if function is None: - raise ValueError("The function provided cannot be `None`") - - s_name, f_name = function.skill_name, function.name - s_name, f_name = s_name.lower(), f_name.lower() - - self.skill_collection.setdefault(s_name, {})[f_name] = function - - def add_native_function(self, function: "SKFunctionBase") -> None: - if function is None: - raise ValueError("The function provided cannot be `None`") - - s_name, f_name = function.skill_name, function.name - s_name, f_name = self.read_only_skill_collection_._normalize_names( - s_name, f_name, True - ) - - self.skill_collection.setdefault(s_name, {})[f_name] = function - - def has_function(self, skill_name: Optional[str], function_name: str) -> bool: - return self.read_only_skill_collection_.has_function(skill_name, function_name) - - def has_semantic_function( - self, skill_name: Optional[str], function_name: str - ) -> bool: - return self.read_only_skill_collection_.has_semantic_function( - skill_name, function_name - ) - - def has_native_function( - self, skill_name: Optional[str], function_name: str - ) -> bool: - return self.read_only_skill_collection_.has_native_function( - skill_name, function_name - ) - - def get_semantic_function( - self, skill_name: Optional[str], function_name: str - ) -> "SKFunctionBase": - return 
self.read_only_skill_collection_.get_semantic_function( - skill_name, function_name - ) - - def get_native_function( - self, skill_name: Optional[str], function_name: str - ) -> "SKFunctionBase": - return self.read_only_skill_collection_.get_native_function( - skill_name, function_name - ) - - def get_functions_view( - self, include_semantic: bool = True, include_native: bool = True - ) -> FunctionsView: - return self.read_only_skill_collection_.get_functions_view( - include_semantic, include_native - ) - - def get_function( - self, skill_name: Optional[str], function_name: str - ) -> "SKFunctionBase": - return self.read_only_skill_collection_.get_function(skill_name, function_name) diff --git a/python/semantic_kernel/skill_definition/skill_collection_base.py b/python/semantic_kernel/skill_definition/skill_collection_base.py deleted file mode 100644 index 089ea544b191..000000000000 --- a/python/semantic_kernel/skill_definition/skill_collection_base.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, TypeVar - -from semantic_kernel.sk_pydantic import PydanticField -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, -) - -if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_function_base import SKFunctionBase - - -SkillCollectionT = TypeVar("SkillCollectionT", bound="SkillCollectionBase") - - -class SkillCollectionBase(ReadOnlySkillCollectionBase, PydanticField, ABC): - @property - @abstractmethod - def read_only_skill_collection(self) -> ReadOnlySkillCollectionBase: - pass - - @abstractmethod - def add_semantic_function( - self, semantic_function: "SKFunctionBase" - ) -> "SkillCollectionBase": - pass - - @abstractmethod - def add_native_function( - self, native_function: "SKFunctionBase" - ) -> "SkillCollectionBase": - pass diff --git a/python/semantic_kernel/template_engine/blocks/block.py b/python/semantic_kernel/template_engine/blocks/block.py index fd1db61d5132..0f7169888ef4 100644 --- a/python/semantic_kernel/template_engine/blocks/block.py +++ b/python/semantic_kernel/template_engine/blocks/block.py @@ -1,24 +1,22 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import Optional, Tuple +import logging +from typing import Any, Optional, Tuple -import pydantic as pdt - -from semantic_kernel.sk_pydantic import SKBaseModel +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.template_engine.blocks.block_types import BlockTypes -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) -class Block(SKBaseModel): - content: Optional[str] - _log: Optional[Logger] = pdt.PrivateAttr(default_factory=NullLogger) +class Block(KernelBaseModel): + content: Optional[str] = None - def __init__( - self, content: Optional[str] = None, log: Optional[Logger] = NullLogger - ) -> None: + def __init__(self, content: Optional[str] = None, log: Optional[Any] = None) -> None: super().__init__(content=content) - self._log = log or NullLogger() + + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") def is_valid(self) -> Tuple[bool, str]: raise NotImplementedError("Subclasses must implement this method.") @@ -26,7 +24,3 @@ def is_valid(self) -> Tuple[bool, str]: @property def type(self) -> BlockTypes: return BlockTypes.UNDEFINED - - @property - def log(self) -> Logger: - return self._log diff --git a/python/semantic_kernel/template_engine/blocks/code_block.py b/python/semantic_kernel/template_engine/blocks/code_block.py index 6150f4488e6a..4b38afe02249 100644 --- a/python/semantic_kernel/template_engine/blocks/code_block.py +++ b/python/semantic_kernel/template_engine/blocks/code_block.py @@ -1,19 +1,19 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import List, Optional, Tuple +import logging +from typing import Any, List, Optional, Tuple import pydantic as pdt -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, -) +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.kernel_plugin_collection import KernelPluginCollection from semantic_kernel.template_engine.blocks.block import Block from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.blocks.function_id_block import FunctionIdBlock from semantic_kernel.template_engine.code_tokenizer import CodeTokenizer +logger: logging.Logger = logging.getLogger(__name__) + class CodeBlock(Block): _tokens: List[Block] = pdt.PrivateAttr() @@ -23,11 +23,14 @@ def __init__( self, content: str, tokens: Optional[List[Block]] = None, - log: Optional[Logger] = None, + log: Optional[Any] = None, ): - super().__init__(content=content and content.strip(), log=log) + super().__init__(content=content and content.strip()) + + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") - self._tokens = tokens or CodeTokenizer(log).tokenize(content) + self._tokens = tokens or CodeTokenizer().tokenize(content) self._validated = False @property @@ -40,69 +43,64 @@ def is_valid(self) -> Tuple[bool, str]: for token in self._tokens: is_valid, error_msg = token.is_valid() if not is_valid: - self.log.error(error_msg) + logger.error(error_msg) return False, error_msg if len(self._tokens) > 1: if self._tokens[0].type != BlockTypes.FUNCTION_ID: error_msg = f"Unexpected second token found: {self._tokens[1].content}" - self.log.error(error_msg) + logger.error(error_msg) return False, error_msg - if ( - self._tokens[1].type != BlockTypes.VALUE - and self._tokens[1].type != BlockTypes.VARIABLE - ): + if self._tokens[1].type != BlockTypes.VALUE and self._tokens[1].type != BlockTypes.VARIABLE: error_msg = "Functions support only one parameter" - self.log.error(error_msg) + logger.error(error_msg) return False, error_msg if len(self._tokens) > 2: error_msg = f"Unexpected second token found: {self._tokens[1].content}" - self.log.error(error_msg) + logger.error(error_msg) return False, error_msg self._validated = True return True, "" - async def render_code_async(self, context): + async def render_code(self, context): if not self._validated: is_valid, error = self.is_valid() if not is_valid: raise ValueError(error) - self.log.debug(f"Rendering code: `{self.content}`") + logger.debug(f"Rendering code: `{self.content}`") if self._tokens[0].type in (BlockTypes.VALUE, BlockTypes.VARIABLE): return self._tokens[0].render(context.variables) if self._tokens[0].type == BlockTypes.FUNCTION_ID: - return await self._render_function_call_async(self._tokens[0], context) + return await self._render_function_call(self._tokens[0], context) raise ValueError(f"Unexpected first token type: {self._tokens[0].type}") - async def _render_function_call_async(self, f_block: FunctionIdBlock, context): - if not context.skills: - raise ValueError("Skill collection not set") + async def _render_function_call(self, f_block: FunctionIdBlock, context): + if not context.plugins: + raise ValueError("Plugin collection not set") - function = self._get_function_from_skill_collection(context.skills, f_block) + function = self._get_function_from_plugin_collection(context.plugins, f_block) if not function: error_msg = f"Function `{f_block.content}` not found" - self.log.error(error_msg) + logger.error(error_msg) raise ValueError(error_msg) variables_clone = context.variables.clone() if len(self._tokens) > 1: - self.log.debug(f"Passing variable/value: `{self._tokens[1].content}`") + logger.debug(f"Passing variable/value: `{self._tokens[1].content}`") input_value = self._tokens[1].render(variables_clone) variables_clone.update(input_value) - result = await function.invoke_async( - variables=variables_clone, memory=context.memory, log=self.log - ) + result = await function.invoke(variables=variables_clone, memory=context.memory) if result.error_occurred: error_msg = ( @@ -110,20 +108,30 @@ async def _render_function_call_async(self, f_block: FunctionIdBlock, context): f"{result.last_exception.__class__.__name__}: " f"{result.last_error_description}" ) - self.log.error(error_msg) + logger.error(error_msg) raise ValueError(error_msg) return result.result - def _get_function_from_skill_collection( - self, skills: ReadOnlySkillCollectionBase, f_block: FunctionIdBlock - ) -> Optional[SKFunctionBase]: - if not f_block.skill_name and skills.has_function(None, f_block.function_name): - return 
skills.get_function(None, f_block.function_name) - - if f_block.skill_name and skills.has_function( - f_block.skill_name, f_block.function_name - ): - return skills.get_function(f_block.skill_name, f_block.function_name) + def _get_function_from_plugin_collection( + self, plugins: KernelPluginCollection, f_block: FunctionIdBlock + ) -> Optional[KernelFunction]: + """ + Get the function from the plugin collection + + Args: + plugins: The plugin collection + f_block: The function block that contains the function name + + Returns: + The function if it exists, None otherwise. + """ + if f_block.plugin_name is not None and len(f_block.plugin_name) > 0: + return plugins[f_block.plugin_name][f_block.function_name] + else: + # We now require a plug-in name, but if one isn't set then we'll try to find the function + for plugin in plugins: + if f_block.function_name in plugin: + return plugin[f_block.function_name] return None diff --git a/python/semantic_kernel/template_engine/blocks/function_id_block.py b/python/semantic_kernel/template_engine/blocks/function_id_block.py index 83e3f952b61d..ce019bc9b594 100644 --- a/python/semantic_kernel/template_engine/blocks/function_id_block.py +++ b/python/semantic_kernel/template_engine/blocks/function_id_block.py @@ -1,8 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger +import logging from re import match as re_match -from typing import Optional, Tuple +from typing import Any, Optional, Tuple import pydantic as pdt @@ -10,27 +10,31 @@ from semantic_kernel.template_engine.blocks.block import Block from semantic_kernel.template_engine.blocks.block_types import BlockTypes +logger: logging.Logger = logging.getLogger(__name__) + class FunctionIdBlock(Block): - _skill_name: str = pdt.PrivateAttr() + _plugin_name: str = pdt.PrivateAttr() _function_name: str = pdt.PrivateAttr() - def __init__(self, content: Optional[str] = None, log: Optional[Logger] = None): - super().__init__(content=content and content.strip(), log=log) + def __init__(self, content: Optional[str] = None, log: Optional[Any] = None): + super().__init__(content=content and content.strip()) + + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") function_name_parts = self.content.split(".") if len(function_name_parts) > 2: - self.log.error(f"Invalid function name `{self.content}`") + logger.error(f"Invalid function name `{self.content}`") raise ValueError( - "A function name can contain at most one dot separating " - "the skill name from the function name" + "A function name can contain at most one dot separating " "the plugin name from the function name" ) if len(function_name_parts) == 2: - self._skill_name = function_name_parts[0] + self._plugin_name = function_name_parts[0] self._function_name = function_name_parts[1] else: - self._skill_name = "" + self._plugin_name = "" self._function_name = self.content @property @@ -38,12 +42,12 @@ def type(self) -> BlockTypes: return BlockTypes.FUNCTION_ID @property - def skill_name(self) -> str: - return self._skill_name + def plugin_name(self) -> str: + return self._plugin_name - @skill_name.setter - def skill_name(self, value: str) -> None: - self._skill_name = value + @plugin_name.setter + def plugin_name(self, value: str) -> None: + self._plugin_name = value @property def function_name(self) -> str: @@ -70,8 +74,7 @@ def is_valid(self) -> Tuple[bool, str]: if self._has_more_than_one_dot(self.content): error_msg = ( - "The function identifier can contain max one '.' 
" - "char separating skill name from function name" + "The function identifier can contain max one '.' " "char separating plugin name from function name" ) return False, error_msg diff --git a/python/semantic_kernel/template_engine/blocks/text_block.py b/python/semantic_kernel/template_engine/blocks/text_block.py index d94530b89fc0..9eb344584599 100644 --- a/python/semantic_kernel/template_engine/blocks/text_block.py +++ b/python/semantic_kernel/template_engine/blocks/text_block.py @@ -1,12 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import Optional, Tuple +import logging +from typing import Any, Optional, Tuple from semantic_kernel.orchestration.context_variables import ContextVariables from semantic_kernel.template_engine.blocks.block import Block from semantic_kernel.template_engine.blocks.block_types import BlockTypes +logger: logging.Logger = logging.getLogger(__name__) + class TextBlock(Block): @classmethod @@ -15,16 +17,15 @@ def from_text( text: Optional[str] = None, start_index: Optional[int] = None, stop_index: Optional[int] = None, - log: Optional[Logger] = None, + log: Optional[Any] = None, ): + if log: + logger.warning("The `log` parameter is deprecated. Please use the `logging` module instead.") if text is None: - return cls(content="", log=log) + return cls(content="") if start_index is not None and stop_index is not None: if start_index > stop_index: - raise ValueError( - f"start_index ({start_index}) must be less than " - f"stop_index ({stop_index})" - ) + raise ValueError(f"start_index ({start_index}) must be less than " f"stop_index ({stop_index})") if start_index < 0: raise ValueError(f"start_index ({start_index}) must be greater than 0") @@ -35,7 +36,7 @@ def from_text( elif stop_index is not None: text = text[:stop_index] - return cls(content=text, log=log) + return cls(content=text) @property def type(self) -> BlockTypes: diff --git a/python/semantic_kernel/template_engine/blocks/val_block.py b/python/semantic_kernel/template_engine/blocks/val_block.py index 07a8a881f6ed..9c47b6633845 100644 --- a/python/semantic_kernel/template_engine/blocks/val_block.py +++ b/python/semantic_kernel/template_engine/blocks/val_block.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import Optional, Tuple +import logging +from typing import Any, Optional, Tuple import pydantic as pdt @@ -10,18 +10,23 @@ from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.blocks.symbols import Symbols +logger: logging.Logger = logging.getLogger(__name__) + class ValBlock(Block): _first: str = pdt.PrivateAttr() _last: str = pdt.PrivateAttr() _value: str = pdt.PrivateAttr() - def __init__(self, content: Optional[str] = None, log: Optional[Logger] = None): - super().__init__(content=content and content.strip(), log=log) + def __init__(self, content: Optional[str] = None, log: Optional[Any] = None): + super().__init__(content=content and content.strip()) + + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") if len(self.content) < 2: err = "A value must have single quotes or double quotes on both sides" - self.log.error(err) + logger.error(err) self._value = "" self._first = "\0" self._last = "\0" @@ -38,22 +43,17 @@ def type(self) -> BlockTypes: def is_valid(self) -> Tuple[bool, str]: if len(self.content) < 2: error_msg = "A value must have single quotes or double quotes on both sides" - self.log.error(error_msg) + logger.error(error_msg) return False, error_msg if self._first != Symbols.DBL_QUOTE and self._first != Symbols.SGL_QUOTE: - error_msg = ( - "A value must be wrapped in either single quotes or double quotes" - ) - self.log.error(error_msg) + error_msg = "A value must be wrapped in either single quotes or double quotes" + logger.error(error_msg) return False, error_msg if self._first != self._last: - error_msg = ( - "A value must be defined using either single quotes or " - "double quotes, not both" - ) - self.log.error(error_msg) + error_msg = "A value must be defined using either single quotes or " "double quotes, not both" + logger.error(error_msg) return False, error_msg return True, "" @@ -63,8 +63,4 @@ def render(self, _: Optional[ContextVariables] = None) -> str: @staticmethod def has_val_prefix(text: Optional[str]) -> bool: - return ( - text is not None - and len(text) > 0 - and (text[0] == Symbols.DBL_QUOTE or text[0] == Symbols.SGL_QUOTE) - ) + return text is not None and len(text) > 0 and (text[0] == Symbols.DBL_QUOTE or text[0] == Symbols.SGL_QUOTE) diff --git a/python/semantic_kernel/template_engine/blocks/var_block.py b/python/semantic_kernel/template_engine/blocks/var_block.py index 0e77166ead0f..7f6cca8fba86 100644 --- a/python/semantic_kernel/template_engine/blocks/var_block.py +++ b/python/semantic_kernel/template_engine/blocks/var_block.py @@ -1,8 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger +import logging from re import match as re_match -from typing import Optional, Tuple +from typing import Any, Optional, Tuple import pydantic as pdt @@ -11,16 +11,21 @@ from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.blocks.symbols import Symbols +logger: logging.Logger = logging.getLogger(__name__) + class VarBlock(Block): _name: str = pdt.PrivateAttr() - def __init__(self, content: Optional[str] = None, log: Optional[Logger] = None): - super().__init__(content=content and content.strip(), log=log) + def __init__(self, content: Optional[str] = None, log: Optional[Any] = None): + super().__init__(content=content and content.strip()) + + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") if len(self.content) < 2: err = "The variable name is empty" - self.log.error(err) + logger.error(err) self._name = "" return @@ -40,21 +45,18 @@ def name(self, value: str) -> None: def is_valid(self) -> Tuple[bool, str]: if not self.content: - error_msg = ( - f"A variable must start with the symbol {Symbols.VAR_PREFIX} " - "and have a name" - ) - self.log.error(error_msg) + error_msg = f"A variable must start with the symbol {Symbols.VAR_PREFIX} " "and have a name" + logger.error(error_msg) return False, error_msg if self.content[0] != Symbols.VAR_PREFIX: error_msg = f"A variable must start with the symbol {Symbols.VAR_PREFIX}" - self.log.error(error_msg) + logger.error(error_msg) return False, error_msg if len(self.content) < 2: error_msg = "The variable name is empty" - self.log.error(error_msg) + logger.error(error_msg) return False, error_msg if not re_match(r"^[a-zA-Z0-9_]*$", self.name): @@ -62,7 +64,7 @@ def is_valid(self) -> Tuple[bool, str]: f"The variable name '{self.name}' contains invalid characters. " "Only alphanumeric chars and underscore are allowed." ) - self.log.error(error_msg) + logger.error(error_msg) return False, error_msg return True, "" @@ -73,11 +75,11 @@ def render(self, variables: Optional[ContextVariables] = None) -> str: if not self.name: error_msg = "Variable rendering failed, the variable name is empty" - self.log.error(error_msg) + logger.error(error_msg) raise ValueError(error_msg) value = variables.get(self.name, None) if not value: - self.log.warning(f"Variable `{Symbols.VAR_PREFIX}{self.name}` not found") + logger.warning(f"Variable `{Symbols.VAR_PREFIX}{self.name}` not found") return value or "" diff --git a/python/semantic_kernel/template_engine/code_tokenizer.py b/python/semantic_kernel/template_engine/code_tokenizer.py index 9198f807134e..4341384cc612 100644 --- a/python/semantic_kernel/template_engine/code_tokenizer.py +++ b/python/semantic_kernel/template_engine/code_tokenizer.py @@ -1,16 +1,17 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import List +import logging +from typing import Any, List, Optional -from semantic_kernel.sk_pydantic import PydanticField +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.template_engine.blocks.block import Block from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.blocks.function_id_block import FunctionIdBlock from semantic_kernel.template_engine.blocks.symbols import Symbols from semantic_kernel.template_engine.blocks.val_block import ValBlock from semantic_kernel.template_engine.blocks.var_block import VarBlock -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) # BNF parsed by CodeTokenizer: @@ -21,9 +22,12 @@ # [value] ::= "'" [text] "'" | '"' [text] '"' # [function-call] ::= [function-id] | [function-id] [parameter] # [parameter] ::= [variable] | [value] -class CodeTokenizer(PydanticField): - def __init__(self, log: Logger = None): - self.log = log or NullLogger() +class CodeTokenizer(KernelBaseModel): + def __init__(self, log: Optional[Any] = None): + super().__init__() + + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") def tokenize(self, text: str) -> List[Block]: # Remove spaces, which are ignored anyway @@ -49,11 +53,11 @@ def tokenize(self, text: str) -> List[Block]: # 1 char only edge case if len(text) == 1: if next_char == Symbols.VAR_PREFIX: - blocks.append(VarBlock(text, self.log)) + blocks.append(VarBlock(text)) elif next_char in (Symbols.DBL_QUOTE, Symbols.SGL_QUOTE): - blocks.append(ValBlock(text, self.log)) + blocks.append(ValBlock(text)) else: - blocks.append(FunctionIdBlock(text, self.log)) + blocks.append(FunctionIdBlock(text)) return blocks @@ -84,9 +88,7 @@ def tokenize(self, text: str) -> List[Block]: # - skip the current char (escape char) # - add the next char (special char) # - jump to the one after (to handle "\\" properly) - if current_char == Symbols.ESCAPE_CHAR and self._can_be_escaped( - next_char - ): + if current_char == Symbols.ESCAPE_CHAR and self._can_be_escaped(next_char): current_token_content.append(next_char) skip_next_char = True continue @@ -95,7 +97,7 @@ def tokenize(self, text: str) -> List[Block]: # When we reach the end of the value, we add the block if current_char == text_value_delimiter: - blocks.append(ValBlock("".join(current_token_content), self.log)) + blocks.append(ValBlock("".join(current_token_content))) current_token_content.clear() current_token_type = None space_separator_found = False @@ -106,12 +108,10 @@ def tokenize(self, text: str) -> List[Block]: # Note: there might be multiple consecutive spaces if self._is_blank_space(current_char): if current_token_type == BlockTypes.VARIABLE: - blocks.append(VarBlock("".join(current_token_content), self.log)) + blocks.append(VarBlock("".join(current_token_content))) current_token_content.clear() elif current_token_type == BlockTypes.FUNCTION_ID: - blocks.append( - FunctionIdBlock("".join(current_token_content), self.log) - ) + blocks.append(FunctionIdBlock("".join(current_token_content))) current_token_content.clear() space_separator_found = True @@ -141,11 +141,11 @@ def tokenize(self, text: str) -> List[Block]: current_token_content.append(next_char) if current_token_type == BlockTypes.VALUE: - blocks.append(ValBlock("".join(current_token_content), self.log)) + blocks.append(ValBlock("".join(current_token_content))) elif current_token_type == BlockTypes.VARIABLE: - blocks.append(VarBlock("".join(current_token_content), self.log)) + blocks.append(VarBlock("".join(current_token_content))) elif current_token_type == BlockTypes.FUNCTION_ID: - blocks.append(FunctionIdBlock("".join(current_token_content), self.log)) + blocks.append(FunctionIdBlock("".join(current_token_content))) else: raise ValueError("Tokens must be separated by one space least") diff --git a/python/semantic_kernel/template_engine/prompt_template_engine.py b/python/semantic_kernel/template_engine/prompt_template_engine.py index bb1eb861e27b..5a202640a82c 100644 --- a/python/semantic_kernel/template_engine/prompt_template_engine.py +++ b/python/semantic_kernel/template_engine/prompt_template_engine.py @@ -1,28 +1,34 @@ # Copyright (c) Microsoft. All rights reserved. 
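The CodeTokenizer above no longer takes a logger; a short sketch of the tokenizer in isolation (the template snippet is illustrative):

from semantic_kernel.template_engine.code_tokenizer import CodeTokenizer

tokenizer = CodeTokenizer()
blocks = tokenizer.tokenize("WriterPlugin.Translate $input")

# Per the BNF above this should yield a FunctionIdBlock followed by a VarBlock.
func, arg = blocks
print(func.plugin_name, func.function_name)  # WriterPlugin Translate
print(arg.content)                           # $input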
-from logging import Logger +import logging from typing import TYPE_CHECKING, List, Optional +from pydantic import PrivateAttr + +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.sk_pydantic import PydanticField from semantic_kernel.template_engine.blocks.block import Block from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.protocols.text_renderer import TextRenderer from semantic_kernel.template_engine.template_tokenizer import TemplateTokenizer -from semantic_kernel.utils.null_logger import NullLogger if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext + +logger: logging.Logger = logging.getLogger(__name__) + + +class PromptTemplateEngine(KernelBaseModel): + _tokenizer: TemplateTokenizer = PrivateAttr() + def __init__(self, **kwargs) -> None: + super().__init__() -class PromptTemplateEngine(PydanticField): - def __init__(self, logger: Optional[Logger] = None) -> None: - self._logger = logger or NullLogger() - self._tokenizer = TemplateTokenizer(self._logger) + if kwargs.get("logger"): + logger.warning("The `logger` parameter is deprecated. Please use the `logging` module instead.") + self._tokenizer = TemplateTokenizer() - def extract_blocks( - self, template_text: Optional[str] = None, validate: bool = True - ) -> List[Block]: + def extract_blocks(self, template_text: Optional[str] = None, validate: bool = True) -> List[Block]: """ Given a prompt template string, extract all the blocks (text, variables, function calls). @@ -33,7 +39,7 @@ def extract_blocks( :return: A list of all the blocks, ie the template tokenized in text, variables and function calls """ - self._logger.debug(f"Extracting blocks from template: {template_text}") + logger.debug(f"Extracting blocks from template: {template_text}") blocks = self._tokenizer.tokenize(template_text) if validate: @@ -44,7 +50,7 @@ def extract_blocks( return blocks - async def render_async(self, template_text: str, context: "SKContext") -> str: + async def render(self, template_text: str, context: "KernelContext") -> str: """ Given a prompt template, replace the variables with their values and execute the functions replacing their reference with the @@ -54,13 +60,11 @@ async def render_async(self, template_text: str, context: "SKContext") -> str: :param context: Access into the current kernel execution context :return: The prompt template ready to be used for an AI request """ - self._logger.debug(f"Rendering string template: {template_text}") + logger.debug(f"Rendering string template: {template_text}") blocks = self.extract_blocks(template_text) - return await self.render_blocks_async(blocks, context) + return await self.render_blocks(blocks, context) - async def render_blocks_async( - self, blocks: List[Block], context: "SKContext" - ) -> str: + async def render_blocks(self, blocks: List[Block], context: "KernelContext") -> str: """ Given a list of blocks render each block and compose the final result. 
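Throughout this change, constructor-injected `log`/`logger` arguments are replaced by module-level loggers created with `logging.getLogger(__name__)`, and passing a logger now only triggers a deprecation warning. A minimal caller-side sketch of the new pattern, assuming the `semantic_kernel.*` logger names implied by `__name__` (the level, format, and sample input below are illustrative, not part of this diff):

    import logging

    from semantic_kernel.template_engine.code_tokenizer import CodeTokenizer

    # Verbosity is now controlled through the standard logging hierarchy
    # rather than by passing a Logger into each class.
    logging.basicConfig(level=logging.WARNING)
    logging.getLogger("semantic_kernel").setLevel(logging.DEBUG)

    # CodeTokenizer(log=...) would still work but emits a deprecation warning.
    tokenizer = CodeTokenizer()
    blocks = tokenizer.tokenize("$city")  # yields a single variable block
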
@@ -70,27 +74,22 @@ async def render_blocks_async( """ from semantic_kernel.template_engine.protocols.code_renderer import CodeRenderer - self._logger.debug(f"Rendering list of {len(blocks)} blocks") + logger.debug(f"Rendering list of {len(blocks)} blocks") rendered_blocks = [] for block in blocks: if isinstance(block, TextRenderer): rendered_blocks.append(block.render(context.variables)) elif isinstance(block, CodeRenderer): - rendered_blocks.append(await block.render_code_async(context)) + rendered_blocks.append(await block.render_code(context)) else: - error = ( - "unexpected block type, the block doesn't have a rendering " - "protocol assigned to it" - ) - self._logger.error(error) + error = "unexpected block type, the block doesn't have a rendering " "protocol assigned to it" + logger.error(error) raise ValueError(error) - self._logger.debug(f"Rendered prompt: {''.join(rendered_blocks)}") + logger.debug(f"Rendered prompt: {''.join(rendered_blocks)}") return "".join(rendered_blocks) - def render_variables( - self, blocks: List[Block], variables: Optional[ContextVariables] = None - ) -> List[Block]: + def render_variables(self, blocks: List[Block], variables: Optional[ContextVariables] = None) -> List[Block]: """ Given a list of blocks, render the Variable Blocks, replacing placeholders with the actual value in memory. @@ -102,7 +101,7 @@ def render_variables( """ from semantic_kernel.template_engine.blocks.text_block import TextBlock - self._logger.debug("Rendering variables") + logger.debug("Rendering variables") rendered_blocks = [] for block in blocks: @@ -111,15 +110,11 @@ def render_variables( continue if not isinstance(block, TextRenderer): raise ValueError("TextBlock must implement TextRenderer protocol") - rendered_blocks.append( - TextBlock.from_text(block.render(variables), log=self._logger) - ) + rendered_blocks.append(TextBlock.from_text(block.render(variables))) return rendered_blocks - async def render_code_async( - self, blocks: List[Block], execution_context: "SKContext" - ) -> List[Block]: + async def render_code(self, blocks: List[Block], execution_context: "KernelContext") -> List[Block]: """ Given a list of blocks, render the Code Blocks, executing the functions and replacing placeholders with the functions result. 
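The engine's awaitable methods drop their `_async` suffix while remaining coroutines, and `SKContext` becomes `KernelContext`. A rough before/after sketch for calling code, assuming an engine instance and a kernel-created context are already available (the helper name is a placeholder):

    from semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine

    engine = PromptTemplateEngine()

    async def build_prompt(template_text: str, context) -> str:
        # Previously: await engine.render_async(template_text, context)
        # `context` is now a KernelContext rather than an SKContext.
        return await engine.render(template_text, context)
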
@@ -132,7 +127,7 @@ async def render_code_async( from semantic_kernel.template_engine.blocks.text_block import TextBlock from semantic_kernel.template_engine.protocols.code_renderer import CodeRenderer - self._logger.debug("Rendering code") + logger.debug("Rendering code") rendered_blocks = [] for block in blocks: @@ -141,10 +136,6 @@ async def render_code_async( continue if not isinstance(block, CodeRenderer): raise ValueError("CodeBlock must implement CodeRenderer protocol") - rendered_blocks.append( - TextBlock.from_text( - await block.render_code_async(execution_context), log=self._logger - ) - ) + rendered_blocks.append(TextBlock.from_text(await block.render_code(execution_context))) return rendered_blocks diff --git a/python/semantic_kernel/template_engine/protocols/code_renderer.py b/python/semantic_kernel/template_engine/protocols/code_renderer.py index 5ff77a69f0da..d2a4063d3e9d 100644 --- a/python/semantic_kernel/template_engine/protocols/code_renderer.py +++ b/python/semantic_kernel/template_engine/protocols/code_renderer.py @@ -2,7 +2,7 @@ from typing import Protocol, runtime_checkable -from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.orchestration.kernel_context import KernelContext @runtime_checkable @@ -11,11 +11,11 @@ class CodeRenderer(Protocol): Protocol for dynamic code blocks that need async IO to be rendered. """ - async def render_code_async(self, context: SKContext) -> str: + async def render_code(self, context: KernelContext) -> str: """ Render the block using the given context. - :param context: SK execution context + :param context: kernel execution context :return: Rendered content """ ... diff --git a/python/semantic_kernel/template_engine/protocols/prompt_templating_engine.py b/python/semantic_kernel/template_engine/protocols/prompt_templating_engine.py index 55f2cd442329..e8d25f7cd749 100644 --- a/python/semantic_kernel/template_engine/protocols/prompt_templating_engine.py +++ b/python/semantic_kernel/template_engine/protocols/prompt_templating_engine.py @@ -6,7 +6,7 @@ from semantic_kernel.template_engine.blocks.block import Block if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext @runtime_checkable @@ -15,9 +15,7 @@ class PromptTemplatingEngine(Protocol): Prompt templating engine protocol. """ - def extract_blocks( - self, template_text: Optional[str] = None, validate: bool = True - ) -> List[Block]: + def extract_blocks(self, template_text: Optional[str] = None, validate: bool = True) -> List[Block]: """ Given a prompt template string, extract all the blocks (text, variables, function calls). @@ -30,7 +28,7 @@ def extract_blocks( """ ... - async def render_async(self, template_text: str, context: "SKContext") -> str: + async def render(self, template_text: str, context: "KernelContext") -> str: """ Given a prompt template, replace the variables with their values and execute the functions replacing their reference with the @@ -42,9 +40,7 @@ async def render_async(self, template_text: str, context: "SKContext") -> str: """ ... - async def render_blocks_async( - self, blocks: List[Block], context: "SKContext" - ) -> str: + async def render_blocks(self, blocks: List[Block], context: "KernelContext") -> str: """ Given a list of blocks render each block and compose the final result. @@ -54,9 +50,7 @@ async def render_blocks_async( """ ... 
- def render_variables( - self, blocks: List[Block], variables: Optional[ContextVariables] = None - ) -> List[Block]: + def render_variables(self, blocks: List[Block], variables: Optional[ContextVariables] = None) -> List[Block]: """ Given a list of blocks, render the Variable Blocks, replacing placeholders with the actual value in memory. @@ -68,9 +62,7 @@ def render_variables( """ ... - async def render_code_async( - self, blocks: List[Block], execution_context: "SKContext" - ) -> List[Block]: + async def render_code(self, blocks: List[Block], execution_context: "KernelContext") -> List[Block]: """ Given a list of blocks, render the Code Blocks, executing the functions and replacing placeholders with the functions result. diff --git a/python/semantic_kernel/template_engine/template_tokenizer.py b/python/semantic_kernel/template_engine/template_tokenizer.py index a055356ed261..f4e4f2b5b57d 100644 --- a/python/semantic_kernel/template_engine/template_tokenizer.py +++ b/python/semantic_kernel/template_engine/template_tokenizer.py @@ -1,16 +1,19 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger -from typing import List +import logging +from typing import Any, List, Optional -from semantic_kernel.sk_pydantic import PydanticField +from pydantic import PrivateAttr + +from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.template_engine.blocks.block import Block from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.blocks.code_block import CodeBlock from semantic_kernel.template_engine.blocks.symbols import Symbols from semantic_kernel.template_engine.blocks.text_block import TextBlock from semantic_kernel.template_engine.code_tokenizer import CodeTokenizer -from semantic_kernel.utils.null_logger import NullLogger + +logger: logging.Logger = logging.getLogger(__name__) # BNF parsed by TemplateTokenizer: @@ -21,10 +24,14 @@ # | "{{" [function-call] "}}" # [text-block] ::= [any-char] | [any-char] [text-block] # [any-char] ::= any char -class TemplateTokenizer(PydanticField): - def __init__(self, log: Logger = None): - self.log = log or NullLogger() - self.code_tokenizer = CodeTokenizer(self.log) +class TemplateTokenizer(KernelBaseModel): + _code_tokenizer: CodeTokenizer = PrivateAttr() + + def __init__(self, log: Optional[Any] = None): + super().__init__() + if log: + logger.warning("The `log` parameter is deprecated. 
Please use the `logging` module instead.") + self._code_tokenizer = CodeTokenizer() def tokenize(self, text: str) -> List[Block]: # An empty block consists of 4 chars: "{{}}" @@ -37,11 +44,11 @@ def tokenize(self, text: str) -> List[Block]: # Render None/empty to "" if not text or text == "": - return [TextBlock.from_text("", log=self.log)] + return [TextBlock.from_text("")] # If the template is "empty" return it as a text block if len(text) < MIN_CODE_BLOCK_LENGTH: - return [TextBlock.from_text(text, log=self.log)] + return [TextBlock.from_text(text)] blocks = [] end_of_last_block = 0 @@ -64,11 +71,7 @@ def tokenize(self, text: str) -> List[Block]: # When "{{" is found outside a value # Note: "{{ {{x}}" => ["{{ ", "{{x}}"] - if ( - not inside_text_value - and current_char == Symbols.BLOCK_STARTER - and next_char == Symbols.BLOCK_STARTER - ): + if not inside_text_value and current_char == Symbols.BLOCK_STARTER and next_char == Symbols.BLOCK_STARTER: # A block starts at the first "{" block_start_pos = current_char_pos block_start_found = True @@ -77,9 +80,7 @@ def tokenize(self, text: str) -> List[Block]: if block_start_found: # While inside a text value, when the end quote is found if inside_text_value: - if current_char == Symbols.ESCAPE_CHAR and self._can_be_escaped( - next_char - ): + if current_char == Symbols.ESCAPE_CHAR and self._can_be_escaped(next_char): skip_next_char = True continue @@ -91,10 +92,7 @@ def tokenize(self, text: str) -> List[Block]: inside_text_value = True text_value_delimiter = current_char # If the block ends here - elif ( - current_char == Symbols.BLOCK_ENDER - and next_char == Symbols.BLOCK_ENDER - ): + elif current_char == Symbols.BLOCK_ENDER and next_char == Symbols.BLOCK_ENDER: # If there is plain text between the current # var/val/code block and the previous one, # add it as a text block @@ -104,66 +102,52 @@ def tokenize(self, text: str) -> List[Block]: text, end_of_last_block, block_start_pos, - log=self.log, ) ) # Extract raw block - content_with_delimiters = text[block_start_pos : cursor + 1] + content_with_delimiters = text[block_start_pos : cursor + 1] # noqa: E203 # Remove "{{" and "}}" delimiters and trim whitespace - content_without_delimiters = content_with_delimiters[ - 2:-2 - ].strip() + content_without_delimiters = content_with_delimiters[2:-2].strip() if len(content_without_delimiters) == 0: # If what is left is empty, consider the raw block # a TextBlock - blocks.append( - TextBlock.from_text( - content_with_delimiters, log=self.log - ) - ) + blocks.append(TextBlock.from_text(content_with_delimiters)) else: - code_blocks = self.code_tokenizer.tokenize( - content_without_delimiters - ) + code_blocks = self._code_tokenizer.tokenize(content_without_delimiters) first_block_type = code_blocks[0].type if first_block_type == BlockTypes.VARIABLE: if len(code_blocks) > 1: raise ValueError( - "Invalid token detected after the " - f"variable: {content_without_delimiters}" + "Invalid token detected after the " f"variable: {content_without_delimiters}" ) blocks.append(code_blocks[0]) elif first_block_type == BlockTypes.VALUE: if len(code_blocks) > 1: raise ValueError( - "Invalid token detected after the " - "value: {content_without_delimiters}" + "Invalid token detected after the " "value: {content_without_delimiters}" ) blocks.append(code_blocks[0]) elif first_block_type == BlockTypes.FUNCTION_ID: if len(code_blocks) > 2: raise ValueError( - "Functions support only one " - f"parameter: {content_without_delimiters}" + "Functions support only one " 
f"parameter: {content_without_delimiters}" ) blocks.append( CodeBlock( content_without_delimiters, code_blocks, - self.log, ) ) else: raise ValueError( - "Code tokenizer returned an incorrect " - f"first token type {first_block_type}" + "Code tokenizer returned an incorrect " f"first token type {first_block_type}" ) end_of_last_block = cursor + 1 @@ -171,9 +155,7 @@ def tokenize(self, text: str) -> List[Block]: # If there is something left after the last block, capture it as a TextBlock if end_of_last_block < len(text): - blocks.append( - TextBlock.from_text(text, end_of_last_block, len(text), log=self.log) - ) + blocks.append(TextBlock.from_text(text, end_of_last_block, len(text))) return blocks diff --git a/python/semantic_kernel/text/__init__.py b/python/semantic_kernel/text/__init__.py index 08a824db9ffc..2ee67028f355 100644 --- a/python/semantic_kernel/text/__init__.py +++ b/python/semantic_kernel/text/__init__.py @@ -1,6 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. -from semantic_kernel.text.function_extension import aggregate_chunked_results_async +from semantic_kernel.text.function_extension import aggregate_chunked_results from semantic_kernel.text.text_chunker import ( split_markdown_lines, split_markdown_paragraph, @@ -13,5 +13,5 @@ "split_markdown_paragraph", "split_plaintext_paragraph", "split_markdown_lines", - "aggregate_chunked_results_async", + "aggregate_chunked_results", ] diff --git a/python/semantic_kernel/text/function_extension.py b/python/semantic_kernel/text/function_extension.py index bd5c2825e87e..52f166b3546a 100644 --- a/python/semantic_kernel/text/function_extension.py +++ b/python/semantic_kernel/text/function_extension.py @@ -2,20 +2,20 @@ from typing import List -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function import SKFunction +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction -async def aggregate_chunked_results_async( - func: SKFunction, chunked_results: List[str], context: SKContext -) -> SKContext: +async def aggregate_chunked_results( + func: KernelFunction, chunked_results: List[str], context: KernelContext +) -> KernelContext: """ Aggregate the results from the chunked results. """ results = [] for chunk in chunked_results: context.variables.update(chunk) - context = await func.invoke_async(context=context) + context = await func.invoke(context=context) results.append(str(context.variables)) diff --git a/python/semantic_kernel/text/text_chunker.py b/python/semantic_kernel/text/text_chunker.py index f0197649d4e8..66f6bb682122 100644 --- a/python/semantic_kernel/text/text_chunker.py +++ b/python/semantic_kernel/text/text_chunker.py @@ -48,9 +48,7 @@ def _token_counter(text: str) -> int: return len(text) // 4 -def split_plaintext_lines( - text: str, max_token_per_line: int, token_counter: Callable = _token_counter -) -> List[str]: +def split_plaintext_lines(text: str, max_token_per_line: int, token_counter: Callable = _token_counter) -> List[str]: """ Split plain text into lines. it will split on new lines first, and then on punctuation. @@ -63,9 +61,7 @@ def split_plaintext_lines( ) -def split_markdown_lines( - text: str, max_token_per_line: int, token_counter: Callable = _token_counter -) -> List[str]: +def split_markdown_lines(text: str, max_token_per_line: int, token_counter: Callable = _token_counter) -> List[str]: """ Split markdown into lines. 
It will split on punctuation first, and then on space and new lines. @@ -78,9 +74,7 @@ def split_markdown_lines( ) -def split_plaintext_paragraph( - text: List[str], max_tokens: int, token_counter: Callable = _token_counter -) -> List[str]: +def split_plaintext_paragraph(text: List[str], max_tokens: int, token_counter: Callable = _token_counter) -> List[str]: """ Split plain text into paragraphs. """ @@ -96,14 +90,10 @@ def split_plaintext_paragraph( ) ) - return _split_text_paragraph( - text=split_lines, max_tokens=max_tokens, token_counter=token_counter - ) + return _split_text_paragraph(text=split_lines, max_tokens=max_tokens, token_counter=token_counter) -def split_markdown_paragraph( - text: List[str], max_tokens: int, token_counter: Callable = _token_counter -) -> List[str]: +def split_markdown_paragraph(text: List[str], max_tokens: int, token_counter: Callable = _token_counter) -> List[str]: """ Split markdown into paragraphs. """ @@ -118,14 +108,10 @@ def split_markdown_paragraph( ) ) - return _split_text_paragraph( - text=split_lines, max_tokens=max_tokens, token_counter=token_counter - ) + return _split_text_paragraph(text=split_lines, max_tokens=max_tokens, token_counter=token_counter) -def _split_text_paragraph( - text: List[str], max_tokens: int, token_counter: Callable = _token_counter -) -> List[str]: +def _split_text_paragraph(text: List[str], max_tokens: int, token_counter: Callable = _token_counter) -> List[str]: """ Split text into paragraphs. """ @@ -139,10 +125,7 @@ def _split_text_paragraph( num_tokens_line = token_counter(line) num_tokens_paragraph = token_counter("".join(current_paragraph)) - if ( - num_tokens_paragraph + num_tokens_line + 1 >= max_tokens - and len(current_paragraph) > 0 - ): + if num_tokens_paragraph + num_tokens_line + 1 >= max_tokens and len(current_paragraph) > 0: paragraphs.append("".join(current_paragraph).strip()) current_paragraph = [] diff --git a/python/semantic_kernel/utils/logging.py b/python/semantic_kernel/utils/logging.py new file mode 100644 index 000000000000..3a171572a2f9 --- /dev/null +++ b/python/semantic_kernel/utils/logging.py @@ -0,0 +1,11 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging + + +def setup_logging(): + # Setup a detailed logging format. + logging.basicConfig( + format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) diff --git a/python/semantic_kernel/utils/naming.py b/python/semantic_kernel/utils/naming.py new file mode 100644 index 000000000000..2ed869392d16 --- /dev/null +++ b/python/semantic_kernel/utils/naming.py @@ -0,0 +1,19 @@ +# Copyright (c) Microsoft. All rights reserved. + +import random +import string + + +def generate_random_ascii_name(length: int = 16) -> str: + """ + Generate a series of random ASCII characters of the specified length. + As example, plugin/function names can contain upper/lowercase letters, and underscores + + Args: + length (int): The length of the string to generate. + + Returns: + A string of random ASCII characters of the specified length. + """ + letters = string.ascii_letters + return "".join(random.choices(letters, k=length)) diff --git a/python/semantic_kernel/utils/null_logger.py b/python/semantic_kernel/utils/null_logger.py index 586d3d1d27a6..b84ebad517c1 100644 --- a/python/semantic_kernel/utils/null_logger.py +++ b/python/semantic_kernel/utils/null_logger.py @@ -1,9 +1,11 @@ # Copyright (c) Microsoft. All rights reserved. 
from functools import wraps -from logging import Logger +from logging import Logger, getLogger from typing import Any, Callable +logger: Logger = getLogger(__name__) + def _nullify(fn) -> Callable[[Any], None]: """General wrapper to not call wrapped function""" @@ -18,14 +20,8 @@ def _inner_nullify(*args, **kwargs) -> None: class _NullerMeta(type): def __new__(cls, classname, base_classes, class_dict): """Return a Class that nullifies all Logger object callbacks""" - nullified_dict = { - attr_name: _nullify(attr) - for attr_name, attr in Logger.__dict__.items() - if callable(attr) - } - return type.__new__( - cls, classname, base_classes, {**class_dict, **nullified_dict} - ) + nullified_dict = {attr_name: _nullify(attr) for attr_name, attr in Logger.__dict__.items() if callable(attr)} + return type.__new__(cls, classname, base_classes, {**class_dict, **nullified_dict}) class NullLogger(Logger, metaclass=_NullerMeta): @@ -35,6 +31,12 @@ class NullLogger(Logger, metaclass=_NullerMeta): def __init__(self): super().__init__(None) + logger.warning( + ( + "NullLogger is deprecated and will be removed in a future release,", + "the same goes for all 'log' and 'logger' arguments.", + ) + ) __all__ = ["NullLogger"] diff --git a/python/semantic_kernel/utils/settings.py b/python/semantic_kernel/utils/settings.py index c75319e75647..3bba2168bc81 100644 --- a/python/semantic_kernel/utils/settings.py +++ b/python/semantic_kernel/utils/settings.py @@ -48,13 +48,9 @@ def azure_openai_settings_from_dot_env( # Azure requires the deployment name, the API key and the endpoint URL. if include_deployment: - assert ( - deployment is not None - ), "Azure OpenAI deployment name not found in .env file" + assert deployment is not None, "Azure OpenAI deployment name not found in .env file" if include_api_version: - assert ( - api_version is not None - ), "Azure OpenAI API version not found in .env file" + assert api_version is not None, "Azure OpenAI API version not found in .env file" assert api_key, "Azure OpenAI API key not found in .env file" assert endpoint, "Azure OpenAI endpoint not found in .env file" @@ -134,6 +130,46 @@ def pinecone_settings_from_dot_env() -> Tuple[str, Optional[str]]: return api_key, environment +def astradb_settings_from_dot_env() -> Tuple[str, Optional[str]]: + """ + Reads the Astradb API key and Environment from the .env file. + Returns: + Tuple[str, str]: The Astradb API key, the Astradb Environment + """ + + app_token, db_id, region, keyspace = None, None, None, None + with open(".env", "r") as f: + lines = f.readlines() + + for line in lines: + if line.startswith("ASTRADB_APP_TOKEN"): + parts = line.split("=")[1:] + app_token = "=".join(parts).strip().strip('"') + continue + + if line.startswith("ASTRADB_ID"): + parts = line.split("=")[1:] + db_id = "=".join(parts).strip().strip('"') + continue + + if line.startswith("ASTRADB_REGION"): + parts = line.split("=")[1:] + region = "=".join(parts).strip().strip('"') + continue + + if line.startswith("ASTRADB_KEYSPACE"): + parts = line.split("=")[1:] + keyspace = "=".join(parts).strip().strip('"') + continue + + assert app_token, "Astradb Application token not found in .env file" + assert db_id, "Astradb ID not found in .env file" + assert region, "Astradb Region not found in .env file" + assert keyspace, "Astradb Keyspace name not found in .env file" + + return app_token, db_id, region, keyspace + + def weaviate_settings_from_dot_env() -> Tuple[Optional[str], str]: """ Reads the Weaviate API key and URL from the .env file. 
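The new `astradb_settings_from_dot_env()` helper expects four `ASTRADB_*` entries in the local `.env` file and returns them as a tuple (the annotation still reads `Tuple[str, Optional[str]]`, but the body returns all four values). A small usage sketch with placeholder values:

    # Example .env entries (placeholder values):
    #   ASTRADB_APP_TOKEN="AstraCS:..."
    #   ASTRADB_ID="your-database-id"
    #   ASTRADB_REGION="us-east1"
    #   ASTRADB_KEYSPACE="your-keyspace"

    from semantic_kernel.utils.settings import astradb_settings_from_dot_env

    app_token, db_id, region, keyspace = astradb_settings_from_dot_env()
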
@@ -198,6 +234,21 @@ def google_palm_settings_from_dot_env() -> str: return api_key +def azure_cosmos_db_settings_from_dot_env() -> Tuple[str, str]: + """ + Reads the Azure CosmosDB environment variables for the .env file. + Returns: + dict: The Azure CosmosDB environment variables + """ + config = dotenv_values(".env") + cosmos_api = config.get("AZCOSMOS_API") + cosmos_connstr = config.get("AZCOSMOS_CONNSTR") + + assert cosmos_connstr is not None, "Azure Cosmos Connection String not found in .env file" + + return cosmos_api, cosmos_connstr + + def redis_settings_from_dot_env() -> str: """Reads the Redis connection string from the .env file. @@ -207,8 +258,41 @@ def redis_settings_from_dot_env() -> str: config = dotenv_values(".env") connection_string = config.get("REDIS_CONNECTION_STRING", None) - assert ( - connection_string is not None - ), "Redis connection string not found in .env file" + assert connection_string is not None, "Redis connection string not found in .env file" return connection_string + + +def azure_aisearch_settings_from_dot_env( + include_index_name=False, +) -> Union[Tuple[str, str], Tuple[str, str, str]]: + """ + Reads the Azure AI Search environment variables for the .env file. + + Returns: + Tuple[str, str]: Azure AI Search API key, the Azure AI Search URL + """ + config = dotenv_values(".env") + api_key = config.get("AZURE_AISEARCH_API_KEY", None) + url = config.get("AZURE_AISEARCH_URL", None) + + assert url is not None, "Azure AI Search URL not found in .env file" + assert api_key is not None, "Azure AI Search API key not found in .env file" + + if not include_index_name: + return api_key, url + else: + index_name = config.get("AZURE_AISEARCH_INDEX_NAME", None) + assert index_name is not None, "Azure AI Search index name not found in .env file" + return api_key, url, index_name + + +def azure_aisearch_settings_from_dot_env_as_dict() -> Dict[str, str]: + """ + Reads the Azure AI Search environment variables including index name from the .env file. + + Returns: + Dict[str, str]: the Azure AI search environment variables + """ + api_key, url, index_name = azure_aisearch_settings_from_dot_env(include_index_name=True) + return {"key": api_key, "endpoint": url, "indexName": index_name} diff --git a/python/semantic_kernel/utils/validation.py b/python/semantic_kernel/utils/validation.py index a2807d61a68e..5a3590ec2e93 100644 --- a/python/semantic_kernel/utils/validation.py +++ b/python/semantic_kernel/utils/validation.py @@ -3,25 +3,29 @@ from re import match as re_match from typing import Optional +# Validation regexes +PLUGIN_NAME_REGEX = r"^[0-9A-Za-z_]*$" +FUNCTION_NAME_REGEX = r"^[0-9A-Za-z_]*$" +FUNCTION_PARAM_NAME_REGEX = r"^[0-9A-Za-z_]*$" -def validate_skill_name(value: Optional[str]) -> None: + +def validate_plugin_name(value: Optional[str]) -> None: """ - Validates that the skill name is valid. + Validates that the plugin name is valid. - Valid skill names are non-empty and + Valid plugin names are non-empty and match the regex: [0-9A-Za-z_]* - :param value: The skill name to validate. + :param value: The plugin name to validate. - :raises ValueError: If the skill name is invalid. + :raises ValueError: If the plugin name is invalid. """ if not value: - raise ValueError("The skill name cannot be `None` or empty") + raise ValueError("The plugin name cannot be `None` or empty") - SKILL_NAME_REGEX = r"^[0-9A-Za-z_]*$" - if not re_match(SKILL_NAME_REGEX, value): + if not re_match(PLUGIN_NAME_REGEX, value): raise ValueError( - f"Invalid skill name: {value}. 
Skill " + f"Invalid plugin name: {value}. Plugin " f"names may only contain ASCII letters, " f"digits, and underscores." ) @@ -41,7 +45,6 @@ def validate_function_name(value: Optional[str]) -> None: if not value: raise ValueError("The function name cannot be `None` or empty") - FUNCTION_NAME_REGEX = r"^[0-9A-Za-z_]*$" if not re_match(FUNCTION_NAME_REGEX, value): raise ValueError( f"Invalid function name: {value}. Function " @@ -64,7 +67,6 @@ def validate_function_param_name(value: Optional[str]) -> None: if not value: raise ValueError("The function parameter name cannot be `None` or empty") - FUNCTION_PARAM_NAME_REGEX = r"^[0-9A-Za-z_]*$" if not re_match(FUNCTION_PARAM_NAME_REGEX, value): raise ValueError( f"Invalid function parameter name: {value}. Function parameter " diff --git a/python/tests/conftest.py b/python/tests/conftest.py index 25794c29e725..395b047b47fa 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import os import typing as t import warnings @@ -9,11 +11,10 @@ import semantic_kernel as sk from semantic_kernel.memory.null_memory import NullMemory from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function import SKFunction -from semantic_kernel.skill_definition.read_only_skill_collection import ( - ReadOnlySkillCollection, -) +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin +from semantic_kernel.plugin_definition.kernel_plugin_collection import KernelPluginCollection @pytest.fixture(autouse=True) @@ -54,7 +55,7 @@ def enable_debug_mode(): builtins.pr = snoop.pp -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def create_kernel(): kernel = sk.Kernel() return kernel @@ -87,22 +88,19 @@ def get_oai_config(): @pytest.fixture() -def context_factory() -> t.Callable[[ContextVariables], SKContext]: - """Return a factory for SKContext objects.""" - - def create_context( - context_variables: ContextVariables, *functions: SKFunction - ) -> SKContext: - """Return a SKContext object.""" - return SKContext( +def context_factory() -> t.Callable[[ContextVariables], KernelContext]: + """Return a factory for KernelContext objects.""" + + def create_context(context_variables: ContextVariables, *functions: KernelFunction) -> KernelContext: + """Return a KernelContext object.""" + + plugin = KernelPlugin(name="test_plugin", functions=functions) + + return KernelContext( context_variables, NullMemory(), - skill_collection=ReadOnlySkillCollection( - data={ - ReadOnlySkillCollection.GLOBAL_SKILL.lower(): { - f.name: f for f in functions - } - }, + plugins=KernelPluginCollection( + plugins=[plugin], ), ) diff --git a/python/tests/integration/completions/conftest.py b/python/tests/integration/completions/conftest.py index 7b0a48ceca49..72096efeb4a8 100644 --- a/python/tests/integration/completions/conftest.py +++ b/python/tests/integration/completions/conftest.py @@ -4,83 +4,10 @@ import pytest -import semantic_kernel.connectors.ai.hugging_face as sk_hf - if sys.version_info >= (3, 9): import semantic_kernel.connectors.ai.google_palm as sk_gp -@pytest.fixture( - scope="module", - params=[ - ("google/flan-t5-base", "text2text-generation"), - ("facebook/bart-large-cnn", 
"summarization"), - ], -) -def setup_hf_text_completion_function(create_kernel, request): - kernel = create_kernel - - # Configure LLM service - kernel.add_text_completion_service( - request.param[0], - sk_hf.HuggingFaceTextCompletion(request.param[0], task=request.param[1]), - ) - - # Define semantic function using SK prompt template language - sk_prompt = "Hello, I like {{$input}}{{$input2}}" - - # Create the semantic function - text2text_function = kernel.create_semantic_function( - sk_prompt, max_tokens=25, temperature=0.7, top_p=0.5 - ) - - # User input - simple_input = "sleeping and " - - yield kernel, text2text_function, simple_input - - -@pytest.fixture(scope="module") -def setup_summarize_function(create_kernel): - # User input (taken from https://en.wikipedia.org/wiki/Whale) - text_to_summarize = """ - Whales are fully aquatic, open-ocean animals: - they can feed, mate, give birth, suckle and raise their young at sea. - Whales range in size from the 2.6 metres (8.5 ft) and 135 kilograms (298 lb) - dwarf sperm whale to the 29.9 metres (98 ft) and 190 tonnes (210 short tons) blue whale, - which is the largest known animal that has ever lived. The sperm whale is the largest - toothed predator on Earth. Several whale species exhibit sexual dimorphism, - in that the females are larger than males. - """ - additional_text = """ - The word "whale" comes from the Old English hwæl, from Proto-Germanic *hwalaz, - from Proto-Indo-European *(s)kwal-o-, meaning "large sea fish".[3][4] - The Proto-Germanic *hwalaz is also the source of Old Saxon hwal, - Old Norse hvalr, hvalfiskr, Swedish val, Middle Dutch wal, walvisc, Dutch walvis, - Old High German wal, and German Wal.[3] Other archaic English forms include wal, - wale, whal, whalle, whaille, wheal, etc.[5] - """ - - # Define semantic function using SK prompt template language - sk_prompt = "{{$input}} {{$input2}}" - - kernel = create_kernel - - # Configure LLM service - kernel.add_text_completion_service( - "facebook/bart-large-cnn", - sk_hf.HuggingFaceTextCompletion( - "facebook/bart-large-cnn", task="summarization" - ), - ) - - # Create the semantic function - summarize_function = kernel.create_semantic_function( - sk_prompt, max_tokens=80, temperature=0, top_p=0.5 - ) - yield kernel, summarize_function, text_to_summarize, additional_text - - @pytest.fixture(scope="module") def setup_tldr_function_for_oai_models(create_kernel): kernel = create_kernel @@ -113,7 +40,7 @@ def setup_tldr_function_for_oai_models(create_kernel): @pytest.fixture(scope="module") -def setup_summarize_conversation_using_skill(create_kernel): +def setup_summarize_conversation_using_plugin(create_kernel): kernel = create_kernel ChatTranscript = """John: Hello, how are you? Jane: I'm fine, thanks. How are you? 
@@ -161,18 +88,14 @@ def setup_gp_text_completion_function(create_kernel, get_gp_config): kernel = create_kernel api_key = get_gp_config # Configure LLM service - palm_text_completion = sk_gp.GooglePalmTextCompletion( - "models/text-bison-001", api_key - ) + palm_text_completion = sk_gp.GooglePalmTextCompletion(ai_model_id="models/text-bison-001", api_key=api_key) kernel.add_text_completion_service("models/text-bison-001", palm_text_completion) # Define semantic function using SK prompt template language sk_prompt = "Hello, I like {{$input}}{{$input2}}" # Create the semantic function - text2text_function = kernel.create_semantic_function( - sk_prompt, max_tokens=25, temperature=0.7, top_p=0.5 - ) + text2text_function = kernel.create_semantic_function(sk_prompt, max_tokens=25, temperature=0.7, top_p=0.5) # User input simple_input = "sleeping and " diff --git a/python/tests/integration/completions/test_azure_oai_chat_service.py b/python/tests/integration/completions/test_azure_oai_chat_service.py index 3e93161efdd5..bd61707b8b4f 100644 --- a/python/tests/integration/completions/test_azure_oai_chat_service.py +++ b/python/tests/integration/completions/test_azure_oai_chat_service.py @@ -3,15 +3,14 @@ import os import pytest +from openai import AsyncAzureOpenAI from test_utils import retry import semantic_kernel.connectors.ai.open_ai as sk_oai @pytest.mark.asyncio -async def test_azure_e2e_chat_completion_with_skill( - setup_tldr_function_for_oai_models, get_aoai_config -): +async def test_azure_e2e_chat_completion_with_plugin(setup_tldr_function_for_oai_models, get_aoai_config): kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models _, api_key, endpoint = get_aoai_config @@ -28,27 +27,21 @@ async def test_azure_e2e_chat_completion_with_skill( # Configure LLM service kernel.add_chat_service( "chat_completion", - sk_oai.AzureChatCompletion(deployment_name, endpoint, api_key), + sk_oai.AzureChatCompletion(deployment_name=deployment_name, endpoint=endpoint, api_key=api_key), ) # Create the semantic function - tldr_function = kernel.create_semantic_function( - sk_prompt, max_tokens=200, temperature=0, top_p=0.5 - ) + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) - summary = await retry( - lambda: kernel.run_async(tldr_function, input_str=text_to_summarize) - ) + summary = await retry(lambda: kernel.run(tldr_function, input_str=text_to_summarize)) output = str(summary).strip() print(f"TLDR using input string: '{output}'") - assert "First Law" not in output and ( - "human" in output or "Human" in output or "preserve" in output - ) + assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) assert len(output) < 100 @pytest.mark.asyncio -async def test_oai_chat_stream_service_with_skills( +async def test_azure_e2e_chat_completion_with_plugin_and_provided_client( setup_tldr_function_for_oai_models, get_aoai_config ): kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models @@ -64,27 +57,28 @@ async def test_oai_chat_stream_service_with_skills( print(f"* Endpoint: {endpoint}") print(f"* Deployment: {deployment_name}") + client = AsyncAzureOpenAI( + azure_endpoint=endpoint, + azure_deployment=deployment_name, + api_key=api_key, + api_version="2023-05-15", + default_headers={"Test-User-X-ID": "test"}, + ) + # Configure LLM service kernel.add_chat_service( "chat_completion", - sk_oai.AzureChatCompletion(deployment_name, endpoint, api_key), + sk_oai.AzureChatCompletion( + 
deployment_name=deployment_name, + async_client=client, + ), ) # Create the semantic function - tldr_function = kernel.create_semantic_function( - sk_prompt, max_tokens=200, temperature=0, top_p=0.5 - ) - - result = [] - async for message in kernel.run_stream_async( - tldr_function, input_str=text_to_summarize - ): - result.append(message) - output = "".join(result).strip() + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) + summary = await retry(lambda: kernel.run(tldr_function, input_str=text_to_summarize)) + output = str(summary).strip() print(f"TLDR using input string: '{output}'") - assert len(result) > 1 - assert "First Law" not in output and ( - "human" in output or "Human" in output or "preserve" in output - ) + assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) assert len(output) < 100 diff --git a/python/tests/integration/completions/test_azure_oai_chat_service_extensions.py b/python/tests/integration/completions/test_azure_oai_chat_service_extensions.py new file mode 100644 index 000000000000..96a6cea38dcd --- /dev/null +++ b/python/tests/integration/completions/test_azure_oai_chat_service_extensions.py @@ -0,0 +1,172 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os +import time +from random import randint + +import numpy as np +import pytest + +import semantic_kernel as sk +import semantic_kernel.connectors.ai.open_ai as sk_oai +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureAISearchDataSources, + AzureChatPromptExecutionSettings, + AzureDataSources, + ExtraBody, +) +from semantic_kernel.memory.memory_record import MemoryRecord + +try: + from semantic_kernel.connectors.memory.azure_cognitive_search.azure_cognitive_search_memory_store import ( + AzureCognitiveSearchMemoryStore, + ) + + if os.environ.get("AZURE_COGNITIVE_SEARCH_ENDPOINT") and os.environ.get("AZURE_COGNITIVE_SEARCH_ADMIN_KEY"): + azure_cognitive_search_installed = True + else: + azure_cognitive_search_installed = False +except ImportError: + azure_cognitive_search_installed = False + +pytestmark = pytest.mark.skipif( + not azure_cognitive_search_installed, + reason="Azure Cognitive Search is not installed", +) + + +@pytest.fixture(scope="function") +@pytest.mark.asyncio +async def create_memory_store(): + # Create an index and populate it with some data + collection = f"int-tests-chat-extensions-{randint(1000, 9999)}" + memory_store = AzureCognitiveSearchMemoryStore(vector_size=4) + await memory_store.create_collection(collection) + time.sleep(1) + try: + assert await memory_store.does_collection_exist(collection) + rec = MemoryRecord( + is_reference=False, + external_source_name=None, + id=None, + description="Emily and David's story.", + text="Emily and David, two passionate scientists, met during a research expedition to Antarctica. 
\ +Bonded by their love for the natural world and shared curiosity, they uncovered a \ +groundbreaking phenomenon in glaciology that could potentially reshape our understanding \ +of climate change.", + additional_metadata=None, + embedding=np.array([0.2, 0.1, 0.2, 0.7]), + ) + await memory_store.upsert(collection, rec) + time.sleep(1) + return collection, memory_store + except: + await memory_store.delete_collection(collection) + raise + + +@pytest.fixture(scope="function") +@pytest.mark.asyncio +async def create_with_data_chat_function(get_aoai_config, create_kernel, create_memory_store): + collection, memory_store = await create_memory_store + try: + deployment_name, api_key, endpoint = get_aoai_config + + if "Python_Integration_Tests" in os.environ: + deployment_name = os.environ["AzureOpenAIChat__DeploymentName"] + else: + deployment_name = "gpt-35-turbo" + + print("* Service: Azure OpenAI Chat Completion") + print(f"* Endpoint: {endpoint}") + print(f"* Deployment: {deployment_name}") + + kernel = create_kernel + + # Load Azure OpenAI with data settings + search_endpoint = os.getenv("AZURE_COGNITIVE_SEARCH_ENDPOINT") + search_api_key = os.getenv("AZURE_COGNITIVE_SEARCH_ADMIN_KEY") + + extra = ExtraBody( + data_sources=[ + AzureDataSources( + type="AzureCognitiveSearch", + parameters=AzureAISearchDataSources( + indexName=collection, + endpoint=search_endpoint, + key=search_api_key, + queryType="simple", + fieldsMapping={ + "titleField": "Description", + "contentFields": ["Text"], + }, + topNDocuments=1, + ), + ) + ] + ) + + chat_service = sk_oai.AzureChatCompletion( + deployment_name=deployment_name, + api_key=api_key, + endpoint=endpoint, + api_version="2023-12-01-preview", + use_extensions=True, + ) + kernel.add_chat_service("chat-gpt-extensions", chat_service) + + prompt_config = sk.PromptTemplateConfig( + execution_settings=AzureChatPromptExecutionSettings( + max_tokens=2000, + temperature=0.7, + top_p=0.8, + extra_body=extra, + ) + ) + prompt_config.default_services = ["chat-gpt-extensions"] + + prompt_template = sk.ChatPromptTemplate("{{$input}}", kernel.prompt_template_engine, prompt_config) + + function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) + chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) + return chat_function, kernel, collection, memory_store + except: + await memory_store.delete_collection(collection) + raise + + +@pytest.mark.asyncio +@pytestmark +async def test_azure_e2e_chat_completion_with_extensions( + create_with_data_chat_function, +): + # Create an index and populate it with some data + ( + chat_function, + kernel, + collection, + memory_store, + ) = await create_with_data_chat_function + + try: + result = None + async for message in kernel.run_stream(chat_function, input_str="who are Emily and David?"): + result = message[0] if not result else result + message[0] + print(message, end="") + + print(f"Answer using input string: '{result}'") + print(f"Tool message: {result.tool_message}") + assert result.tool_message is not None + assert "two passionate scientists" in result.tool_message + assert len(result.content) > 1 + + context = await kernel.run(chat_function, input_str="who are Emily and David?") + print(f"Answer using input string: '{context}'") + assert context.objects["results"][0].tool_message is not None + assert "two passionate scientists" in context.objects["results"][0].tool_message + assert len(context.result) > 1 + + await memory_store.delete_collection(collection) + except: + await 
memory_store.delete_collection(collection) + raise diff --git a/python/tests/integration/completions/test_azure_oai_text_service.py b/python/tests/integration/completions/test_azure_oai_text_service.py index ced9bf629df1..ca5df3ca92d3 100644 --- a/python/tests/integration/completions/test_azure_oai_text_service.py +++ b/python/tests/integration/completions/test_azure_oai_text_service.py @@ -3,15 +3,14 @@ import os import pytest +from openai import AsyncAzureOpenAI from test_utils import retry import semantic_kernel.connectors.ai.open_ai as sk_oai @pytest.mark.asyncio -async def test_azure_e2e_text_completion_with_skill( - setup_tldr_function_for_oai_models, get_aoai_config -): +async def test_azure_e2e_text_completion_with_plugin(setup_tldr_function_for_oai_models, get_aoai_config): kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models _, api_key, endpoint = get_aoai_config @@ -28,27 +27,25 @@ async def test_azure_e2e_text_completion_with_skill( # Configure LLM service kernel.add_text_completion_service( "text_completion", - sk_oai.AzureTextCompletion(deployment_name, endpoint, api_key), + sk_oai.AzureTextCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + ), ) # Create the semantic function - tldr_function = kernel.create_semantic_function( - sk_prompt, max_tokens=200, temperature=0, top_p=0.5 - ) + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) - summary = await retry( - lambda: kernel.run_async(tldr_function, input_str=text_to_summarize) - ) + summary = await retry(lambda: kernel.run(tldr_function, input_str=text_to_summarize)) output = str(summary).strip() print(f"TLDR using input string: '{output}'") - assert "First Law" not in output and ( - "human" in output or "Human" in output or "preserve" in output - ) + assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) assert len(output) < 100 @pytest.mark.asyncio -async def test_oai_text_stream_completion_with_skills( +async def test_azure_e2e_text_completion_with_plugin_with_provided_client( setup_tldr_function_for_oai_models, get_aoai_config ): kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models @@ -64,27 +61,28 @@ async def test_oai_text_stream_completion_with_skills( print(f"* Endpoint: {endpoint}") print(f"* Deployment: {deployment_name}") + client = AsyncAzureOpenAI( + azure_endpoint=endpoint, + azure_deployment=deployment_name, + api_key=api_key, + api_version="2023-05-15", + default_headers={"Test-User-X-ID": "test"}, + ) + # Configure LLM service kernel.add_text_completion_service( "text_completion", - sk_oai.AzureTextCompletion(deployment_name, endpoint, api_key), + sk_oai.AzureTextCompletion( + deployment_name=deployment_name, + async_client=client, + ), ) # Create the semantic function - tldr_function = kernel.create_semantic_function( - sk_prompt, max_tokens=200, temperature=0, top_p=0.5 - ) - - result = [] - async for message in kernel.run_stream_async( - tldr_function, input_str=text_to_summarize - ): - result.append(message) - output = "".join(result).strip() + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) + summary = await retry(lambda: kernel.run(tldr_function, input_str=text_to_summarize)) + output = str(summary).strip() print(f"TLDR using input string: '{output}'") - assert len(result) > 1 - assert "First Law" not in output and ( - "human" in output or "Human" in output or "preserve" in 
output - ) + assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) assert len(output) < 100 diff --git a/python/tests/integration/completions/test_conversation_summary_plugin.py b/python/tests/integration/completions/test_conversation_summary_plugin.py new file mode 100644 index 000000000000..ac5b055f51ba --- /dev/null +++ b/python/tests/integration/completions/test_conversation_summary_plugin.py @@ -0,0 +1,77 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os + +import pytest +from test_utils import retry + +import semantic_kernel as sk +import semantic_kernel.connectors.ai.open_ai as sk_oai +from semantic_kernel.core_plugins.conversation_summary_plugin import ( + ConversationSummaryPlugin, +) + + +@pytest.mark.asyncio +async def test_azure_summarize_conversation_using_plugin(setup_summarize_conversation_using_plugin, get_aoai_config): + kernel, chatTranscript = setup_summarize_conversation_using_plugin + + if "Python_Integration_Tests" in os.environ: + deployment_name = os.environ["AzureOpenAI__DeploymentName"] + api_key = os.environ["AzureOpenAI__ApiKey"] + endpoint = os.environ["AzureOpenAI__Endpoint"] + else: + # Load credentials from .env file + deployment_name, api_key, endpoint = get_aoai_config + deployment_name = "gpt-35-turbo-instruct" + + kernel.add_text_completion_service( + "text_completion", + sk_oai.AzureTextCompletion(deployment_name=deployment_name, endpoint=endpoint, api_key=api_key), + ) + + conversationSummaryPlugin = kernel.import_plugin(ConversationSummaryPlugin(kernel), "conversationSummary") + + summary = await retry( + lambda: kernel.run(conversationSummaryPlugin["SummarizeConversation"], input_str=chatTranscript) + ) + + output = str(summary).strip().lower() + print(output) + assert "john" in output and "jane" in output + assert len(output) < len(chatTranscript) + + +@pytest.mark.asyncio +@pytest.mark.xfail(reason="This test fails intermittently when run in parallel with other tests") +async def test_oai_summarize_conversation_using_plugin( + setup_summarize_conversation_using_plugin, +): + _, chatTranscript = setup_summarize_conversation_using_plugin + + # Defining a new kernel here to avoid using the same kernel as the previous test + # which causes failures. + kernel = sk.Kernel() + + if "Python_Integration_Tests" in os.environ: + api_key = os.environ["OpenAI__ApiKey"] + org_id = None + else: + # Load credentials from .env file + api_key, org_id = sk.openai_settings_from_dot_env() + + kernel.add_text_completion_service( + "davinci-003", + sk_oai.OpenAITextCompletion("gpt-3.5-turbo-instruct", api_key, org_id=org_id), + ) + + conversationSummaryPlugin = kernel.import_plugin(ConversationSummaryPlugin(kernel), "conversationSummary") + + summary = await retry( + lambda: kernel.run(conversationSummaryPlugin["SummarizeConversation"], input_str=chatTranscript) + ) + + output = str(summary).strip().lower() + print(output) + assert "john" in output and "jane" in output + assert len(output) < len(chatTranscript) diff --git a/python/tests/integration/completions/test_conversation_summary_skill.py b/python/tests/integration/completions/test_conversation_summary_skill.py deleted file mode 100644 index 5352ab2847ee..000000000000 --- a/python/tests/integration/completions/test_conversation_summary_skill.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import os - -import pytest -from test_utils import retry - -import semantic_kernel as sk -import semantic_kernel.connectors.ai.open_ai as sk_oai -from semantic_kernel.core_skills.conversation_summary_skill import ( - ConversationSummarySkill, -) - - -@pytest.mark.asyncio -async def test_azure_summarize_conversation_using_skill( - setup_summarize_conversation_using_skill, get_aoai_config -): - kernel, chatTranscript = setup_summarize_conversation_using_skill - - if "Python_Integration_Tests" in os.environ: - deployment_name = os.environ["AzureOpenAI__DeploymentName"] - api_key = os.environ["AzureOpenAI__ApiKey"] - endpoint = os.environ["AzureOpenAI__Endpoint"] - else: - # Load credentials from .env file - deployment_name, api_key, endpoint = get_aoai_config - deployment_name = "text-davinci-003" - - kernel.add_text_completion_service( - "text_completion", - sk_oai.AzureTextCompletion(deployment_name, endpoint, api_key), - ) - - conversationSummarySkill = kernel.import_skill( - ConversationSummarySkill(kernel), "conversationSummary" - ) - - summary = await retry( - lambda: kernel.run_async( - conversationSummarySkill["SummarizeConversation"], input_str=chatTranscript - ) - ) - - output = str(summary).strip().lower() - print(output) - assert "john" in output and "jane" in output - assert len(output) < len(chatTranscript) - - -@pytest.mark.asyncio -async def test_oai_summarize_conversation_using_skill( - setup_summarize_conversation_using_skill, -): - kernel, chatTranscript = setup_summarize_conversation_using_skill - - if "Python_Integration_Tests" in os.environ: - api_key = os.environ["OpenAI__ApiKey"] - org_id = None - else: - # Load credentials from .env file - api_key, org_id = sk.openai_settings_from_dot_env() - - kernel.add_text_completion_service( - "davinci-003", - sk_oai.OpenAITextCompletion("text-davinci-003", api_key, org_id=org_id), - ) - - conversationSummarySkill = kernel.import_skill( - ConversationSummarySkill(kernel), "conversationSummary" - ) - - summary = await retry( - lambda: kernel.run_async( - conversationSummarySkill["SummarizeConversation"], input_str=chatTranscript - ) - ) - - output = str(summary).strip().lower() - print(output) - assert "john" in output and "jane" in output - assert len(output) < len(chatTranscript) diff --git a/python/tests/integration/completions/test_gp_chat_service.py b/python/tests/integration/completions/test_gp_chat_service.py index e4701e3eed95..976928680f2d 100644 --- a/python/tests/integration/completions/test_gp_chat_service.py +++ b/python/tests/integration/completions/test_gp_chat_service.py @@ -10,9 +10,7 @@ import semantic_kernel.connectors.ai.google_palm as sk_gp pytestmark = [ - pytest.mark.skipif( - sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater" - ), + pytest.mark.skipif(sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater"), pytest.mark.skipif( "Python_Integration_Tests" in os.environ, reason="Google Palm integration tests are only set up to run locally", @@ -21,36 +19,28 @@ @pytest.mark.asyncio -async def test_gp_chat_service_with_skills( - setup_tldr_function_for_oai_models, get_gp_config -): +async def test_gp_chat_service_with_plugins(setup_tldr_function_for_oai_models, get_gp_config): kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models api_key = get_gp_config print("* Service: Google PaLM Chat Completion") print("* Model: chat-bison-001") - palm_chat_completion = sk_gp.GooglePalmChatCompletion( - "models/chat-bison-001", api_key - ) + 
palm_chat_completion = sk_gp.GooglePalmChatCompletion(ai_model_id="models/chat-bison-001", api_key=api_key) kernel.add_chat_service("models/chat-bison-001", palm_chat_completion) # Create the semantic function - tldr_function = kernel.create_semantic_function( - sk_prompt, max_tokens=200, temperature=0, top_p=0.5 - ) + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) max_retries = 5 # Adjust the number of retries as per your requirement retry_delay = 2 # Adjust the delay (in seconds) between retries for _ in range(max_retries): try: - summary = await kernel.run_async(tldr_function, input_str=text_to_summarize) + summary = await kernel.run(tldr_function, input_str=text_to_summarize) output = str(summary).strip() print(f"TLDR using input string: '{output}'") - assert "First Law" not in output and ( - "human" in output or "Human" in output or "preserve" in output - ) - assert len(output) < 100 + # assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) + assert len(output) > 0 break except Exception as e: print(f"Error occurred: {e}") diff --git a/python/tests/integration/completions/test_gp_text_service.py b/python/tests/integration/completions/test_gp_text_service.py index 125f771fb20b..9b27ca222542 100644 --- a/python/tests/integration/completions/test_gp_text_service.py +++ b/python/tests/integration/completions/test_gp_text_service.py @@ -8,9 +8,7 @@ import semantic_kernel as sk pytestmark = [ - pytest.mark.skipif( - sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater" - ), + pytest.mark.skipif(sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater"), pytest.mark.skipif( "Python_Integration_Tests" in os.environ, reason="Google Palm integration tests are only set up to run locally", @@ -23,7 +21,7 @@ async def test_text2text_generation_input_str(setup_gp_text_completion_function) kernel, text2text_function, simple_input = setup_gp_text_completion_function # Complete input string and print - summary = await kernel.run_async(text2text_function, input_str=simple_input) + summary = await kernel.run(text2text_function, input_str=simple_input) output = str(summary).strip() print(f"Completion using input string: '{output}'") @@ -36,7 +34,7 @@ async def test_text2text_generation_input_vars(setup_gp_text_completion_function # Complete input as context variable and print context_vars = sk.ContextVariables(simple_input) - summary = await kernel.run_async(text2text_function, input_vars=context_vars) + summary = await kernel.run(text2text_function, input_vars=context_vars) output = str(summary).strip() print(f"Completion using context variables: '{output}'") @@ -50,7 +48,7 @@ async def test_text2text_generation_input_context(setup_gp_text_completion_funct # Complete input context and print context = kernel.create_new_context() context["input"] = simple_input - summary = await kernel.run_async(text2text_function, input_context=context) + summary = await kernel.run(text2text_function, input_context=context) output = str(summary).strip() print(f"Completion using input context: '{output}'") @@ -67,9 +65,7 @@ async def test_text2text_generation_input_context_with_vars( context = kernel.create_new_context() context["input"] = simple_input context_vars = sk.ContextVariables("running and") - summary = await kernel.run_async( - text2text_function, input_context=context, input_vars=context_vars - ) + summary = await kernel.run(text2text_function, 
input_context=context, input_vars=context_vars) output = str(summary).strip() print(f"Completion using context and additional variables: '{output}'") @@ -85,9 +81,7 @@ async def test_text2text_generation_input_context_with_str( # Complete input context with additional input string and print context = kernel.create_new_context() context["input"] = simple_input - summary = await kernel.run_async( - text2text_function, input_context=context, input_str="running and" - ) + summary = await kernel.run(text2text_function, input_context=context, input_str="running and") output = str(summary).strip() print(f"Completion using context and additional string: '{output}'") @@ -104,7 +98,7 @@ async def test_text2text_generation_input_context_with_vars_and_str( context = kernel.create_new_context() context["input"] = simple_input context_vars = sk.ContextVariables(variables={"input2": "running and"}) - summary = await kernel.run_async( + summary = await kernel.run( text2text_function, input_context=context, input_vars=context_vars, @@ -112,7 +106,5 @@ async def test_text2text_generation_input_context_with_vars_and_str( ) output = str(summary).strip() - print( - f"Completion using context, additional variables, and additional string: '{output}'" - ) + print(f"Completion using context, additional variables, and additional string: '{output}'") assert len(output) > 0 diff --git a/python/tests/integration/completions/test_hf_local_text_completions.py b/python/tests/integration/completions/test_hf_local_text_completions.py deleted file mode 100644 index f5af64418518..000000000000 --- a/python/tests/integration/completions/test_hf_local_text_completions.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest -from transformers import AutoTokenizer - -import semantic_kernel as sk -import semantic_kernel.connectors.ai.hugging_face as sk_hf - - -@pytest.mark.asyncio -async def test_text2text_generation_input_str(setup_hf_text_completion_function): - kernel, text2text_function, simple_input = setup_hf_text_completion_function - - # Complete input string and print - summary = await kernel.run_async(text2text_function, input_str=simple_input) - - output = str(summary).strip() - print(f"Completion using input string: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_text2text_generation_input_vars(setup_hf_text_completion_function): - kernel, text2text_function, simple_input = setup_hf_text_completion_function - - # Complete input as context variable and print - context_vars = sk.ContextVariables(simple_input) - summary = await kernel.run_async(text2text_function, input_vars=context_vars) - - output = str(summary).strip() - print(f"Completion using context variables: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_text2text_generation_input_context(setup_hf_text_completion_function): - kernel, text2text_function, simple_input = setup_hf_text_completion_function - - # Complete input context and print - context = kernel.create_new_context() - context["input"] = simple_input - summary = await kernel.run_async(text2text_function, input_context=context) - - output = str(summary).strip() - print(f"Completion using input context: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_text2text_generation_input_context_with_vars( - setup_hf_text_completion_function, -): - kernel, text2text_function, simple_input = setup_hf_text_completion_function - - # Complete input context with additional 
variables and print - context = kernel.create_new_context() - context["input"] = simple_input - context_vars = sk.ContextVariables("running and") - summary = await kernel.run_async( - text2text_function, input_context=context, input_vars=context_vars - ) - - output = str(summary).strip() - print(f"Completion using context and additional variables: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_text2text_generation_input_context_with_str( - setup_hf_text_completion_function, -): - kernel, text2text_function, simple_input = setup_hf_text_completion_function - - # Complete input context with additional input string and print - context = kernel.create_new_context() - context["input"] = simple_input - summary = await kernel.run_async( - text2text_function, input_context=context, input_str="running and" - ) - - output = str(summary).strip() - print(f"Completion using context and additional string: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_text2text_generation_input_context_with_vars_and_str( - setup_hf_text_completion_function, -): - kernel, text2text_function, simple_input = setup_hf_text_completion_function - - # Complete input context with additional variables and string and print - context = kernel.create_new_context() - context["input"] = simple_input - context_vars = sk.ContextVariables(variables={"input2": "running and"}) - summary = await kernel.run_async( - text2text_function, - input_context=context, - input_vars=context_vars, - input_str="new text", - ) - - output = str(summary).strip() - print( - f"Completion using context, additional variables, and additional string: '{output}'" - ) - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_text_generation_with_kwargs(): - simple_input = "sleeping and " - model_name = "google/flan-t5-base" - - tokenizer = AutoTokenizer.from_pretrained( - pretrained_model_name_or_path=model_name, trust_remote_code=True - ) - - hf_model = sk_hf.HuggingFaceTextCompletion( - model_name, - task="text2text-generation", - model_kwargs={"repetition_penalty": 0.2}, - pipeline_kwargs={"tokenizer": tokenizer, "trust_remote_code": True}, - ) - - kernel = sk.Kernel() - - # Configure LLM service - kernel.add_text_completion_service("hf-local", hf_model) - - # Define semantic function using SK prompt template language - sk_prompt = "Hello, I like {{$input}}{{$input2}}" - text2text_function = kernel.create_semantic_function( - sk_prompt, max_tokens=25, temperature=0.2, top_p=0.5 - ) - - # Complete input context with additional variables and string and print - context = kernel.create_new_context() - context["input"] = simple_input - context_vars = sk.ContextVariables(variables={"input2": "running and"}) - summary = await kernel.run_async( - text2text_function, - input_context=context, - input_vars=context_vars, - input_str="new text", - ) - - output = str(summary).strip() - print( - f"Completion using context, additional variables, and additional string: '{output}'" - ) - assert len(output) > 0 diff --git a/python/tests/integration/completions/test_hf_local_text_summarization.py b/python/tests/integration/completions/test_hf_local_text_summarization.py deleted file mode 100644 index c53dade6a54f..000000000000 --- a/python/tests/integration/completions/test_hf_local_text_summarization.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
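For reference, a minimal sketch — not an excerpt from the patched files — of the call pattern the updated completion tests converge on: keyword-argument construction of the Google PaLM connector (as in test_gp_chat_service.py above) and the renamed kernel.run entry point, which replaces kernel.run_async and accepts the same three input styles. The API key, prompt, and input text below are placeholders, and the asyncio wrapper is added only to make the sketch self-contained.

import asyncio

import semantic_kernel as sk
import semantic_kernel.connectors.ai.google_palm as sk_gp


async def main() -> None:
    kernel = sk.Kernel()

    # Keyword-argument construction, as used in the updated PaLM chat test.
    palm_chat_completion = sk_gp.GooglePalmChatCompletion(
        ai_model_id="models/chat-bison-001", api_key="PLACEHOLDER_PALM_API_KEY"
    )
    kernel.add_chat_service("models/chat-bison-001", palm_chat_completion)

    tldr_function = kernel.create_semantic_function(
        "{{$input}}\n\nGive me the TLDR in 5 words.", max_tokens=200, temperature=0, top_p=0.5
    )

    # kernel.run replaces kernel.run_async; the three input styles below are
    # the ones the text-completion tests exercise.
    await kernel.run(tldr_function, input_str="text to summarize")

    context_vars = sk.ContextVariables("text to summarize")
    await kernel.run(tldr_function, input_vars=context_vars)

    context = kernel.create_new_context()
    context["input"] = "text to summarize"
    await kernel.run(tldr_function, input_context=context)


if __name__ == "__main__":
    asyncio.run(main())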
- -import pytest - -import semantic_kernel as sk - - -@pytest.mark.asyncio -async def test_summarize_input_str(setup_summarize_function): - ( - kernel, - summarize_function, - text_to_summarize, - additional_text, - ) = setup_summarize_function - - # Summarize input string and print - summary = await kernel.run_async(summarize_function, input_str=text_to_summarize) - - output = str(summary).strip() - print(f"Summary using input string: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_summarize_input_vars(setup_summarize_function): - ( - kernel, - summarize_function, - text_to_summarize, - additional_text, - ) = setup_summarize_function - - # Summarize input as context variable and print - context_vars = sk.ContextVariables(text_to_summarize) - summary = await kernel.run_async(summarize_function, input_vars=context_vars) - - output = str(summary).strip() - print(f"Summary using context variables: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_summarize_input_context(setup_summarize_function): - ( - kernel, - summarize_function, - text_to_summarize, - additional_text, - ) = setup_summarize_function - - # Summarize input context and print - context = kernel.create_new_context() - context["input"] = text_to_summarize - summary = await kernel.run_async(summarize_function, input_context=context) - - output = str(summary).strip() - print(f"Summary using input context: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_summarize_input_context_with_vars(setup_summarize_function): - ( - kernel, - summarize_function, - text_to_summarize, - additional_text, - ) = setup_summarize_function - - # Summarize input context with additional variables and print - context = kernel.create_new_context() - context["input"] = text_to_summarize - context_vars = sk.ContextVariables(additional_text) - summary = await kernel.run_async( - summarize_function, input_context=context, input_vars=context_vars - ) - - output = str(summary).strip() - print(f"Summary using context and additional variables: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_summarize_input_context_with_str(setup_summarize_function): - ( - kernel, - summarize_function, - text_to_summarize, - additional_text, - ) = setup_summarize_function - - # Summarize input context with additional input string and print - context = kernel.create_new_context() - context["input"] = text_to_summarize - summary = await kernel.run_async( - summarize_function, input_context=context, input_str=additional_text - ) - - output = str(summary).strip() - print(f"Summary using context and additional string: '{output}'") - assert len(output) > 0 - - -@pytest.mark.asyncio -async def test_summarize_input_context_with_vars_and_str(setup_summarize_function): - ( - kernel, - summarize_function, - text_to_summarize, - additional_text, - ) = setup_summarize_function - - # Summarize input context with additional variables and string and print - context = kernel.create_new_context() - context["input"] = text_to_summarize - context_vars = sk.ContextVariables(variables={"input2": additional_text}) - summary = await kernel.run_async( - summarize_function, - input_context=context, - input_vars=context_vars, - input_str="new text", - ) - - output = str(summary).strip() - print( - f"Summary using context, additional variables, and additional string: '{output}'" - ) - assert len(output) > 0 diff --git a/python/tests/integration/completions/test_oai_chat_service.py 
b/python/tests/integration/completions/test_oai_chat_service.py index df7d4e61e5b8..db0a46fa5385 100644 --- a/python/tests/integration/completions/test_oai_chat_service.py +++ b/python/tests/integration/completions/test_oai_chat_service.py @@ -1,15 +1,15 @@ # Copyright (c) Microsoft. All rights reserved. +import os import pytest +from openai import AsyncOpenAI from test_utils import retry import semantic_kernel.connectors.ai.open_ai as sk_oai @pytest.mark.asyncio -async def test_oai_chat_service_with_skills( - setup_tldr_function_for_oai_models, get_oai_config -): +async def test_oai_chat_service_with_plugins(setup_tldr_function_for_oai_models, get_oai_config): kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models api_key, org_id = get_oai_config @@ -19,20 +19,82 @@ async def test_oai_chat_service_with_skills( print("* Model: gpt-3.5-turbo") kernel.add_chat_service( - "chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id) + "chat-gpt", + sk_oai.OpenAIChatCompletion(ai_model_id="gpt-3.5-turbo", api_key=api_key, org_id=org_id), ) # Create the semantic function - tldr_function = kernel.create_semantic_function( - sk_prompt, max_tokens=200, temperature=0, top_p=0.5 + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) + + summary = await retry(lambda: kernel.run(tldr_function, input_str=text_to_summarize)) + output = str(summary).strip() + print(f"TLDR using input string: '{output}'") + assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) + assert len(output) < 100 + + +@pytest.mark.asyncio +async def test_oai_chat_service_with_plugins_with_provided_client(setup_tldr_function_for_oai_models, get_oai_config): + kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models + + api_key, org_id = get_oai_config + + print("* Service: OpenAI Chat Completion") + print("* Endpoint: OpenAI") + print("* Model: gpt-3.5-turbo") + + client = AsyncOpenAI( + api_key=api_key, + organization=org_id, ) - summary = await retry( - lambda: kernel.run_async(tldr_function, input_str=text_to_summarize) + kernel.add_chat_service( + "chat-gpt", + sk_oai.OpenAIChatCompletion( + ai_model_id="gpt-3.5-turbo", + async_client=client, + ), ) + + # Create the semantic function + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) + + summary = await retry(lambda: kernel.run(tldr_function, input_str=text_to_summarize)) output = str(summary).strip() print(f"TLDR using input string: '{output}'") - assert "First Law" not in output and ( - "human" in output or "Human" in output or "preserve" in output - ) + assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) assert len(output) < 100 + + +@pytest.mark.asyncio +async def test_oai_chat_stream_service_with_plugins(setup_tldr_function_for_oai_models, get_aoai_config): + kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models + + _, api_key, endpoint = get_aoai_config + + if "Python_Integration_Tests" in os.environ: + deployment_name = os.environ["AzureOpenAIChat__DeploymentName"] + else: + deployment_name = "gpt-35-turbo" + + print("* Service: Azure OpenAI Chat Completion") + print(f"* Endpoint: {endpoint}") + print(f"* Deployment: {deployment_name}") + + # Configure LLM service + kernel.add_chat_service( + "chat_completion", + sk_oai.AzureChatCompletion(deployment_name=deployment_name, endpoint=endpoint, api_key=api_key), + 
) + + # Create the semantic function + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) + + result = None + async for message in kernel.run_stream(tldr_function, input_str=text_to_summarize): + result = message[0] if not result else result + message[0] + output = str(result) + + print(f"TLDR using input string: '{output}'") + # assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) + assert 0 < len(output) < 100 diff --git a/python/tests/integration/completions/test_oai_text_service.py b/python/tests/integration/completions/test_oai_text_service.py index 44535c534988..6f8e117e8987 100644 --- a/python/tests/integration/completions/test_oai_text_service.py +++ b/python/tests/integration/completions/test_oai_text_service.py @@ -1,39 +1,106 @@ # Copyright (c) Microsoft. All rights reserved. +import os import pytest +from openai import AsyncOpenAI from test_utils import retry import semantic_kernel.connectors.ai.open_ai as sk_oai @pytest.mark.asyncio -async def test_oai_text_completion_with_skills( - setup_tldr_function_for_oai_models, get_oai_config -): +async def test_oai_text_completion_with_plugins(setup_tldr_function_for_oai_models, get_oai_config): kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models api_key, org_id = get_oai_config print("* Service: OpenAI Text Completion") print("* Endpoint: OpenAI") - print("* Model: text-davinci-003") + print("* Model: gpt-3.5-turbo-instruct") - kernel.add_chat_service( - "davinci-003", - sk_oai.OpenAITextCompletion("text-davinci-003", api_key, org_id=org_id), + kernel.add_text_completion_service( + "text-completion", + sk_oai.OpenAITextCompletion(ai_model_id="gpt-3.5-turbo-instruct", api_key=api_key, org_id=org_id), ) # Create the semantic function - tldr_function = kernel.create_semantic_function( - sk_prompt, max_tokens=200, temperature=0, top_p=0.5 + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) + + summary = await retry(lambda: kernel.run(tldr_function, input_str=text_to_summarize)) + output = str(summary).strip() + print(f"TLDR using input string: '{output}'") + # assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) + assert 0 < len(output) < 100 + + +@pytest.mark.asyncio +async def test_oai_text_completion_with_plugins_with_provided_client( + setup_tldr_function_for_oai_models, get_oai_config +): + kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models + + api_key, org_id = get_oai_config + + print("* Service: OpenAI Text Completion") + print("* Endpoint: OpenAI") + print("* Model: gpt-3.5-turbo-instruct") + + client = AsyncOpenAI( + api_key=api_key, + organization=org_id, ) - summary = await retry( - lambda: kernel.run_async(tldr_function, input_str=text_to_summarize) + kernel.add_text_completion_service( + "text-completion", + sk_oai.OpenAITextCompletion( + ai_model_id="gpt-3.5-turbo-instruct", + async_client=client, + ), ) + + # Create the semantic function + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) + + summary = await retry(lambda: kernel.run(tldr_function, input_str=text_to_summarize)) output = str(summary).strip() print(f"TLDR using input string: '{output}'") - assert "First Law" not in output and ( - "human" in output or "Human" in output or "preserve" in output + # assert "First Law" not in output and ("human" in output or "Human" in 
output or "preserve" in output) + assert 0 < len(output) < 100 + + +@pytest.mark.asyncio +async def test_oai_text_stream_completion_with_plugins(setup_tldr_function_for_oai_models, get_aoai_config): + kernel, sk_prompt, text_to_summarize = setup_tldr_function_for_oai_models + + _, api_key, endpoint = get_aoai_config + + if "Python_Integration_Tests" in os.environ: + deployment_name = os.environ["AzureOpenAI__DeploymentName"] + else: + deployment_name = "gpt-3.5-turbo-instruct" + + print("* Service: Azure OpenAI Text Completion") + print(f"* Endpoint: {endpoint}") + print(f"* Deployment: {deployment_name}") + + # Configure LLM service + kernel.add_text_completion_service( + "text_completion", + sk_oai.AzureTextCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + ), ) - assert len(output) < 100 + + # Create the semantic function + tldr_function = kernel.create_semantic_function(sk_prompt, max_tokens=200, temperature=0, top_p=0.5) + + result = None + async for message in kernel.run_stream(tldr_function, input_str=text_to_summarize): + result = message[0] if not result else result + message[0] + output = str(result) + + print(f"TLDR using input string: '{output}'") + # assert "First Law" not in output and ("human" in output or "Human" in output or "preserve" in output) + assert 0 < len(output) < 100 diff --git a/python/tests/integration/connectors/memory/test_astradb.py b/python/tests/integration/connectors/memory/test_astradb.py new file mode 100644 index 000000000000..8816a0301c09 --- /dev/null +++ b/python/tests/integration/connectors/memory/test_astradb.py @@ -0,0 +1,266 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os +import time + +import numpy as np +import pytest + +import semantic_kernel as sk +from semantic_kernel.connectors.memory.astradb import AstraDBMemoryStore +from semantic_kernel.memory.memory_record import MemoryRecord + +astradb_installed: bool +try: + if os.environ["ASTRADB_INTEGRATION_TEST"]: + astradb_installed = True +except KeyError: + astradb_installed = False + + +pytestmark = pytest.mark.skipif(not astradb_installed, reason="astradb is not installed") + + +async def retry(func, retries=1): + for i in range(retries): + try: + return await func() + except Exception as e: + print(e) + time.sleep(i * 2) + + +@pytest.fixture(autouse=True, scope="module") +def slow_down_tests(): + yield + time.sleep(3) + + +@pytest.fixture(scope="session") +def get_astradb_config(): + if "Python_Integration_Tests" in os.environ: + app_token = os.environ["ASTRADB_APP_TOKEN"] + db_id = os.environ["ASTRADB_ID"] + region = os.environ["ASTRADB_REGION"] + keyspace = os.environ["ASTRADB_KEYSPACE"] + else: + # Load credentials from .env file + app_token, db_id, region, keyspace = sk.astradb_settings_from_dot_env() + + return app_token, db_id, region, keyspace + + +@pytest.fixture +def memory_record1(): + return MemoryRecord( + id="test_id1", + text="sample text1", + is_reference=False, + embedding=np.array([0.5, 0.5]), + description="description", + additional_metadata="additional metadata", + external_source_name="external source", + timestamp="timestamp", + ) + + +@pytest.fixture +def memory_record2(): + return MemoryRecord( + id="test_id2", + text="sample text2", + is_reference=False, + embedding=np.array([0.25, 0.75]), + description="description", + additional_metadata="additional metadata", + external_source_name="external source", + timestamp="timestamp", + ) + + +@pytest.fixture +def memory_record3(): + return MemoryRecord( + 
id="test_id3", + text="sample text3", + is_reference=False, + embedding=np.array([0.25, 0.80]), + description="description", + additional_metadata="additional metadata", + external_source_name="external source", + timestamp="timestamp", + ) + + +@pytest.mark.asyncio +async def test_constructor(get_astradb_config): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + result = await retry(lambda: memory.get_collections_async()) + + assert result is not None + + +@pytest.mark.asyncio +async def test_create_and_get_collection_async(get_astradb_config): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + result = await retry(lambda: memory.does_collection_exist_async("test_collection")) + assert result is not None + assert result is True + + +@pytest.mark.asyncio +async def test_get_collections_async(get_astradb_config): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + result = await retry(lambda: memory.get_collections_async()) + assert "test_collection" in result + + +@pytest.mark.asyncio +async def test_delete_collection_async(get_astradb_config): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + await retry(lambda: memory.delete_collection_async("test_collection")) + result = await retry(lambda: memory.get_collections_async()) + assert "test_collection" not in result + + +@pytest.mark.asyncio +async def test_does_collection_exist_async(get_astradb_config): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + result = await retry(lambda: memory.does_collection_exist_async("test_collection")) + assert result is True + + +@pytest.mark.asyncio +async def test_upsert_async_and_get_async(get_astradb_config, memory_record1): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + await retry(lambda: memory.upsert_async("test_collection", memory_record1)) + + result = await retry( + lambda: memory.get_async( + "test_collection", + memory_record1._id, + with_embedding=True, + ) + ) + + assert result is not None + assert result._id == memory_record1._id + assert result._description == memory_record1._description + assert result._text == memory_record1._text + assert result.embedding is not None + + +@pytest.mark.asyncio +async def test_upsert_batch_async_and_get_batch_async(get_astradb_config, memory_record1, memory_record2): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + await retry(lambda: memory.upsert_batch_async("test_collection", [memory_record1, memory_record2])) + + results = await retry( + lambda: memory.get_batch_async( + "test_collection", + 
[memory_record1._id, memory_record2._id], + with_embeddings=True, + ) + ) + + assert len(results) >= 2 + assert results[0]._id in [memory_record1._id, memory_record2._id] + assert results[1]._id in [memory_record1._id, memory_record2._id] + + +@pytest.mark.asyncio +async def test_remove_async(get_astradb_config, memory_record1): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + await retry(lambda: memory.upsert_async("test_collection", memory_record1)) + await retry(lambda: memory.remove_async("test_collection", memory_record1._id)) + + with pytest.raises(KeyError): + _ = await memory.get_async("test_collection", memory_record1._id, with_embedding=True) + + +@pytest.mark.asyncio +async def test_remove_batch_async(get_astradb_config, memory_record1, memory_record2): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + await retry(lambda: memory.upsert_batch_async("test_collection", [memory_record1, memory_record2])) + await retry(lambda: memory.remove_batch_async("test_collection", [memory_record1._id, memory_record2._id])) + + with pytest.raises(KeyError): + _ = await memory.get_async("test_collection", memory_record1._id, with_embedding=True) + + with pytest.raises(KeyError): + _ = await memory.get_async("test_collection", memory_record2._id, with_embedding=True) + + +@pytest.mark.asyncio +async def test_get_nearest_match_async(get_astradb_config, memory_record1, memory_record2): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + await retry(lambda: memory.upsert_batch_async("test_collection", [memory_record1, memory_record2])) + + test_embedding = memory_record1.embedding + test_embedding[0] = test_embedding[0] + 0.01 + + result = await retry( + lambda: memory.get_nearest_match_async( + "test_collection", + test_embedding, + min_relevance_score=0.0, + with_embedding=True, + ) + ) + + assert result is not None + assert result[0]._id == memory_record1._id + + +@pytest.mark.asyncio +async def test_get_nearest_matches_async(get_astradb_config, memory_record1, memory_record2, memory_record3): + app_token, db_id, region, keyspace = get_astradb_config + memory = AstraDBMemoryStore(app_token, db_id, region, keyspace, 2, "cosine") + + await retry(lambda: memory.create_collection_async("test_collection")) + await retry(lambda: memory.upsert_batch_async("test_collection", [memory_record1, memory_record2, memory_record3])) + + test_embedding = memory_record2.embedding + test_embedding[0] = test_embedding[0] + 0.025 + + result = await retry( + lambda: memory.get_nearest_matches_async( + "test_collection", + test_embedding, + limit=2, + min_relevance_score=0.0, + with_embeddings=True, + ) + ) + + assert len(result) == 2 + assert result[0][0]._id in [memory_record3._id, memory_record2._id] + assert result[1][0]._id in [memory_record3._id, memory_record2._id] diff --git a/python/tests/integration/connectors/memory/test_azure_cognitive_search.py b/python/tests/integration/connectors/memory/test_azure_cognitive_search.py index 8146fbcdcaac..8df772469a24 100644 --- 
a/python/tests/integration/connectors/memory/test_azure_cognitive_search.py +++ b/python/tests/integration/connectors/memory/test_azure_cognitive_search.py @@ -25,9 +25,7 @@ @pytest.mark.asyncio async def test_constructor(): test_endpoint = "https://test-endpoint.search.windows.net" - async with AzureCognitiveSearchMemoryStore( - vector_size=4, search_endpoint=test_endpoint - ) as memory_store: + async with AzureCognitiveSearchMemoryStore(vector_size=4, search_endpoint=test_endpoint) as memory_store: assert memory_store is not None assert memory_store._search_index_client is not None @@ -36,30 +34,30 @@ async def test_constructor(): async def test_collections(): collection = f"int-tests-{randint(1000, 9999)}" async with AzureCognitiveSearchMemoryStore(vector_size=4) as memory_store: - await memory_store.create_collection_async(collection) + await memory_store.create_collection(collection) time.sleep(1) try: - assert await memory_store.does_collection_exist_async(collection) + assert await memory_store.does_collection_exist(collection) except: - await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) raise - many = await memory_store.get_collections_async() + many = await memory_store.get_collections() assert collection in many - await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) time.sleep(1) - assert not await memory_store.does_collection_exist_async(collection) + assert not await memory_store.does_collection_exist(collection) @pytest.mark.asyncio async def test_upsert(): collection = f"int-tests-{randint(1000, 9999)}" async with AzureCognitiveSearchMemoryStore(vector_size=4) as memory_store: - await memory_store.create_collection_async(collection) + await memory_store.create_collection(collection) time.sleep(1) try: - assert await memory_store.does_collection_exist_async(collection) + assert await memory_store.does_collection_exist(collection) rec = MemoryRecord( is_reference=False, external_source_name=None, @@ -69,30 +67,30 @@ async def test_upsert(): additional_metadata=None, embedding=np.array([0.2, 0.1, 0.2, 0.7]), ) - id = await memory_store.upsert_async(collection, rec) + id = await memory_store.upsert(collection, rec) time.sleep(1) - many = await memory_store.get_batch_async(collection, [id]) - one = await memory_store.get_async(collection, id) + many = await memory_store.get_batch(collection, [id]) + one = await memory_store.get(collection, id) assert many[0]._id == id assert one._id == id assert one._text == rec._text except: - await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) raise - await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) @pytest.mark.asyncio async def test_record_not_found(): collection = f"int-tests-{randint(1000, 9999)}" async with AzureCognitiveSearchMemoryStore(vector_size=4) as memory_store: - await memory_store.create_collection_async(collection) + await memory_store.create_collection(collection) time.sleep(1) try: - assert await memory_store.does_collection_exist_async(collection) + assert await memory_store.does_collection_exist(collection) rec = MemoryRecord( is_reference=False, external_source_name=None, @@ -102,35 +100,35 @@ async def test_record_not_found(): additional_metadata=None, embedding=np.array([0.2, 0.1, 0.2, 0.7]), ) - id = await memory_store.upsert_async(collection, rec) + id = await memory_store.upsert(collection, rec) except: - 
await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) raise try: - await memory_store.remove_async(collection, id) + await memory_store.remove(collection, id) time.sleep(1) # KeyError exception should occur - await memory_store.get_async(collection, id) + await memory_store.get(collection, id) # Clean up and fail - await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) assert False except KeyError: pass - await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) @pytest.mark.asyncio async def test_search(): collection = f"int-tests-{randint(1000, 9999)}" async with AzureCognitiveSearchMemoryStore(vector_size=4) as memory_store: - await memory_store.create_collection_async(collection) + await memory_store.create_collection(collection) time.sleep(1) try: - assert await memory_store.does_collection_exist_async(collection) + assert await memory_store.does_collection_exist(collection) rec = MemoryRecord( is_reference=False, external_source_name=None, @@ -140,14 +138,12 @@ async def test_search(): additional_metadata=None, embedding=np.array([0.1, 0.2, 0.3, 0.4]), ) - await memory_store.upsert_async(collection, rec) + await memory_store.upsert(collection, rec) time.sleep(1) - result = await memory_store.get_nearest_match_async( - collection, np.array([0.1, 0.2, 0.3, 0.38]) - ) + result = await memory_store.get_nearest_match(collection, np.array([0.1, 0.2, 0.3, 0.38])) assert result[0]._id == rec._id except: - await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) raise - await memory_store.delete_collection_async(collection) + await memory_store.delete_collection(collection) diff --git a/python/tests/integration/connectors/memory/test_azure_cosmosdb_memory_store.py b/python/tests/integration/connectors/memory/test_azure_cosmosdb_memory_store.py new file mode 100644 index 000000000000..9542ac2b63a8 --- /dev/null +++ b/python/tests/integration/connectors/memory/test_azure_cosmosdb_memory_store.py @@ -0,0 +1,177 @@ +# Copyright (c) Microsoft. All rights reserved. 
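A minimal sketch of the round trip that the new Azure Cosmos DB memory-store tests below exercise, assuming a reachable Cosmos DB for MongoDB vCore account. The record values mirror the test fixtures, the embedding is a one-hot vector like the fixtures use, and the asyncio wrapper is added only to make the sketch self-contained; as in the tests, str() is passed where the collection argument is not significant.

import asyncio
from datetime import datetime

import numpy as np

from semantic_kernel.connectors.memory.azure_cosmosdb.azure_cosmos_db_memory_store import (
    AzureCosmosDBMemoryStore,
)
from semantic_kernel.memory.memory_record import MemoryRecord


async def main() -> None:
    # Async factory used by the tests below.
    store = await AzureCosmosDBMemoryStore.create(
        database_name="sk_test_database",
        collection_name="sk_test_collection",
        index_name="sk_test_vector_search_index",
        vector_dimensions=1536,
        num_lists=1,
        similarity="COS",
    )

    # One-hot embedding, matching the create_embedding helper in the tests.
    embedding = np.zeros(1536)
    embedding[0] = 1.0

    record = MemoryRecord(
        id="example_id",
        text="sample text",
        is_reference=False,
        embedding=embedding,
        description="description",
        additional_metadata="additional metadata",
        external_source_name="external source",
        timestamp=datetime.now(),
    )

    # Upsert, read back, vector search, then clean up.
    await store.upsert(str(), record)
    fetched = await store.get(str(), record._id, with_embedding=True)
    match = await store.get_nearest_match(
        "sk_test_collection", embedding, min_relevance_score=0.0, with_embedding=True
    )
    await store.remove(str(), record._id)

    print(fetched._id, match[0]._id if match else None)


if __name__ == "__main__":
    asyncio.run(main())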
+ +from datetime import datetime + +import numpy as np +import pytest + +try: + from semantic_kernel.connectors.memory.azure_cosmosdb.azure_cosmos_db_memory_store import ( + AzureCosmosDBMemoryStore, + ) + + azure_cosmosdb_memory_store_installed = True +except AssertionError: + azure_cosmosdb_memory_store_installed = False + +from semantic_kernel.memory.memory_record import MemoryRecord +from semantic_kernel.memory.memory_store_base import MemoryStoreBase + +index_name = "sk_test_vector_search_index" +vector_dimensions = 1536 +num_lists = 1 +similarity = "COS" +collection_name = "sk_test_collection" +database_name = "sk_test_database" + +pytestmark = pytest.mark.skipif( + not azure_cosmosdb_memory_store_installed, + reason="Azure CosmosDB Memory Store is not installed", +) + + +def create_embedding(non_zero_pos: int) -> np.ndarray: + # Create a NumPy array with a single non-zero value of dimension 1546 + embedding = np.zeros(vector_dimensions) + embedding[non_zero_pos - 1] = 1.0 + return embedding + + +@pytest.fixture +def memory_record1(): + return MemoryRecord( + id="test_id1", + text="sample text1", + is_reference=False, + embedding=create_embedding(non_zero_pos=1), + description="description", + additional_metadata="additional metadata", + external_source_name="external source", + timestamp=datetime.now(), + ) + + +@pytest.fixture +def memory_record2(): + return MemoryRecord( + id="test_id2", + text="sample text2", + is_reference=False, + embedding=create_embedding(non_zero_pos=2), + description="description", + additional_metadata="additional metadata", + external_source_name="external source", + timestamp=datetime.now(), + ) + + +@pytest.fixture +def memory_record3(): + return MemoryRecord( + id="test_id3", + text="sample text3", + is_reference=False, + embedding=create_embedding(non_zero_pos=3), + description="description", + additional_metadata="additional metadata", + external_source_name="external source", + timestamp=datetime.now(), + ) + + +async def azurecosmosdb_memorystore() -> MemoryStoreBase: + store = await AzureCosmosDBMemoryStore.create( + database_name=database_name, + collection_name=collection_name, + index_name=index_name, + vector_dimensions=vector_dimensions, + num_lists=num_lists, + similarity=similarity, + ) + return store + + +@pytest.mark.asyncio +async def test_create_get_drop_exists_collection(): + store = await azurecosmosdb_memorystore() + test_collection = "test_collection" + + await store.create_collection(test_collection) + + collection_list = await store.get_collections() + assert test_collection in collection_list + + await store.delete_collection(test_collection) + + result = await store.does_collection_exist(test_collection) + assert result is True + + +@pytest.mark.asyncio +async def test_upsert_and_get_and_remove( + memory_record1: MemoryRecord, +): + store = await azurecosmosdb_memorystore() + doc_id = await store.upsert(str(), memory_record1) + assert doc_id == memory_record1._id + + result = await store.get(str(), memory_record1._id, with_embedding=True) + + assert result is not None + assert result._id == memory_record1._id + assert all(result._embedding[i] == memory_record1._embedding[i] for i in range(len(result._embedding))) + + await store.remove(str(), memory_record1._id) + + +@pytest.mark.asyncio +async def test_upsert_batch_and_get_batch_remove_batch(memory_record2: MemoryRecord, memory_record3: MemoryRecord): + store = await azurecosmosdb_memorystore() + doc_ids = await store.upsert_batch(str(), [memory_record2, memory_record3]) + 
assert len(doc_ids) == 2 + assert all(doc_id in [memory_record2._id, memory_record3._id] for doc_id in doc_ids) + + results = await store.get_batch(str(), [memory_record2._id, memory_record3._id], with_embeddings=True) + + assert len(results) == 2 + assert all(result._id in [memory_record2._id, memory_record3._id] for result in results) + + await store.remove_batch(str(), [memory_record2._id, memory_record3._id]) + + +@pytest.mark.asyncio +async def test_get_nearest_match(memory_record1: MemoryRecord, memory_record2: MemoryRecord): + store = await azurecosmosdb_memorystore() + await store.upsert_batch(str(), [memory_record1, memory_record2]) + test_embedding = memory_record1.embedding.copy() + test_embedding[0] = test_embedding[0] + 0.1 + + result = await store.get_nearest_match( + collection_name, test_embedding, min_relevance_score=0.0, with_embedding=True + ) + + assert result is not None + assert result[0]._id == memory_record1._id + assert all(result[0]._embedding[i] == memory_record1._embedding[i] for i in range(len(result[0]._embedding))) + + await store.remove_batch(str(), [memory_record1._id, memory_record2._id]) + + +@pytest.mark.asyncio +async def test_get_nearest_matches( + memory_record1: MemoryRecord, + memory_record2: MemoryRecord, + memory_record3: MemoryRecord, +): + store = await azurecosmosdb_memorystore() + await store.upsert_batch(str(), [memory_record1, memory_record2, memory_record3]) + test_embedding = memory_record2.embedding.copy() + test_embedding[0] = test_embedding[4] + 0.1 + + result = await store.get_nearest_matches( + str(), test_embedding, limit=2, min_relevance_score=0.0, with_embeddings=True + ) + assert len(result) == 2 + assert all(result[i][0]._id in [memory_record1._id, memory_record2._id] for i in range(2)) + + await store.remove_batch(str(), [memory_record1._id, memory_record2._id, memory_record3._id]) diff --git a/python/tests/integration/connectors/memory/test_chroma.py b/python/tests/integration/connectors/memory/test_chroma.py index a580c937b12d..438d4cf8466a 100644 --- a/python/tests/integration/connectors/memory/test_chroma.py +++ b/python/tests/integration/connectors/memory/test_chroma.py @@ -15,9 +15,7 @@ except ImportError: chromadb_installed = False -pytestmark = pytest.mark.skipif( - not chromadb_installed, reason="chromadb is not installed" -) +pytestmark = pytest.mark.skipif(not chromadb_installed, reason="chromadb is not installed") @pytest.fixture @@ -25,9 +23,9 @@ def setup_chroma(): persist_directory = "chroma/TEMP/" memory = ChromaMemoryStore(persist_directory=persist_directory) yield memory - collections = asyncio.run(memory.get_collections_async()) + collections = asyncio.run(memory.get_collections()) for collection in collections: - asyncio.run(memory.delete_collection_async(collection)) + asyncio.run(memory.delete_collection(collection)) @pytest.fixture @@ -64,61 +62,61 @@ def test_constructor(setup_chroma): @pytest.mark.asyncio -async def test_create_and_get_collection_async(setup_chroma): +async def test_create_and_get_collection(setup_chroma): memory = setup_chroma - await memory.create_collection_async("test_collection") - result = await memory.get_collection_async("test_collection") + await memory.create_collection("test_collection") + result = await memory.get_collection("test_collection") assert result.name == "test_collection" @pytest.mark.asyncio -async def test_get_collections_async(setup_chroma): +async def test_get_collections(setup_chroma): memory = setup_chroma - await 
memory.create_collection_async("test_collection1") - await memory.create_collection_async("test_collection2") - await memory.create_collection_async("test_collection3") - result = await memory.get_collections_async() + await memory.create_collection("test_collection1") + await memory.create_collection("test_collection2") + await memory.create_collection("test_collection3") + result = await memory.get_collections() assert len(result) == 3 @pytest.mark.asyncio -async def test_delete_collection_async(setup_chroma): +async def test_delete_collection(setup_chroma): memory = setup_chroma - await memory.create_collection_async("test_collection") - await memory.delete_collection_async("test_collection") - result = await memory.get_collections_async() + await memory.create_collection("test_collection") + await memory.delete_collection("test_collection") + result = await memory.get_collections() assert len(result) == 0 - await memory.create_collection_async("test_collection") - await memory.delete_collection_async("TEST_COLLECTION") - result = await memory.get_collections_async() + await memory.create_collection("test_collection") + await memory.delete_collection("TEST_COLLECTION") + result = await memory.get_collections() assert len(result) == 0 @pytest.mark.asyncio -async def test_does_collection_exist_async(setup_chroma): +async def test_does_collection_exist(setup_chroma): memory = setup_chroma - await memory.create_collection_async("test_collection") - result = await memory.does_collection_exist_async("test_collection") + await memory.create_collection("test_collection") + result = await memory.does_collection_exist("test_collection") assert result is True - result = await memory.does_collection_exist_async("TEST_COLLECTION") + result = await memory.does_collection_exist("TEST_COLLECTION") assert result is True @pytest.mark.asyncio -async def test_upsert_and_get_async(setup_chroma, memory_record1): +async def test_upsert_and_get(setup_chroma, memory_record1): memory = setup_chroma - await memory.create_collection_async("test_collection") - collection = await memory.get_collection_async("test_collection") + await memory.create_collection("test_collection") + collection = await memory.get_collection("test_collection") - await memory.upsert_async(collection.name, memory_record1) + await memory.upsert(collection.name, memory_record1) - result = await memory.get_async(collection.name, "test_id1", True) + result = await memory.get(collection.name, "test_id1", True) assert result._id == "test_id1" assert result._text == "sample text1" assert result._is_reference is False @@ -130,14 +128,14 @@ async def test_upsert_and_get_async(setup_chroma, memory_record1): @pytest.mark.asyncio -async def test_upsert_and_get_async_with_no_embedding(setup_chroma, memory_record1): +async def test_upsert_and_get_with_no_embedding(setup_chroma, memory_record1): memory = setup_chroma - await memory.create_collection_async("test_collection") - collection = await memory.get_collection_async("test_collection") + await memory.create_collection("test_collection") + collection = await memory.get_collection("test_collection") - await memory.upsert_async(collection.name, memory_record1) + await memory.upsert(collection.name, memory_record1) - result = await memory.get_async(collection.name, "test_id1", False) + result = await memory.get(collection.name, "test_id1", False) assert result._id == "test_id1" assert result._text == "sample text1" assert result._is_reference is False @@ -149,16 +147,14 @@ async def 
test_upsert_and_get_async_with_no_embedding(setup_chroma, memory_recor @pytest.mark.asyncio -async def test_upsert_and_get_batch_async(setup_chroma, memory_record1, memory_record2): +async def test_upsert_and_get_batch(setup_chroma, memory_record1, memory_record2): memory = setup_chroma - await memory.create_collection_async("test_collection") - collection = await memory.get_collection_async("test_collection") + await memory.create_collection("test_collection") + collection = await memory.get_collection("test_collection") - await memory.upsert_batch_async(collection.name, [memory_record1, memory_record2]) + await memory.upsert_batch(collection.name, [memory_record1, memory_record2]) - result = await memory.get_batch_async( - "test_collection", ["test_id1", "test_id2"], True - ) + result = await memory.get_batch("test_collection", ["test_id1", "test_id2"], True) assert len(result) == 2 assert result[0]._id == "test_id1" assert result[0]._text == "sample text1" @@ -171,45 +167,41 @@ async def test_upsert_and_get_batch_async(setup_chroma, memory_record1, memory_r @pytest.mark.asyncio -async def test_remove_async(setup_chroma, memory_record1): +async def test_remove(setup_chroma, memory_record1): memory = setup_chroma - await memory.create_collection_async("test_collection") - collection = await memory.get_collection_async("test_collection") + await memory.create_collection("test_collection") + collection = await memory.get_collection("test_collection") - await memory.upsert_async(collection.name, memory_record1) - await memory.remove_async(collection.name, "test_id1") + await memory.upsert(collection.name, memory_record1) + await memory.remove(collection.name, "test_id1") - # memory.get_async should raise Exception if record is not found + # memory.get should raise Exception if record is not found with pytest.raises(Exception): - await memory.get_async(collection.name, "test_id1", True) + await memory.get(collection.name, "test_id1", True) @pytest.mark.asyncio -async def test_remove_batch_async(setup_chroma, memory_record1, memory_record2): +async def test_remove_batch(setup_chroma, memory_record1, memory_record2): memory = setup_chroma - await memory.create_collection_async("test_collection") - collection = await memory.get_collection_async("test_collection") + await memory.create_collection("test_collection") + collection = await memory.get_collection("test_collection") - await memory.upsert_batch_async(collection.name, [memory_record1, memory_record2]) - await memory.remove_batch_async(collection.name, ["test_id1", "test_id2"]) + await memory.upsert_batch(collection.name, [memory_record1, memory_record2]) + await memory.remove_batch(collection.name, ["test_id1", "test_id2"]) - result = await memory.get_batch_async( - "test_collection", ["test_id1", "test_id2"], True - ) + result = await memory.get_batch("test_collection", ["test_id1", "test_id2"], True) assert result == [] @pytest.mark.asyncio -async def test_get_nearest_matches_async(setup_chroma, memory_record1, memory_record2): +async def test_get_nearest_matches(setup_chroma, memory_record1, memory_record2): memory = setup_chroma - await memory.create_collection_async("test_collection") - collection = await memory.get_collection_async("test_collection") + await memory.create_collection("test_collection") + collection = await memory.get_collection("test_collection") - await memory.upsert_batch_async(collection.name, [memory_record1, memory_record2]) + await memory.upsert_batch(collection.name, [memory_record1, memory_record2]) - results 
= await memory.get_nearest_matches_async( - "test_collection", np.array([0.5, 0.5]), limit=2 - ) + results = await memory.get_nearest_matches("test_collection", np.array([0.5, 0.5]), limit=2) assert len(results) == 2 assert isinstance(results[0][0], MemoryRecord) @@ -217,16 +209,14 @@ async def test_get_nearest_matches_async(setup_chroma, memory_record1, memory_re @pytest.mark.asyncio -async def test_get_nearest_match_async(setup_chroma, memory_record1, memory_record2): +async def test_get_nearest_match(setup_chroma, memory_record1, memory_record2): memory = setup_chroma - await memory.create_collection_async("test_collection") - collection = await memory.get_collection_async("test_collection") + await memory.create_collection("test_collection") + collection = await memory.get_collection("test_collection") - await memory.upsert_batch_async(collection.name, [memory_record1, memory_record2]) + await memory.upsert_batch(collection.name, [memory_record1, memory_record2]) - result = await memory.get_nearest_match_async( - "test_collection", np.array([0.5, 0.5]) - ) + result = await memory.get_nearest_match("test_collection", np.array([0.5, 0.5])) assert len(result) == 2 assert isinstance(result[0], MemoryRecord) diff --git a/python/tests/integration/connectors/memory/test_milvus.py b/python/tests/integration/connectors/memory/test_milvus.py index a55f6e6bee47..3404b089f4f1 100644 --- a/python/tests/integration/connectors/memory/test_milvus.py +++ b/python/tests/integration/connectors/memory/test_milvus.py @@ -15,9 +15,7 @@ except ImportError: milvus_installed = False -pytestmark = pytest.mark.skipif( - not milvus_installed, reason="local milvus is not installed" -) +pytestmark = pytest.mark.skipif(not milvus_installed, reason="local milvus is not installed") pytestmark = pytest.mark.skipif( platform.system() == "Windows", @@ -65,66 +63,66 @@ def memory_record2(): @pytest.mark.asyncio -async def test_create_and_get_collection_async(setup_milvus): +async def test_create_and_get_collection(setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection", 2) - result = await memory.get_collections_async() + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) + result = await memory.get_collections() assert result == ["test_collection"] @pytest.mark.asyncio -async def test_get_collections_async(setup_milvus): +async def test_get_collections(setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection1", 2) - await memory.create_collection_async("test_collection2", 2) - await memory.create_collection_async("test_collection3", 2) - result = await memory.get_collections_async() + await memory.delete_collection(all=True) + await memory.create_collection("test_collection1", 2) + await memory.create_collection("test_collection2", 2) + await memory.create_collection("test_collection3", 2) + result = await memory.get_collections() assert len(result) == 3 @pytest.mark.asyncio -async def test_delete_collection_async(setup_milvus): +async def test_delete_collection(setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection", 2) - await 
memory.delete_collection_async("test_collection", 2) - result = await memory.get_collections_async() + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) + await memory.delete_collection("test_collection", 2) + result = await memory.get_collections() assert len(result) == 0 - await memory.create_collection_async("test_collection", 2) - await memory.delete_collection_async("TEST_COLLECTION", 2) - result = await memory.get_collections_async() + await memory.create_collection("test_collection", 2) + await memory.delete_collection("TEST_COLLECTION", 2) + result = await memory.get_collections() assert len(result) == 0 @pytest.mark.asyncio -async def test_does_collection_exist_async(setup_milvus): +async def test_does_collection_exist(setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection", 2) - result = await memory.does_collection_exist_async("test_collection") + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) + result = await memory.does_collection_exist("test_collection") assert result is True - result = await memory.does_collection_exist_async("TEST_COLLECTION") + result = await memory.does_collection_exist("TEST_COLLECTION") assert result is False @pytest.mark.asyncio -async def test_upsert_and_get_async(memory_record1, setup_milvus): +async def test_upsert_and_get(memory_record1, setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) + await memory.delete_collection(all=True) - await memory.create_collection_async("test_collection", 2) - await memory.upsert_async("test_collection", memory_record1) + await memory.create_collection("test_collection", 2) + await memory.upsert("test_collection", memory_record1) - result = await memory.get_async("test_collection", "test_id1", True) + result = await memory.get("test_collection", "test_id1", True) assert result._id == "test_id1" assert result._text == "sample text1" assert result._is_reference is False @@ -136,15 +134,15 @@ async def test_upsert_and_get_async(memory_record1, setup_milvus): @pytest.mark.asyncio -async def test_upsert_and_get_async_with_no_embedding(memory_record1, setup_milvus): +async def test_upsert_and_get_with_no_embedding(memory_record1, setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection", 2) + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) - await memory.upsert_async("test_collection", memory_record1) + await memory.upsert("test_collection", memory_record1) - result = await memory.get_async("test_collection", "test_id1", False) + result = await memory.get("test_collection", "test_id1", False) assert result._id == "test_id1" assert result._text == "sample text1" assert result._is_reference is False @@ -156,17 +154,15 @@ async def test_upsert_and_get_async_with_no_embedding(memory_record1, setup_milv @pytest.mark.asyncio -async def test_upsert_and_get_batch_async(memory_record1, memory_record2, setup_milvus): +async def test_upsert_and_get_batch(memory_record1, memory_record2, setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await 
memory.create_collection_async("test_collection", 2) + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) - result = await memory.get_batch_async( - "test_collection", ["test_id1", "test_id2"], True - ) + result = await memory.get_batch("test_collection", ["test_id1", "test_id2"], True) assert len(result) == 2 assert result[0]._id == "test_id1" assert result[0]._text == "sample text1" @@ -179,62 +175,56 @@ async def test_upsert_and_get_batch_async(memory_record1, memory_record2, setup_ @pytest.mark.asyncio -async def test_remove_async(memory_record1, setup_milvus): +async def test_remove(memory_record1, setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection", 2) + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) - await memory.upsert_async("test_collection", memory_record1) - await memory.remove_async("test_collection", "test_id1") + await memory.upsert("test_collection", memory_record1) + await memory.remove("test_collection", "test_id1") - # memory.get_async should raise Exception if record is not found + # memory.get should raise Exception if record is not found with pytest.raises(Exception): - await memory.get_async("test_collection", "test_id1", True) + await memory.get("test_collection", "test_id1", True) @pytest.mark.asyncio -async def test_remove_batch_async(memory_record1, memory_record2, setup_milvus): +async def test_remove_batch(memory_record1, memory_record2, setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection", 2) + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) - await memory.remove_batch_async("test_collection", ["test_id1", "test_id2"]) + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) + await memory.remove_batch("test_collection", ["test_id1", "test_id2"]) - result = await memory.get_batch_async( - "test_collection", ["test_id1", "test_id2"], True - ) + result = await memory.get_batch("test_collection", ["test_id1", "test_id2"], True) assert result == [] @pytest.mark.asyncio -async def test_get_nearest_matches_async(memory_record1, memory_record2, setup_milvus): +async def test_get_nearest_matches(memory_record1, memory_record2, setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection", 2) - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) - results = await memory.get_nearest_matches_async( - "test_collection", np.array([0.5, 0.5]), limit=2 - ) + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) + results = await memory.get_nearest_matches("test_collection", np.array([0.5, 0.5]), limit=2) assert len(results) == 2 assert isinstance(results[0][0], MemoryRecord) assert results[0][1] 
== pytest.approx(0.5, abs=1e-5) @pytest.mark.asyncio -async def test_get_nearest_match_async(memory_record1, memory_record2, setup_milvus): +async def test_get_nearest_match(memory_record1, memory_record2, setup_milvus): URI, TOKEN = setup_milvus memory = MilvusMemoryStore(uri=URI, token=TOKEN) - await memory.delete_collection_async(all=True) - await memory.create_collection_async("test_collection", 2) - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + await memory.delete_collection(all=True) + await memory.create_collection("test_collection", 2) + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) - result = await memory.get_nearest_match_async( - "test_collection", np.array([0.5, 0.5]) - ) + result = await memory.get_nearest_match("test_collection", np.array([0.5, 0.5])) assert len(result) == 2 assert isinstance(result[0], MemoryRecord) assert result[1] == pytest.approx(0.5, abs=1e-5) diff --git a/python/tests/integration/connectors/memory/test_mongodb_atlas.py b/python/tests/integration/connectors/memory/test_mongodb_atlas.py index 4f1013dcf316..1a8010dcb093 100644 --- a/python/tests/integration/connectors/memory/test_mongodb_atlas.py +++ b/python/tests/integration/connectors/memory/test_mongodb_atlas.py @@ -30,9 +30,7 @@ DIMENSIONS = 3 -def is_equal_memory_record( - mem1: MemoryRecord, mem2: MemoryRecord, with_embeddings: bool -): +def is_equal_memory_record(mem1: MemoryRecord, mem2: MemoryRecord, with_embeddings: bool): """Comparator for two memory records""" def dictify_memory_record(mem): @@ -70,12 +68,10 @@ def test_collection(): async def vector_search_store(): if "Python_Integration_Tests" in os.environ: connection_string = os.environ["MONGODB_ATLAS_CONNECTION_STRING"] - async with MongoDBAtlasMemoryStore( - connection_string=connection_string, database_name="pyMSKTest" - ) as memory: + async with MongoDBAtlasMemoryStore(connection_string=connection_string, database_name="pyMSKTest") as memory: # Delete all collections before and after - for cname in await memory.get_collections_async(): - await memory.delete_collection_async(cname) + for cname in await memory.get_collections(): + await memory.delete_collection(cname) def patch_index_exception(fn): """Function patch for collection creation call to retry @@ -96,16 +92,14 @@ async def _patch(collection_name): return _patch - memory.create_collection_async = patch_index_exception( - memory.create_collection_async - ) + memory.create_collection = patch_index_exception(memory.create_collection) try: yield memory finally: pass - for cname in await memory.get_collections_async(): - await memory.delete_collection_async(cname) + for cname in await memory.get_collections(): + await memory.delete_collection(cname) @pytest_asyncio.fixture @@ -113,10 +107,8 @@ async def nearest_match_store(): """Fixture for read only vector store; the URI for test needs atlas configured""" if "Python_Integration_Tests" in os.environ: connection_string = os.environ["MONGODB_ATLAS_CONNECTION_STRING"] - async with MongoDBAtlasMemoryStore( - connection_string=connection_string, database_name="pyMSKTest" - ) as memory: - if not await memory.does_collection_exist_async("nearestSearch"): + async with MongoDBAtlasMemoryStore(connection_string=connection_string, database_name="pyMSKTest") as memory: + if not await memory.does_collection_exist("nearestSearch"): pytest.skip( reason="db: readOnly collection: nearestSearch not found, " + "please ensure your Atlas Test Cluster has this collection configured" 
@@ -131,89 +123,71 @@ async def test_constructor(vector_search_store): @pytest.mark.asyncio async def test_collection_create_and_delete(vector_search_store, test_collection): - await vector_search_store.create_collection_async(test_collection) - assert await vector_search_store.does_collection_exist_async(test_collection) - await vector_search_store.delete_collection_async(test_collection) - assert not await vector_search_store.does_collection_exist_async(test_collection) + await vector_search_store.create_collection(test_collection) + assert await vector_search_store.does_collection_exist(test_collection) + await vector_search_store.delete_collection(test_collection) + assert not await vector_search_store.does_collection_exist(test_collection) @pytest.mark.asyncio -async def test_collection_upsert( - vector_search_store, test_collection, memory_record_gen -): +async def test_collection_upsert(vector_search_store, test_collection, memory_record_gen): mems = [memory_record_gen(i) for i in range(1, 4)] - mem1 = await vector_search_store.upsert_async(test_collection, mems[0]) + mem1 = await vector_search_store.upsert(test_collection, mems[0]) assert mem1 == mems[0]._id @pytest.mark.asyncio -async def test_collection_batch_upsert( - vector_search_store, test_collection, memory_record_gen -): +async def test_collection_batch_upsert(vector_search_store, test_collection, memory_record_gen): mems = [memory_record_gen(i) for i in range(1, 4)] - mems_check = await vector_search_store.upsert_batch_async(test_collection, mems) + mems_check = await vector_search_store.upsert_batch(test_collection, mems) assert [m._id for m in mems] == mems_check @pytest.mark.asyncio -async def test_collection_deletion( - vector_search_store, test_collection, memory_record_gen -): +async def test_collection_deletion(vector_search_store, test_collection, memory_record_gen): mem = memory_record_gen(1) - await vector_search_store.upsert_async(test_collection, mem) - insertion_val = await vector_search_store.get_async(test_collection, mem._id, True) + await vector_search_store.upsert(test_collection, mem) + insertion_val = await vector_search_store.get(test_collection, mem._id, True) assert mem._id == insertion_val._id assert mem._embedding.tolist() == insertion_val._embedding.tolist() assert insertion_val is not None - await vector_search_store.remove_async(test_collection, mem._id) - val = await vector_search_store.get_async(test_collection, mem._id, False) + await vector_search_store.remove(test_collection, mem._id) + val = await vector_search_store.get(test_collection, mem._id, False) assert val is None @pytest.mark.asyncio -async def test_collection_batch_deletion( - vector_search_store, test_collection, memory_record_gen -): +async def test_collection_batch_deletion(vector_search_store, test_collection, memory_record_gen): mems = [memory_record_gen(i) for i in range(1, 4)] - await vector_search_store.upsert_batch_async(test_collection, mems) + await vector_search_store.upsert_batch(test_collection, mems) ids = [mem._id for mem in mems] - insertion_val = await vector_search_store.get_batch_async( - test_collection, ids, True - ) + insertion_val = await vector_search_store.get_batch(test_collection, ids, True) assert len(insertion_val) == len(mems) - await vector_search_store.remove_batch_async(test_collection, ids) - assert not await vector_search_store.get_batch_async(test_collection, ids, False) + await vector_search_store.remove_batch(test_collection, ids) + assert not await 
vector_search_store.get_batch(test_collection, ids, False) @pytest.mark.asyncio async def test_collection_get(vector_search_store, test_collection, memory_record_gen): mem = memory_record_gen(1) - await vector_search_store.upsert_async(test_collection, mem) - insertion_val = await vector_search_store.get_async(test_collection, mem._id, False) + await vector_search_store.upsert(test_collection, mem) + insertion_val = await vector_search_store.get(test_collection, mem._id, False) is_equal_memory_record(mem, insertion_val, False) - refetched_record = await vector_search_store.get_async( - test_collection, mem._id, True - ) + refetched_record = await vector_search_store.get(test_collection, mem._id, True) is_equal_memory_record(mem, refetched_record, True) @pytest.mark.asyncio -async def test_collection_batch_get( - vector_search_store, test_collection, memory_record_gen -): +async def test_collection_batch_get(vector_search_store, test_collection, memory_record_gen): mems = {str(i): memory_record_gen(i) for i in range(1, 4)} - await vector_search_store.upsert_batch_async(test_collection, list(mems.values())) - insertion_val = await vector_search_store.get_batch_async( - test_collection, list(mems.keys()), False - ) + await vector_search_store.upsert_batch(test_collection, list(mems.values())) + insertion_val = await vector_search_store.get_batch(test_collection, list(mems.keys()), False) assert len(insertion_val) == len(mems) for val in insertion_val: is_equal_memory_record(mems[val._id], val, False) - refetched_vals = await vector_search_store.get_batch_async( - test_collection, list(mems.keys()), True - ) + refetched_vals = await vector_search_store.get_batch(test_collection, list(mems.keys()), True) for ref in refetched_vals: is_equal_memory_record(mems[ref._id], ref, True) @@ -221,8 +195,8 @@ async def test_collection_batch_get( @pytest.mark.asyncio async def test_collection_knn_match(nearest_match_store, memory_record_gen): mem = memory_record_gen(7) - await nearest_match_store.upsert_async(READ_ONLY_COLLECTION, mem) - result, score = await nearest_match_store.get_nearest_match_async( + await nearest_match_store.upsert(READ_ONLY_COLLECTION, mem) + result, score = await nearest_match_store.get_nearest_match( collection_name=READ_ONLY_COLLECTION, embedding=mem._embedding, with_embedding=True, @@ -238,7 +212,7 @@ async def knn_matcher( query_limit, expected_limit, ): - results_and_scores = await nearest_match_store.get_nearest_matches_async( + results_and_scores = await nearest_match_store.get_nearest_matches( collection_name=test_collection, embedding=mems["2"]._embedding, limit=query_limit, @@ -254,7 +228,7 @@ async def knn_matcher( @pytest.mark.asyncio async def test_collection_knn_matches(nearest_match_store, memory_record_gen): mems = {str(i): memory_record_gen(i) for i in range(1, 4)} - await nearest_match_store.upsert_batch_async(READ_ONLY_COLLECTION, mems.values()) + await nearest_match_store.upsert_batch(READ_ONLY_COLLECTION, mems.values()) await knn_matcher( nearest_match_store, READ_ONLY_COLLECTION, diff --git a/python/tests/integration/connectors/memory/test_pinecone.py b/python/tests/integration/connectors/memory/test_pinecone.py index 583ed2cbb0f8..c59b612d3959 100644 --- a/python/tests/integration/connectors/memory/test_pinecone.py +++ b/python/tests/integration/connectors/memory/test_pinecone.py @@ -17,9 +17,7 @@ except ImportError: pinecone_installed = False -pytestmark = pytest.mark.skipif( - not pinecone_installed, reason="pinecone is not installed" -) +pytestmark = 
pytest.mark.skipif(not pinecone_installed, reason="pinecone is not installed") async def retry(func, retries=1): @@ -101,72 +99,62 @@ def test_constructor(get_pinecone_config): @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_create_and_get_collection_async(get_pinecone_config): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_create_and_get_collection(get_pinecone_config): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - result = await retry(lambda: memory.describe_collection_async("test-collection")) + await retry(lambda: memory.create_collection("test-collection")) + result = await retry(lambda: memory.describe_collection("test-collection")) assert result is not None assert result.name == "test-collection" @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_get_collections_async(get_pinecone_config): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_get_collections(get_pinecone_config): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection", 2)) - result = await retry(lambda: memory.get_collections_async()) + await retry(lambda: memory.create_collection("test-collection", 2)) + result = await retry(lambda: memory.get_collections()) assert "test-collection" in result @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_delete_collection_async(get_pinecone_config): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_delete_collection(get_pinecone_config): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - await retry(lambda: memory.delete_collection_async("test-collection")) - result = await retry(lambda: memory.get_collections_async()) + await retry(lambda: memory.create_collection("test-collection")) + await retry(lambda: memory.delete_collection("test-collection")) + result = await retry(lambda: memory.get_collections()) assert "test-collection" not in result @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_does_collection_exist_async(get_pinecone_config): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_does_collection_exist(get_pinecone_config): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - result = await retry(lambda: memory.does_collection_exist_async("test-collection")) + await retry(lambda: memory.create_collection("test-collection")) + result = await retry(lambda: memory.does_collection_exist("test-collection")) assert result is True @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone 
free tier" -) -async def test_upsert_async_and_get_async(get_pinecone_config, memory_record1): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_upsert_and_get(get_pinecone_config, memory_record1): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - await retry(lambda: memory.upsert_async("test-collection", memory_record1)) + await retry(lambda: memory.create_collection("test-collection")) + await retry(lambda: memory.upsert("test-collection", memory_record1)) result = await retry( - lambda: memory.get_async( + lambda: memory.get( "test-collection", memory_record1._id, with_embedding=True, @@ -181,24 +169,16 @@ async def test_upsert_async_and_get_async(get_pinecone_config, memory_record1): @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_upsert_batch_async_and_get_batch_async( - get_pinecone_config, memory_record1, memory_record2 -): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_upsert_batch_and_get_batch(get_pinecone_config, memory_record1, memory_record2): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - await retry( - lambda: memory.upsert_batch_async( - "test-collection", [memory_record1, memory_record2] - ) - ) + await retry(lambda: memory.create_collection("test-collection")) + await retry(lambda: memory.upsert_batch("test-collection", [memory_record1, memory_record2])) results = await retry( - lambda: memory.get_batch_async( + lambda: memory.get_batch( "test-collection", [memory_record1._id, memory_record2._id], with_embeddings=True, @@ -211,76 +191,50 @@ async def test_upsert_batch_async_and_get_batch_async( @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_remove_async(get_pinecone_config, memory_record1): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_remove(get_pinecone_config, memory_record1): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - await retry(lambda: memory.upsert_async("test-collection", memory_record1)) - await retry(lambda: memory.remove_async("test-collection", memory_record1._id)) + await retry(lambda: memory.create_collection("test-collection")) + await retry(lambda: memory.upsert("test-collection", memory_record1)) + await retry(lambda: memory.remove("test-collection", memory_record1._id)) with pytest.raises(KeyError): - _ = await memory.get_async( - "test-collection", memory_record1._id, with_embedding=True - ) + _ = await memory.get("test-collection", memory_record1._id, with_embedding=True) @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_remove_batch_async(get_pinecone_config, memory_record1, memory_record2): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_remove_batch(get_pinecone_config, memory_record1, memory_record2): 
api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - await retry( - lambda: memory.upsert_batch_async( - "test-collection", [memory_record1, memory_record2] - ) - ) - await retry( - lambda: memory.remove_batch_async( - "test-collection", [memory_record1._id, memory_record2._id] - ) - ) + await retry(lambda: memory.create_collection("test-collection")) + await retry(lambda: memory.upsert_batch("test-collection", [memory_record1, memory_record2])) + await retry(lambda: memory.remove_batch("test-collection", [memory_record1._id, memory_record2._id])) with pytest.raises(KeyError): - _ = await memory.get_async( - "test-collection", memory_record1._id, with_embedding=True - ) + _ = await memory.get("test-collection", memory_record1._id, with_embedding=True) with pytest.raises(KeyError): - _ = await memory.get_async( - "test-collection", memory_record2._id, with_embedding=True - ) + _ = await memory.get("test-collection", memory_record2._id, with_embedding=True) @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_get_nearest_match_async( - get_pinecone_config, memory_record1, memory_record2 -): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_get_nearest_match(get_pinecone_config, memory_record1, memory_record2): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - await retry( - lambda: memory.upsert_batch_async( - "test-collection", [memory_record1, memory_record2] - ) - ) + await retry(lambda: memory.create_collection("test-collection")) + await retry(lambda: memory.upsert_batch("test-collection", [memory_record1, memory_record2])) test_embedding = memory_record1.embedding test_embedding[0] = test_embedding[0] + 0.01 result = await retry( - lambda: memory.get_nearest_match_async( + lambda: memory.get_nearest_match( "test-collection", test_embedding, min_relevance_score=0.0, @@ -293,27 +247,19 @@ async def test_get_nearest_match_async( @pytest.mark.asyncio -@pytest.mark.xfail( - reason="Test failed due to known unreliable communications with Pinecone free tier" -) -async def test_get_nearest_matches_async( - get_pinecone_config, memory_record1, memory_record2, memory_record3 -): +@pytest.mark.xfail(reason="Test failed due to known unreliable communications with Pinecone free tier") +async def test_get_nearest_matches(get_pinecone_config, memory_record1, memory_record2, memory_record3): api_key, environment = get_pinecone_config memory = PineconeMemoryStore(api_key, environment, 2) - await retry(lambda: memory.create_collection_async("test-collection")) - await retry( - lambda: memory.upsert_batch_async( - "test-collection", [memory_record1, memory_record2, memory_record3] - ) - ) + await retry(lambda: memory.create_collection("test-collection")) + await retry(lambda: memory.upsert_batch("test-collection", [memory_record1, memory_record2, memory_record3])) test_embedding = memory_record2.embedding test_embedding[0] = test_embedding[0] + 0.025 result = await retry( - lambda: memory.get_nearest_matches_async( + lambda: memory.get_nearest_matches( "test-collection", test_embedding, limit=2, diff --git a/python/tests/integration/connectors/memory/test_postgres.py 
b/python/tests/integration/connectors/memory/test_postgres.py index 5a23d521386a..444ad7978016 100644 --- a/python/tests/integration/connectors/memory/test_postgres.py +++ b/python/tests/integration/connectors/memory/test_postgres.py @@ -18,9 +18,7 @@ except ImportError: psycopg_installed = False -pytestmark = pytest.mark.skipif( - not psycopg_installed, reason="psycopg is not installed" -) +pytestmark = pytest.mark.skipif(not psycopg_installed, reason="psycopg is not installed") try: import psycopg_pool # noqa: F401 @@ -29,9 +27,7 @@ except ImportError: psycopg_pool_installed = False -pytestmark = pytest.mark.skipif( - not psycopg_pool_installed, reason="psycopg_pool is not installed" -) +pytestmark = pytest.mark.skipif(not psycopg_pool_installed, reason="psycopg_pool is not installed") # Needed because the test service may not support a high volume of requests @@ -100,55 +96,53 @@ def test_constructor(connection_string): @pytest.mark.asyncio -async def test_create_and_does_collection_exist_async(connection_string): +async def test_create_and_does_collection_exist(connection_string): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - result = await memory.does_collection_exist_async("test_collection") + await memory.create_collection("test_collection") + result = await memory.does_collection_exist("test_collection") assert result is not None @pytest.mark.asyncio -async def test_get_collections_async(connection_string): +async def test_get_collections(connection_string): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - result = await memory.get_collections_async() + await memory.create_collection("test_collection") + result = await memory.get_collections() assert "test_collection" in result @pytest.mark.asyncio -async def test_delete_collection_async(connection_string): +async def test_delete_collection(connection_string): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") + await memory.create_collection("test_collection") - result = await memory.get_collections_async() + result = await memory.get_collections() assert "test_collection" in result - await memory.delete_collection_async("test_collection") - result = await memory.get_collections_async() + await memory.delete_collection("test_collection") + result = await memory.get_collections() assert "test_collection" not in result @pytest.mark.asyncio -async def test_does_collection_exist_async(connection_string): +async def test_does_collection_exist(connection_string): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - result = await memory.does_collection_exist_async("test_collection") + await memory.create_collection("test_collection") + result = await memory.does_collection_exist("test_collection") assert result is True @pytest.mark.asyncio -async def test_upsert_async_and_get_async(connection_string, memory_record1): +async def test_upsert_and_get(connection_string, memory_record1): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - await memory.upsert_async("test_collection", memory_record1) - result = await memory.get_async( - "test_collection", memory_record1._id, with_embedding=True - ) + await memory.create_collection("test_collection") + await memory.upsert("test_collection", memory_record1) + result = 
await memory.get("test_collection", memory_record1._id, with_embedding=True) assert result is not None assert result._id == memory_record1._id assert result._text == memory_record1._text @@ -158,15 +152,13 @@ async def test_upsert_async_and_get_async(connection_string, memory_record1): @pytest.mark.asyncio -async def test_upsert_batch_async_and_get_batch_async( - connection_string, memory_record1, memory_record2 -): +async def test_upsert_batch_and_get_batch(connection_string, memory_record1, memory_record2): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + await memory.create_collection("test_collection") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) - results = await memory.get_batch_async( + results = await memory.get_batch( "test_collection", [memory_record1._id, memory_record2._id], with_embeddings=True, @@ -178,56 +170,44 @@ async def test_upsert_batch_async_and_get_batch_async( @pytest.mark.asyncio -async def test_remove_async(connection_string, memory_record1): +async def test_remove(connection_string, memory_record1): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - await memory.upsert_async("test_collection", memory_record1) + await memory.create_collection("test_collection") + await memory.upsert("test_collection", memory_record1) - result = await memory.get_async( - "test_collection", memory_record1._id, with_embedding=True - ) + result = await memory.get("test_collection", memory_record1._id, with_embedding=True) assert result is not None - await memory.remove_async("test_collection", memory_record1._id) + await memory.remove("test_collection", memory_record1._id) with pytest.raises(KeyError): - _ = await memory.get_async( - "test_collection", memory_record1._id, with_embedding=True - ) + _ = await memory.get("test_collection", memory_record1._id, with_embedding=True) @pytest.mark.asyncio -async def test_remove_batch_async(connection_string, memory_record1, memory_record2): +async def test_remove_batch(connection_string, memory_record1, memory_record2): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) - await memory.remove_batch_async( - "test_collection", [memory_record1._id, memory_record2._id] - ) + await memory.create_collection("test_collection") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) + await memory.remove_batch("test_collection", [memory_record1._id, memory_record2._id]) with pytest.raises(KeyError): - _ = await memory.get_async( - "test_collection", memory_record1._id, with_embedding=True - ) + _ = await memory.get("test_collection", memory_record1._id, with_embedding=True) with pytest.raises(KeyError): - _ = await memory.get_async( - "test_collection", memory_record2._id, with_embedding=True - ) + _ = await memory.get("test_collection", memory_record2._id, with_embedding=True) @pytest.mark.asyncio -async def test_get_nearest_match_async( - connection_string, memory_record1, memory_record2 -): +async def test_get_nearest_match(connection_string, memory_record1, memory_record2): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - await 
memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + await memory.create_collection("test_collection") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) test_embedding = memory_record1.embedding.copy() test_embedding[0] = test_embedding[0] + 0.01 - result = await memory.get_nearest_match_async( + result = await memory.get_nearest_match( "test_collection", test_embedding, min_relevance_score=0.0, with_embedding=True ) assert result is not None @@ -239,19 +219,15 @@ async def test_get_nearest_match_async( @pytest.mark.asyncio -async def test_get_nearest_matches_async( - connection_string, memory_record1, memory_record2, memory_record3 -): +async def test_get_nearest_matches(connection_string, memory_record1, memory_record2, memory_record3): memory = PostgresMemoryStore(connection_string, 2, 1, 5) - await memory.create_collection_async("test_collection") - await memory.upsert_batch_async( - "test_collection", [memory_record1, memory_record2, memory_record3] - ) + await memory.create_collection("test_collection") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2, memory_record3]) test_embedding = memory_record2.embedding test_embedding[0] = test_embedding[0] + 0.025 - result = await memory.get_nearest_matches_async( + result = await memory.get_nearest_matches( "test_collection", test_embedding, limit=2, diff --git a/python/tests/integration/connectors/memory/test_qdrant_memory_store.py b/python/tests/integration/connectors/memory/test_qdrant_memory_store.py index 67fa74f109b7..4ae3e91d7e38 100644 --- a/python/tests/integration/connectors/memory/test_qdrant_memory_store.py +++ b/python/tests/integration/connectors/memory/test_qdrant_memory_store.py @@ -16,9 +16,7 @@ except ImportError: qdrant_client_installed = False -pytestmark = pytest.mark.skipif( - not qdrant_client_installed, reason="qdrant-client is not installed" -) +pytestmark = pytest.mark.skipif(not qdrant_client_installed, reason="qdrant-client is not installed") @pytest.fixture @@ -69,57 +67,57 @@ def test_qdrant_constructor(): @pytest.mark.asyncio -async def test_create_and_get_collection_async(): +async def test_create_and_get_collection(): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection") - result = await qdrant_mem_store.get_collection_async("test_collection") + await qdrant_mem_store.create_collection("test_collection") + result = await qdrant_mem_store.get_collection("test_collection") assert result.status == "green" @pytest.mark.asyncio -async def test_get_collections_async(): +async def test_get_collections(): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection1") - await qdrant_mem_store.create_collection_async("test_collection2") - await qdrant_mem_store.create_collection_async("test_collection3") - result = await qdrant_mem_store.get_collections_async() + await qdrant_mem_store.create_collection("test_collection1") + await qdrant_mem_store.create_collection("test_collection2") + await qdrant_mem_store.create_collection("test_collection3") + result = await qdrant_mem_store.get_collections() assert len(result) == 3 @pytest.mark.asyncio -async def test_delete_collection_async(): +async def test_delete_collection(): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async( + await 
qdrant_mem_store.create_collection( "test_collection4", ) - result = await qdrant_mem_store.get_collections_async() + result = await qdrant_mem_store.get_collections() assert len(result) == 1 - await qdrant_mem_store.delete_collection_async("test_collection4") - result = await qdrant_mem_store.get_collections_async() + await qdrant_mem_store.delete_collection("test_collection4") + result = await qdrant_mem_store.get_collections() assert len(result) == 0 @pytest.mark.asyncio -async def test_does_collection_exist_async(): +async def test_does_collection_exist(): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection") - result = await qdrant_mem_store.does_collection_exist_async("test_collection") + await qdrant_mem_store.create_collection("test_collection") + result = await qdrant_mem_store.does_collection_exist("test_collection") assert result is True - result = await qdrant_mem_store.does_collection_exist_async("test_collection2") + result = await qdrant_mem_store.does_collection_exist("test_collection2") assert result is False @pytest.mark.asyncio -async def test_upsert_async_and_get_async(memory_record1): +async def test_upsert_and_get(memory_record1): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection") - await qdrant_mem_store.upsert_async("test_collection", memory_record1) - result = await qdrant_mem_store.get_async("test_collection", memory_record1._id) + await qdrant_mem_store.create_collection("test_collection") + await qdrant_mem_store.upsert("test_collection", memory_record1) + result = await qdrant_mem_store.get("test_collection", memory_record1._id) assert result is not None assert result._id == memory_record1._id assert result._text == memory_record1._text @@ -127,15 +125,13 @@ async def test_upsert_async_and_get_async(memory_record1): @pytest.mark.asyncio -async def test_upsert_batch_async_and_get_batch_async(memory_record1, memory_record2): +async def test_upsert_batch_and_get_batch(memory_record1, memory_record2): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection") - await qdrant_mem_store.upsert_batch_async( - "test_collection", [memory_record1, memory_record2] - ) + await qdrant_mem_store.create_collection("test_collection") + await qdrant_mem_store.upsert_batch("test_collection", [memory_record1, memory_record2]) - results = await qdrant_mem_store.get_batch_async( + results = await qdrant_mem_store.get_batch( "test_collection", [memory_record1._id, memory_record2._id], with_embeddings=True, @@ -147,68 +143,48 @@ async def test_upsert_batch_async_and_get_batch_async(memory_record1, memory_rec @pytest.mark.asyncio -async def test_remove_async(memory_record1): +async def test_remove(memory_record1): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection") - await qdrant_mem_store.upsert_async("test_collection", memory_record1) + await qdrant_mem_store.create_collection("test_collection") + await qdrant_mem_store.upsert("test_collection", memory_record1) - result = await qdrant_mem_store.get_async( - "test_collection", memory_record1._id, with_embedding=True - ) + result = await qdrant_mem_store.get("test_collection", memory_record1._id, with_embedding=True) assert result is not None - await 
qdrant_mem_store.remove_async("test_collection", memory_record1._id) + await qdrant_mem_store.remove("test_collection", memory_record1._id) - result = await qdrant_mem_store.get_async( - "test_collection", memory_record1._id, with_embedding=True - ) + result = await qdrant_mem_store.get("test_collection", memory_record1._id, with_embedding=True) assert result is None @pytest.mark.asyncio -async def test_remove_batch_async(memory_record1, memory_record2): +async def test_remove_batch(memory_record1, memory_record2): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection") - await qdrant_mem_store.upsert_batch_async( - "test_collection", [memory_record1, memory_record2] - ) - result = await qdrant_mem_store.get_async( - "test_collection", memory_record1._id, with_embedding=True - ) + await qdrant_mem_store.create_collection("test_collection") + await qdrant_mem_store.upsert_batch("test_collection", [memory_record1, memory_record2]) + result = await qdrant_mem_store.get("test_collection", memory_record1._id, with_embedding=True) assert result is not None - result = await qdrant_mem_store.get_async( - "test_collection", memory_record2._id, with_embedding=True - ) + result = await qdrant_mem_store.get("test_collection", memory_record2._id, with_embedding=True) assert result is not None - await qdrant_mem_store.remove_batch_async( - "test_collection", [memory_record1._id, memory_record2._id] - ) - result = await qdrant_mem_store.get_async( - "test_collection", memory_record1._id, with_embedding=True - ) + await qdrant_mem_store.remove_batch("test_collection", [memory_record1._id, memory_record2._id]) + result = await qdrant_mem_store.get("test_collection", memory_record1._id, with_embedding=True) assert result is None - result = await qdrant_mem_store.get_async( - "test_collection", memory_record2._id, with_embedding=True - ) + result = await qdrant_mem_store.get("test_collection", memory_record2._id, with_embedding=True) assert result is None @pytest.mark.asyncio -async def test_get_nearest_match_async(memory_record1, memory_record2): +async def test_get_nearest_match(memory_record1, memory_record2): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection") - await qdrant_mem_store.upsert_batch_async( - "test_collection", [memory_record1, memory_record2] - ) + await qdrant_mem_store.create_collection("test_collection") + await qdrant_mem_store.upsert_batch("test_collection", [memory_record1, memory_record2]) test_embedding = memory_record1.embedding.copy() test_embedding[0] = test_embedding[0] + 0.01 - result = await qdrant_mem_store.get_nearest_match_async( - "test_collection", test_embedding, min_relevance_score=0.0 - ) + result = await qdrant_mem_store.get_nearest_match("test_collection", test_embedding, min_relevance_score=0.0) assert result is not None assert result[0]._id == memory_record1._id assert result[0]._text == memory_record1._text @@ -216,19 +192,15 @@ async def test_get_nearest_match_async(memory_record1, memory_record2): @pytest.mark.asyncio -async def test_get_nearest_matches_async( - memory_record1, memory_record2, memory_record3 -): +async def test_get_nearest_matches(memory_record1, memory_record2, memory_record3): qdrant_mem_store = QdrantMemoryStore(vector_size=TEST_VECTOR_SIZE, local=True) - await qdrant_mem_store.create_collection_async("test_collection") - await 
qdrant_mem_store.upsert_batch_async( - "test_collection", [memory_record1, memory_record2, memory_record3] - ) + await qdrant_mem_store.create_collection("test_collection") + await qdrant_mem_store.upsert_batch("test_collection", [memory_record1, memory_record2, memory_record3]) test_embedding = memory_record2.embedding test_embedding[0] = test_embedding[0] + 0.025 - result = await qdrant_mem_store.get_nearest_matches_async( + result = await qdrant_mem_store.get_nearest_matches( "test_collection", test_embedding, limit=2, diff --git a/python/tests/integration/connectors/memory/test_redis.py b/python/tests/integration/connectors/memory/test_redis.py index ee76e05762e2..d5ef2eaf7315 100644 --- a/python/tests/integration/connectors/memory/test_redis.py +++ b/python/tests/integration/connectors/memory/test_redis.py @@ -88,7 +88,7 @@ def memory_store(connection_string): yield redis_mem_store # Delete test collection after test - asyncio.run(redis_mem_store.delete_collection_async(TEST_COLLECTION_NAME)) + asyncio.run(redis_mem_store.delete_collection(TEST_COLLECTION_NAME)) def test_constructor(memory_store): @@ -97,64 +97,64 @@ def test_constructor(memory_store): @pytest.mark.asyncio -async def test_create_and_does_collection_exist_async(memory_store): +async def test_create_and_does_collection_exist(memory_store): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) - exists = await memory.does_collection_exist_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) + exists = await memory.does_collection_exist(TEST_COLLECTION_NAME) assert exists @pytest.mark.asyncio -async def test_delete_collection_async(memory_store): +async def test_delete_collection(memory_store): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) - await memory.delete_collection_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) + await memory.delete_collection(TEST_COLLECTION_NAME) - exists = await memory.does_collection_exist_async(TEST_COLLECTION_NAME) + exists = await memory.does_collection_exist(TEST_COLLECTION_NAME) assert not exists # Delete a non-existent collection with no error - await memory.delete_collection_async(TEST_COLLECTION_NAME) + await memory.delete_collection(TEST_COLLECTION_NAME) @pytest.mark.asyncio -async def test_get_collections_async(memory_store): +async def test_get_collections(memory_store): memory = memory_store collection_names = ["c1", "c2", "c3"] for c_n in collection_names: - await memory.create_collection_async(c_n) + await memory.create_collection(c_n) - names_from_func = await memory.get_collections_async() + names_from_func = await memory.get_collections() for c_n in collection_names: assert c_n in names_from_func - await memory.delete_collection_async(c_n) + await memory.delete_collection(c_n) @pytest.mark.asyncio -async def test_does_collection_exist_async(memory_store): +async def test_does_collection_exist(memory_store): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) - exists = await memory.does_collection_exist_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) + exists = await memory.does_collection_exist(TEST_COLLECTION_NAME) assert exists - await memory.delete_collection_async(TEST_COLLECTION_NAME) - exists = await memory.does_collection_exist_async(TEST_COLLECTION_NAME) + await memory.delete_collection(TEST_COLLECTION_NAME) + exists = await memory.does_collection_exist(TEST_COLLECTION_NAME) 
assert not exists @pytest.mark.asyncio -async def test_upsert_async_and_get_async(memory_store, memory_record1): +async def test_upsert_and_get(memory_store, memory_record1): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) # Insert a record - await memory.upsert_async(TEST_COLLECTION_NAME, memory_record1) - fetch_1 = await memory.get_async(TEST_COLLECTION_NAME, memory_record1._id, True) + await memory.upsert(TEST_COLLECTION_NAME, memory_record1) + fetch_1 = await memory.get(TEST_COLLECTION_NAME, memory_record1._id, True) assert fetch_1 is not None, "Could not get record" assert fetch_1._id == memory_record1._id @@ -170,27 +170,23 @@ async def test_upsert_async_and_get_async(memory_store, memory_record1): # Update a record memory_record1._text = "updated sample text1" - await memory.upsert_async(TEST_COLLECTION_NAME, memory_record1) - fetch_1 = await memory.get_async(TEST_COLLECTION_NAME, memory_record1._id, True) + await memory.upsert(TEST_COLLECTION_NAME, memory_record1) + fetch_1 = await memory.get(TEST_COLLECTION_NAME, memory_record1._id, True) assert fetch_1 is not None, "Could not get record" assert fetch_1._text == memory_record1._text, "Did not update record" @pytest.mark.asyncio -async def test_upsert_batch_async_and_get_batch_async( - memory_store, memory_record1, memory_record2 -): +async def test_upsert_batch_and_get_batch(memory_store, memory_record1, memory_record2): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) ids = [memory_record1._id, memory_record2._id] - await memory.upsert_batch_async( - TEST_COLLECTION_NAME, [memory_record1, memory_record2] - ) + await memory.upsert_batch(TEST_COLLECTION_NAME, [memory_record1, memory_record2]) - fetched = await memory.get_batch_async(TEST_COLLECTION_NAME, ids, True) + fetched = await memory.get_batch(TEST_COLLECTION_NAME, ids, True) assert len(fetched) > 0, "Could not get records" for f in fetched: @@ -198,47 +194,43 @@ async def test_upsert_batch_async_and_get_batch_async( @pytest.mark.asyncio -async def test_remove_async(memory_store, memory_record1): +async def test_remove(memory_store, memory_record1): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) - await memory.upsert_async(TEST_COLLECTION_NAME, memory_record1) - await memory.remove_async(TEST_COLLECTION_NAME, memory_record1._id) - get_record = await memory.get_async(TEST_COLLECTION_NAME, memory_record1._id, False) + await memory.upsert(TEST_COLLECTION_NAME, memory_record1) + await memory.remove(TEST_COLLECTION_NAME, memory_record1._id) + get_record = await memory.get(TEST_COLLECTION_NAME, memory_record1._id, False) assert not get_record, "Record was not removed" @pytest.mark.asyncio -async def test_remove_batch_async(memory_store, memory_record1, memory_record2): +async def test_remove_batch(memory_store, memory_record1, memory_record2): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) ids = [memory_record1._id, memory_record2._id] - await memory.upsert_batch_async( - TEST_COLLECTION_NAME, [memory_record1, memory_record2] - ) - await memory.remove_batch_async(TEST_COLLECTION_NAME, ids) - get_records = await memory.get_batch_async(TEST_COLLECTION_NAME, ids, False) + await memory.upsert_batch(TEST_COLLECTION_NAME, 
[memory_record1, memory_record2]) + await memory.remove_batch(TEST_COLLECTION_NAME, ids) + get_records = await memory.get_batch(TEST_COLLECTION_NAME, ids, False) assert len(get_records) == 0, "Records were not removed" @pytest.mark.asyncio -async def test_get_nearest_match_async(memory_store, memory_record1, memory_record2): +async def test_get_nearest_match(memory_store, memory_record1, memory_record2): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) - await memory.upsert_batch_async( - TEST_COLLECTION_NAME, [memory_record1, memory_record2] - ) + await memory.upsert_batch(TEST_COLLECTION_NAME, [memory_record1, memory_record2]) test_embedding = memory_record1.embedding.copy() test_embedding[0] = test_embedding[0] + 0.01 - result = await memory.get_nearest_match_async( + result = await memory.get_nearest_match( TEST_COLLECTION_NAME, test_embedding, min_relevance_score=0.0, @@ -258,20 +250,16 @@ async def test_get_nearest_match_async(memory_store, memory_record1, memory_reco @pytest.mark.asyncio -async def test_get_nearest_matches_async( - memory_store, memory_record1, memory_record2, memory_record3 -): +async def test_get_nearest_matches(memory_store, memory_record1, memory_record2, memory_record3): memory = memory_store - await memory.create_collection_async(TEST_COLLECTION_NAME) + await memory.create_collection(TEST_COLLECTION_NAME) - await memory.upsert_batch_async( - TEST_COLLECTION_NAME, [memory_record1, memory_record2, memory_record3] - ) + await memory.upsert_batch(TEST_COLLECTION_NAME, [memory_record1, memory_record2, memory_record3]) test_embedding = memory_record2.embedding.copy() test_embedding[0] = test_embedding[0] + 0.025 - result = await memory.get_nearest_matches_async( + result = await memory.get_nearest_matches( TEST_COLLECTION_NAME, test_embedding, limit=2, diff --git a/python/tests/integration/connectors/memory/test_usearch.py b/python/tests/integration/connectors/memory/test_usearch.py index 30e24295b42e..da2a96060468 100644 --- a/python/tests/integration/connectors/memory/test_usearch.py +++ b/python/tests/integration/connectors/memory/test_usearch.py @@ -89,9 +89,7 @@ def memory_record3(): ) -def gen_memory_records( - count: int, ndim: int, start_index: int = 0 -) -> List[MemoryRecord]: +def gen_memory_records(count: int, ndim: int, start_index: int = 0) -> List[MemoryRecord]: return [ MemoryRecord( is_reference=False, @@ -107,14 +105,10 @@ def gen_memory_records( ] -def compare_memory_records( - record1: MemoryRecord, record2: MemoryRecord, with_embedding: bool -): +def compare_memory_records(record1: MemoryRecord, record2: MemoryRecord, with_embedding: bool): """Compare two MemoryRecord instances and assert they are the same.""" - assert ( - record1._key == record2._key - ), f"_key mismatch: {record1._key} != {record2._key}" + assert record1._key == record2._key, f"_key mismatch: {record1._key} != {record2._key}" assert ( record1._timestamp == record2._timestamp ), f"_timestamp mismatch: {record1._timestamp} != {record2._timestamp}" @@ -128,91 +122,81 @@ def compare_memory_records( assert ( record1._description == record2._description ), f"_description mismatch: {record1._description} != {record2._description}" - assert ( - record1._text == record2._text - ), f"_text mismatch: {record1._text} != {record2._text}" + assert record1._text == record2._text, f"_text mismatch: {record1._text} != {record2._text}" assert ( record1._additional_metadata == record2._additional_metadata ), 
f"_additional_metadata mismatch: {record1._additional_metadata} != {record2._additional_metadata}" if with_embedding is True: - assert np.array_equal( - record1._embedding, record2._embedding - ), "_embedding arrays are not equal" + assert np.array_equal(record1._embedding, record2._embedding), "_embedding arrays are not equal" @pytest.mark.asyncio -async def test_create_and_get_collection_async(): +async def test_create_and_get_collection(): memory = USearchMemoryStore() - await memory.create_collection_async("test_collection1") - await memory.create_collection_async("test_collection2") - await memory.create_collection_async("test_collection3") - result = await memory.get_collections_async() + await memory.create_collection("test_collection1") + await memory.create_collection("test_collection2") + await memory.create_collection("test_collection3") + result = await memory.get_collections() assert len(result) == 3 assert result == ["test_collection1", "test_collection2", "test_collection3"] @pytest.mark.asyncio -async def test_delete_collection_async(): +async def test_delete_collection(): memory = USearchMemoryStore() - await memory.create_collection_async("test_collection") - await memory.delete_collection_async("test_collection") - result = await memory.get_collections_async() + await memory.create_collection("test_collection") + await memory.delete_collection("test_collection") + result = await memory.get_collections() assert len(result) == 0 - await memory.create_collection_async("test_collection") - await memory.delete_collection_async("TEST_COLLECTION") - result = await memory.get_collections_async() + await memory.create_collection("test_collection") + await memory.delete_collection("TEST_COLLECTION") + result = await memory.get_collections() assert len(result) == 0 @pytest.mark.asyncio -async def test_does_collection_exist_async(): +async def test_does_collection_exist(): memory = USearchMemoryStore() - await memory.create_collection_async("test_collection") - result = await memory.does_collection_exist_async("test_collection") + await memory.create_collection("test_collection") + result = await memory.does_collection_exist("test_collection") assert result is True - result = await memory.does_collection_exist_async("TEST_COLLECTION") + result = await memory.does_collection_exist("TEST_COLLECTION") assert result is True @pytest.mark.asyncio -async def test_upsert_and_get_async_with_no_embedding(memory_record1: MemoryRecord): +async def test_upsert_and_get_with_no_embedding(memory_record1: MemoryRecord): memory = USearchMemoryStore() - await memory.create_collection_async("test_collection", ndim=2) - await memory.upsert_async("test_collection", memory_record1) + await memory.create_collection("test_collection", ndim=2) + await memory.upsert("test_collection", memory_record1) - result = await memory.get_async("test_collection", "test_id1", False) + result = await memory.get("test_collection", "test_id1", False) compare_memory_records(result, memory_record1, False) @pytest.mark.asyncio -async def test_upsert_and_get_async_with_embedding(memory_record1: MemoryRecord): +async def test_upsert_and_get_with_embedding(memory_record1: MemoryRecord): memory = USearchMemoryStore() - await memory.create_collection_async("test_collection", ndim=2) - await memory.upsert_async("test_collection", memory_record1) + await memory.create_collection("test_collection", ndim=2) + await memory.upsert("test_collection", memory_record1) - result = await memory.get_async("test_collection", "test_id1", True) + 
result = await memory.get("test_collection", "test_id1", True) compare_memory_records(result, memory_record1, True) @pytest.mark.asyncio -async def test_upsert_and_get_batch_async( - memory_record1: MemoryRecord, memory_record2: MemoryRecord -): +async def test_upsert_and_get_batch(memory_record1: MemoryRecord, memory_record2: MemoryRecord): memory = USearchMemoryStore() - await memory.create_collection_async( - "test_collection", ndim=memory_record1.embedding.shape[0] - ) + await memory.create_collection("test_collection", ndim=memory_record1.embedding.shape[0]) - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) - result = await memory.get_batch_async( - "test_collection", ["test_id1", "test_id2"], True - ) + result = await memory.get_batch("test_collection", ["test_id1", "test_id2"], True) assert len(result) == 2 compare_memory_records(result[0], memory_record1, True) @@ -220,54 +204,40 @@ async def test_upsert_and_get_batch_async( @pytest.mark.asyncio -async def test_remove_async(memory_record1): +async def test_remove(memory_record1): memory = USearchMemoryStore() - await memory.create_collection_async( - "test_collection", ndim=memory_record1.embedding.shape[0] - ) + await memory.create_collection("test_collection", ndim=memory_record1.embedding.shape[0]) - await memory.upsert_async("test_collection", memory_record1) - await memory.remove_async("test_collection", "test_id1") + await memory.upsert("test_collection", memory_record1) + await memory.remove("test_collection", "test_id1") - # memory.get_async should raise Exception if record is not found + # memory.get should raise Exception if record is not found with pytest.raises(KeyError): - await memory.get_async("test_collection", "test_id1", True) + await memory.get("test_collection", "test_id1", True) @pytest.mark.asyncio -async def test_remove_batch_async( - memory_record1: MemoryRecord, memory_record2: MemoryRecord -): +async def test_remove_batch(memory_record1: MemoryRecord, memory_record2: MemoryRecord): memory = USearchMemoryStore() - await memory.create_collection_async( - "test_collection", ndim=memory_record1.embedding.shape[0] - ) + await memory.create_collection("test_collection", ndim=memory_record1.embedding.shape[0]) - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) - await memory.remove_batch_async("test_collection", ["test_id1", "test_id2"]) + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) + await memory.remove_batch("test_collection", ["test_id1", "test_id2"]) - result = await memory.get_batch_async( - "test_collection", ["test_id1", "test_id2"], True - ) + result = await memory.get_batch("test_collection", ["test_id1", "test_id2"], True) assert len(result) == 0 @pytest.mark.asyncio -async def test_get_nearest_match_async( - memory_record1: MemoryRecord, memory_record2: MemoryRecord -): +async def test_get_nearest_match(memory_record1: MemoryRecord, memory_record2: MemoryRecord): memory = USearchMemoryStore() collection_name = "test_collection" - await memory.create_collection_async( - collection_name, ndim=memory_record1.embedding.shape[0], metric="cos" - ) + await memory.create_collection(collection_name, ndim=memory_record1.embedding.shape[0], metric="cos") - await memory.upsert_batch_async(collection_name, [memory_record1, memory_record2]) + await memory.upsert_batch(collection_name, [memory_record1, memory_record2]) - result = 
await memory.get_nearest_match_async( - collection_name, np.array([0.5, 0.5]), exact=True - ) + result = await memory.get_nearest_match(collection_name, np.array([0.5, 0.5]), exact=True) assert len(result) == 2 assert isinstance(result[0], MemoryRecord) @@ -275,21 +245,15 @@ async def test_get_nearest_match_async( @pytest.mark.asyncio -async def test_get_nearest_matches_async( - memory_record1: MemoryRecord, memory_record2: MemoryRecord -): +async def test_get_nearest_matches(memory_record1: MemoryRecord, memory_record2: MemoryRecord): memory = USearchMemoryStore() collection_name = "test_collection" - await memory.create_collection_async( - collection_name, ndim=memory_record1.embedding.shape[0], metric="cos" - ) + await memory.create_collection(collection_name, ndim=memory_record1.embedding.shape[0], metric="cos") - await memory.upsert_batch_async(collection_name, [memory_record1, memory_record2]) + await memory.upsert_batch(collection_name, [memory_record1, memory_record2]) - results = await memory.get_nearest_matches_async( - collection_name, np.array([0.5, 0.5]), limit=2, exact=True - ) + results = await memory.get_nearest_matches(collection_name, np.array([0.5, 0.5]), limit=2, exact=True) assert len(results) == 2 assert isinstance(results[0][0], MemoryRecord) @@ -298,24 +262,16 @@ async def test_get_nearest_matches_async( @pytest.mark.asyncio -async def test_create_and_save_collection_async( - tmpdir, memory_record1, memory_record2, memory_record3 -): +async def test_create_and_save_collection(tmpdir, memory_record1, memory_record2, memory_record3): memory = USearchMemoryStore(tmpdir) - await memory.create_collection_async("test_collection1", ndim=2) - await memory.create_collection_async("test_collection2", ndim=2) - await memory.create_collection_async("test_collection3", ndim=2) - await memory.upsert_batch_async( - "test_collection1", [memory_record1, memory_record2] - ) - await memory.upsert_batch_async( - "test_collection2", [memory_record2, memory_record3] - ) - await memory.upsert_batch_async( - "test_collection3", [memory_record1, memory_record3] - ) - await memory.close_async() + await memory.create_collection("test_collection1", ndim=2) + await memory.create_collection("test_collection2", ndim=2) + await memory.create_collection("test_collection3", ndim=2) + await memory.upsert_batch("test_collection1", [memory_record1, memory_record2]) + await memory.upsert_batch("test_collection2", [memory_record2, memory_record3]) + await memory.upsert_batch("test_collection3", [memory_record1, memory_record3]) + await memory.close() assert (tmpdir / "test_collection1.parquet").exists() assert (tmpdir / "test_collection1.usearch").exists() @@ -325,65 +281,59 @@ async def test_create_and_save_collection_async( assert (tmpdir / "test_collection3.usearch").exists() memory = USearchMemoryStore(tmpdir) - result = await memory.get_collections_async() + result = await memory.get_collections() assert len(result) == 3 assert set(result) == {"test_collection1", "test_collection2", "test_collection3"} - await memory.delete_collection_async("test_collection1") - await memory.delete_collection_async("test_collection3") - await memory.close_async() + await memory.delete_collection("test_collection1") + await memory.delete_collection("test_collection3") + await memory.close() memory = USearchMemoryStore(tmpdir) - result = await memory.get_collections_async() + result = await memory.get_collections() assert len(result) == 1 assert set(result) == {"test_collection2"} - await 
memory.delete_collection_async("test_collection2") - await memory.close_async() + await memory.delete_collection("test_collection2") + await memory.close() memory = USearchMemoryStore(tmpdir) - result = await memory.get_collections_async() + result = await memory.get_collections() assert len(result) == 0 @pytest.mark.asyncio -async def test_upsert_and_get_async_with_embedding_with_persist( +async def test_upsert_and_get_with_embedding_with_persist( tmpdir, memory_record1: MemoryRecord, memory_record1_with_collision: MemoryRecord ): memory = USearchMemoryStore(tmpdir) - assert len(await memory.get_collections_async()) == 0 - await memory.create_collection_async("test_collection", ndim=2) - await memory.upsert_async("test_collection", memory_record1) - await memory.close_async() + assert len(await memory.get_collections()) == 0 + await memory.create_collection("test_collection", ndim=2) + await memory.upsert("test_collection", memory_record1) + await memory.close() memory = USearchMemoryStore(tmpdir) - assert len(await memory.get_collections_async()) == 1 - result = await memory.get_async("test_collection", "test_id1", True) + assert len(await memory.get_collections()) == 1 + result = await memory.get("test_collection", "test_id1", True) compare_memory_records(result, memory_record1, True) - await memory.upsert_async("test_collection", memory_record1_with_collision) - result = await memory.get_async("test_collection", "test_id1", True) + await memory.upsert("test_collection", memory_record1_with_collision) + result = await memory.get("test_collection", "test_id1", True) compare_memory_records(result, memory_record1_with_collision, True) - await memory.close_async() + await memory.close() memory = USearchMemoryStore(tmpdir) - assert len(await memory.get_collections_async()) == 1 - result = await memory.get_async("test_collection", "test_id1", True) + assert len(await memory.get_collections()) == 1 + result = await memory.get("test_collection", "test_id1", True) compare_memory_records(result, memory_record1_with_collision, True) @pytest.mark.asyncio -async def test_remove_get_async( - memory_record1: MemoryRecord, memory_record2: MemoryRecord -): +async def test_remove_get(memory_record1: MemoryRecord, memory_record2: MemoryRecord): memory = USearchMemoryStore() - await memory.create_collection_async( - "test_collection", ndim=memory_record1.embedding.shape[0] - ) + await memory.create_collection("test_collection", ndim=memory_record1.embedding.shape[0]) - await memory.upsert_batch_async("test_collection", [memory_record1, memory_record2]) - await memory.remove_async("test_collection", "test_id1") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) + await memory.remove("test_collection", "test_id1") - result = await memory.get_batch_async( - "test_collection", ["test_id1", "test_id2"], True - ) + result = await memory.get_batch("test_collection", ["test_id1", "test_id2"], True) assert len(result) == 1 compare_memory_records(result[0], memory_record2, True) diff --git a/python/tests/integration/connectors/memory/test_weaviate_memory_store.py b/python/tests/integration/connectors/memory/test_weaviate_memory_store.py index cf7d2e7cc821..84b884dc0e8c 100644 --- a/python/tests/integration/connectors/memory/test_weaviate_memory_store.py +++ b/python/tests/integration/connectors/memory/test_weaviate_memory_store.py @@ -98,21 +98,19 @@ def memory_store(): @pytest.fixture def memory_store_with_empty_collection(memory_store, event_loop): collection_name = "MindRepository" - 
event_loop.run_until_complete(memory_store.create_collection_async(collection_name)) + event_loop.run_until_complete(memory_store.create_collection(collection_name)) return collection_name, memory_store @pytest.fixture def memory_store_with_collection(memory_store, event_loop, documents): collection_name = "BigMemory" - event_loop.run_until_complete(memory_store.create_collection_async(collection_name)) + event_loop.run_until_complete(memory_store.create_collection(collection_name)) keys = ["Alpha", "Beta", "Gamma", "Delta", "Epsilon"] for document, key in zip(documents, keys): document._key = key - event_loop.run_until_complete( - memory_store.upsert_async(collection_name, document) - ) + event_loop.run_until_complete(memory_store.upsert(collection_name, document)) return collection_name, memory_store @@ -127,7 +125,7 @@ def test_embedded_weaviate(): @pytest.mark.asyncio async def test_create_collection(memory_store): collection_name = "MemoryVault" - await memory_store.create_collection_async(collection_name) + await memory_store.create_collection(collection_name) assert memory_store.client.schema.get(collection_name) @@ -137,9 +135,9 @@ async def test_get_collections(memory_store): collection_names = ["MemoryVault", "ThoughtArchive"] for collection_name in collection_names: - await memory_store.create_collection_async(collection_name) + await memory_store.create_collection(collection_name) - results = await memory_store.get_collections_async() + results = await memory_store.get_collections() assert set(results) == set(collection_names) @@ -151,7 +149,7 @@ async def test_delete_collection(memory_store_with_empty_collection): schemas = memory_store.client.schema.get()["classes"] assert len(schemas) == 1 - await memory_store.delete_collection_async(collection_name) + await memory_store.delete_collection(collection_name) schemas = memory_store.client.schema.get()["classes"] assert len(schemas) == 0 @@ -163,8 +161,8 @@ async def test_collection_exists(memory_store_with_empty_collection): memory_store.client.schema.get()["classes"] - assert await memory_store.does_collection_exist_async(collection_name) - assert not await memory_store.does_collection_exist_async("NotACollection") + assert await memory_store.does_collection_exist(collection_name) + assert not await memory_store.does_collection_exist("NotACollection") @pytest.mark.asyncio @@ -172,11 +170,9 @@ async def test_upsert(memory_store_with_empty_collection, documents): collection_name, memory_store = memory_store_with_empty_collection for doc in documents[:2]: - await memory_store.upsert_async(collection_name, doc) + await memory_store.upsert(collection_name, doc) - total_docs = memory_store.client.data_object.get(class_name=collection_name)[ - "totalResults" - ] + total_docs = memory_store.client.data_object.get(class_name=collection_name)["totalResults"] assert total_docs == 2 @@ -184,11 +180,9 @@ async def test_upsert(memory_store_with_empty_collection, documents): async def test_upsert_batch(memory_store_with_empty_collection, documents): collection_name, memory_store = memory_store_with_empty_collection - await memory_store.upsert_batch_async(collection_name, documents) + await memory_store.upsert_batch(collection_name, documents) - total_docs = memory_store.client.data_object.get(class_name=collection_name)[ - "totalResults" - ] + total_docs = memory_store.client.data_object.get(class_name=collection_name)["totalResults"] assert total_docs == len(documents) @@ -199,21 +193,15 @@ async def test_get(memory_store_with_collection, 
documents): key = "Alpha" expected_result = [doc for doc in documents if doc._key == key][0] - actual_result = await memory_store.get_async( - collection_name, key, with_embedding=True - ) + actual_result = await memory_store.get(collection_name, key, with_embedding=True) npt.assert_equal(expected_result.__dict__, actual_result.__dict__) - actual_result = await memory_store.get_async( - collection_name, key, with_embedding=False - ) + actual_result = await memory_store.get(collection_name, key, with_embedding=False) expected_result.__dict__["_embedding"] = None npt.assert_equal(expected_result.__dict__, actual_result.__dict__) key = "NotInCollection" - actual_result = await memory_store.get_async( - collection_name, key, with_embedding=True - ) + actual_result = await memory_store.get(collection_name, key, with_embedding=True) assert actual_result is None @@ -226,16 +214,12 @@ async def test_get_batch(memory_store_with_collection, documents): expected_results = [doc for doc in documents if doc._key in keys] - actual_results = await memory_store.get_batch_async( - collection_name, keys, with_embedding=True - ) + actual_results = await memory_store.get_batch(collection_name, keys, with_embedding=True) for expected, actual in zip(expected_results, actual_results): npt.assert_equal(expected.__dict__, actual.__dict__) - actual_results = await memory_store.get_batch_async( - collection_name, keys, with_embedding=False - ) + actual_results = await memory_store.get_batch(collection_name, keys, with_embedding=False) for expected, actual in zip(expected_results, actual_results): expected.__dict__["_embedding"] = None @@ -248,11 +232,9 @@ async def test_remove_batch(memory_store_with_collection, documents): keys = ["Alpha", "Beta", "Gamma"] - await memory_store.remove_batch_async(collection_name, keys) + await memory_store.remove_batch(collection_name, keys) - remaining_docs = memory_store.client.data_object.get(class_name=collection_name)[ - "totalResults" - ] + remaining_docs = memory_store.client.data_object.get(class_name=collection_name)["totalResults"] assert remaining_docs == len(documents) - len(keys) @@ -262,11 +244,9 @@ async def test_remove(memory_store_with_collection, documents): key = "Alpha" - await memory_store.remove_async(collection_name, key) + await memory_store.remove(collection_name, key) - remaining_docs = memory_store.client.data_object.get(class_name=collection_name)[ - "totalResults" - ] + remaining_docs = memory_store.client.data_object.get(class_name=collection_name)["totalResults"] assert remaining_docs == len(documents) - 1 @@ -279,7 +259,7 @@ async def test_get_nearest_matches(memory_store_with_collection, documents): limit = 4 expected_result = [documents[3], documents[4]] - actual_result = await memory_store.get_nearest_matches_async( + actual_result = await memory_store.get_nearest_matches( collection_name, search_query, limit, min_relevance_score, with_embeddings=True ) actual_docss, _ = list(zip(*actual_result)) @@ -288,7 +268,7 @@ async def test_get_nearest_matches(memory_store_with_collection, documents): for expected, actual in zip(expected_result, actual_docss): npt.assert_equal(expected.__dict__, actual.__dict__) - actual_result = await memory_store.get_nearest_matches_async( + actual_result = await memory_store.get_nearest_matches( collection_name, search_query, limit, min_relevance_score, with_embeddings=False ) actual_docss, _ = list(zip(*actual_result)) @@ -307,13 +287,13 @@ async def test_get_nearest_match(memory_store_with_collection, documents): 
min_relevance_score = 0.9 expected_result = documents[3] - actual_result = await memory_store.get_nearest_match_async( + actual_result = await memory_store.get_nearest_match( collection_name, search_query, min_relevance_score, with_embedding=True ) npt.assert_equal(expected_result.__dict__, actual_result[0].__dict__) - actual_result = await memory_store.get_nearest_match_async( + actual_result = await memory_store.get_nearest_match( collection_name, search_query, min_relevance_score, with_embedding=False ) diff --git a/python/tests/integration/embeddings/test_azure_oai_embedding_service.py b/python/tests/integration/embeddings/test_azure_oai_embedding_service.py index 79851f65907a..fd53a0e17398 100644 --- a/python/tests/integration/embeddings/test_azure_oai_embedding_service.py +++ b/python/tests/integration/embeddings/test_azure_oai_embedding_service.py @@ -3,6 +3,7 @@ import os import pytest +from openai import AsyncAzureOpenAI import semantic_kernel as sk import semantic_kernel.connectors.ai.open_ai as sk_oai @@ -20,14 +21,54 @@ async def test_azure_text_embedding_service(create_kernel, get_aoai_config): deployment_name = "text-embedding-ada-002" kernel.add_text_embedding_generation_service( - "aoai-ada", sk_oai.AzureTextEmbedding(deployment_name, endpoint, api_key) + "aoai-ada", + sk_oai.AzureTextEmbedding( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + ), ) kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore()) - await kernel.memory.save_information_async( - "test", id="info1", text="this is a test" + await kernel.memory.save_information("test", id="info1", text="this is a test") + await kernel.memory.save_reference( + "test", + external_id="info1", + text="this is a test", + external_source_name="external source", + ) + + +@pytest.mark.asyncio +async def test_azure_text_embedding_service_with_provided_client(create_kernel, get_aoai_config): + kernel = create_kernel + + _, api_key, endpoint = get_aoai_config + + if "Python_Integration_Tests" in os.environ: + deployment_name = os.environ["AzureOpenAIEmbeddings__DeploymentName"] + else: + deployment_name = "text-embedding-ada-002" + + client = AsyncAzureOpenAI( + azure_endpoint=endpoint, + azure_deployment=deployment_name, + api_key=api_key, + api_version="2023-05-15", + default_headers={"Test-User-X-ID": "test"}, + ) + + kernel.add_text_embedding_generation_service( + "aoai-ada-2", + sk_oai.AzureTextEmbedding( + deployment_name=deployment_name, + async_client=client, + ), ) - await kernel.memory.save_reference_async( + kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore()) + + await kernel.memory.save_information("test", id="info1", text="this is a test") + await kernel.memory.save_reference( "test", external_id="info1", text="this is a test", @@ -46,10 +87,12 @@ async def test_batch_azure_embeddings(get_aoai_config): else: deployment_name = "ada-002" - embeddings_service = sk_oai.AzureTextEmbedding(deployment_name, endpoint, api_key) - texts = ["hello world", "goodbye world"] - results = await embeddings_service.generate_embeddings_async(texts) - batch_results = await embeddings_service.generate_embeddings_async( - texts, batch_size=1 + embeddings_service = sk_oai.AzureTextEmbedding( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, ) + texts = ["hello world", "goodbye world"] + results = await embeddings_service.generate_embeddings(texts) + batch_results = await embeddings_service.generate_embeddings(texts, batch_size=1) assert len(results) 
== len(batch_results) diff --git a/python/tests/integration/embeddings/test_gp_embedding_service.py b/python/tests/integration/embeddings/test_gp_embedding_service.py index 7e851ab3a6e3..398d2bd2d499 100644 --- a/python/tests/integration/embeddings/test_gp_embedding_service.py +++ b/python/tests/integration/embeddings/test_gp_embedding_service.py @@ -11,9 +11,7 @@ import semantic_kernel.connectors.ai.google_palm as sk_gp pytestmark = [ - pytest.mark.skipif( - sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater" - ), + pytest.mark.skipif(sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater"), pytest.mark.skipif( "Python_Integration_Tests" in os.environ, reason="Google Palm integration tests are only set up to run locally", @@ -27,16 +25,12 @@ async def test_gp_embedding_service(create_kernel, get_gp_config): api_key = get_gp_config - palm_text_embed = sk_gp.GooglePalmTextEmbedding( - "models/embedding-gecko-001", api_key - ) + palm_text_embed = sk_gp.GooglePalmTextEmbedding("models/embedding-gecko-001", api_key) kernel.add_text_embedding_generation_service("gecko", palm_text_embed) kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore()) - await kernel.memory.save_information_async( - "test", id="info1", text="this is a test" - ) - await kernel.memory.save_reference_async( + await kernel.memory.save_information("test", id="info1", text="this is a test") + await kernel.memory.save_reference( "test", external_id="info1", text="this is a test", diff --git a/python/tests/integration/embeddings/test_hf_embedding_service.py b/python/tests/integration/embeddings/test_hf_embedding_service.py index 7e89213e279e..fb59249651c0 100644 --- a/python/tests/integration/embeddings/test_hf_embedding_service.py +++ b/python/tests/integration/embeddings/test_hf_embedding_service.py @@ -13,32 +13,20 @@ async def test_hf_embeddings_with_memories(): # Configure LLM service kernel.add_text_embedding_generation_service( "sentence-transformers/all-MiniLM-L6-v2", - sk_hf.HuggingFaceTextEmbedding("sentence-transformers/all-MiniLM-L6-v2"), + sk_hf.HuggingFaceTextEmbedding(ai_model_id="sentence-transformers/all-MiniLM-L6-v2"), ) kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore()) # Add some documents to the semantic memory - await kernel.memory.save_information_async( - "test", id="info1", text="Sharks are fish." - ) - await kernel.memory.save_information_async( - "test", id="info2", text="Whales are mammals." - ) - await kernel.memory.save_information_async( - "test", id="info3", text="Penguins are birds." - ) - await kernel.memory.save_information_async( - "test", id="info4", text="Dolphins are mammals." - ) - await kernel.memory.save_information_async( - "test", id="info5", text="Flies are insects." - ) + await kernel.memory.save_information("test", id="info1", text="Sharks are fish.") + await kernel.memory.save_information("test", id="info2", text="Whales are mammals.") + await kernel.memory.save_information("test", id="info3", text="Penguins are birds.") + await kernel.memory.save_information("test", id="info4", text="Dolphins are mammals.") + await kernel.memory.save_information("test", id="info5", text="Flies are insects.") # Search for documents query = "What are mammals?" 
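[Editorial note: the rename pattern in the hunks above is uniform: `save_information_async`/`search_async` become `save_information`/`search`. A minimal sketch of the renamed memory API, using only calls that appear in the `+` lines of this diff; it assumes a kernel that already has a text-embedding service registered, as in the surrounding test.]

import semantic_kernel as sk

async def memory_roundtrip(kernel: sk.Kernel) -> None:
    # Assumes an embedding generation service was already added to `kernel`
    # (e.g. via kernel.add_text_embedding_generation_service, as above).
    kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore())

    # No *_async suffixes anymore: save_information / search.
    await kernel.memory.save_information("test", id="info1", text="Whales are mammals.")
    await kernel.memory.save_information("test", id="info2", text="Sharks are fish.")

    result = await kernel.memory.search("test", "What are mammals?", limit=1, min_relevance_score=0.0)
    print(result[0].text)  # expected to surface the "Whales are mammals." record
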
- result = await kernel.memory.search_async( - "test", query, limit=2, min_relevance_score=0.0 - ) + result = await kernel.memory.search("test", query, limit=2, min_relevance_score=0.0) print(f"Query: {query}") print(f"\tAnswer 1: {result[0].text}") print(f"\tAnswer 2: {result[1].text}\n") @@ -46,25 +34,19 @@ async def test_hf_embeddings_with_memories(): assert "mammals." in result[1].text query = "What are fish?" - result = await kernel.memory.search_async( - "test", query, limit=1, min_relevance_score=0.0 - ) + result = await kernel.memory.search("test", query, limit=1, min_relevance_score=0.0) print(f"Query: {query}") print(f"\tAnswer: {result[0].text}\n") assert result[0].text == "Sharks are fish." query = "What are insects?" - result = await kernel.memory.search_async( - "test", query, limit=1, min_relevance_score=0.0 - ) + result = await kernel.memory.search("test", query, limit=1, min_relevance_score=0.0) print(f"Query: {query}") print(f"\tAnswer: {result[0].text}\n") assert result[0].text == "Flies are insects." query = "What are birds?" - result = await kernel.memory.search_async( - "test", query, limit=1, min_relevance_score=0.0 - ) + result = await kernel.memory.search("test", query, limit=1, min_relevance_score=0.0) print(f"Query: {query}") print(f"\tAnswer: {result[0].text}\n") assert result[0].text == "Penguins are birds." diff --git a/python/tests/integration/embeddings/test_oai_embedding_service.py b/python/tests/integration/embeddings/test_oai_embedding_service.py index aebd95cecb54..947137c3908a 100644 --- a/python/tests/integration/embeddings/test_oai_embedding_service.py +++ b/python/tests/integration/embeddings/test_oai_embedding_service.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. import pytest +from openai import AsyncOpenAI import semantic_kernel as sk import semantic_kernel.connectors.ai.open_ai as sk_oai @@ -18,10 +19,37 @@ async def test_oai_embedding_service(create_kernel, get_oai_config): ) kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore()) - await kernel.memory.save_information_async( - "test", id="info1", text="this is a test" + await kernel.memory.save_information("test", id="info1", text="this is a test") + await kernel.memory.save_reference( + "test", + external_id="info1", + text="this is a test", + external_source_name="external source", ) - await kernel.memory.save_reference_async( + + +@pytest.mark.asyncio +async def test_oai_embedding_service_with_provided_client(create_kernel, get_oai_config): + kernel = create_kernel + + api_key, org_id = get_oai_config + + client = AsyncOpenAI( + api_key=api_key, + organization=org_id, + ) + + kernel.add_text_embedding_generation_service( + "oai-ada-2", + sk_oai.OpenAITextEmbedding( + ai_model_id="text-embedding-ada-002", + async_client=client, + ), + ) + kernel.register_memory_store(memory_store=sk.memory.VolatileMemoryStore()) + + await kernel.memory.save_information("test", id="info1", text="this is a test") + await kernel.memory.save_reference( "test", external_id="info1", text="this is a test", diff --git a/python/tests/integration/fakes/email_plugin_fake.py b/python/tests/integration/fakes/email_plugin_fake.py new file mode 100644 index 000000000000..0ad4d0bd0d63 --- /dev/null +++ b/python/tests/integration/fakes/email_plugin_fake.py @@ -0,0 +1,25 @@ +# Copyright (c) Microsoft. All rights reserved. 
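[Editorial note: the new `*_with_provided_client` tests above exercise client injection. Below is a minimal sketch of that pattern outside pytest, reusing only calls visible in this diff (`AsyncOpenAI`, `OpenAITextEmbedding(..., async_client=...)`, `generate_embeddings`); the credential arguments are placeholders, supplied by `get_oai_config` in the real tests.]

from openai import AsyncOpenAI

import semantic_kernel.connectors.ai.open_ai as sk_oai

async def embed_with_provided_client(api_key: str, org_id: str) -> None:
    # Reuse an externally configured OpenAI client instead of letting the
    # connector construct its own (mirrors test_oai_embedding_service_with_provided_client).
    client = AsyncOpenAI(api_key=api_key, organization=org_id)
    service = sk_oai.OpenAITextEmbedding(
        ai_model_id="text-embedding-ada-002",
        async_client=client,
    )
    # generate_embeddings (formerly generate_embeddings_async) as in the Azure batch test above.
    embeddings = await service.generate_embeddings(["hello world", "goodbye world"])
    print(len(embeddings))
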
+ +from semantic_kernel.plugin_definition.kernel_function_decorator import kernel_function + + +class EmailPluginFake: + @kernel_function( + description="Given an email address and message body, send an email", + name="SendEmail", + ) + def send_email(self, input: str) -> str: + return f"Sent email to: . Body: {input}" + + @kernel_function( + description="Lookup an email address for a person given a name", + name="GetEmailAddress", + ) + def get_email_address(self, input: str) -> str: + if input == "": + return "johndoe1234@example.com" + return f"{input}@example.com" + + @kernel_function(description="Write a short poem for an e-mail", name="WritePoem") + def write_poem(self, input: str) -> str: + return f"Roses are red, violets are blue, {input} is hard, so is this test." diff --git a/python/tests/integration/fakes/email_skill_fake.py b/python/tests/integration/fakes/email_skill_fake.py deleted file mode 100644 index 02ac2dc9a0bc..000000000000 --- a/python/tests/integration/fakes/email_skill_fake.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.skill_definition.sk_function_decorator import sk_function - - -class EmailSkillFake: - @sk_function( - description="Given an email address and message body, send an email", - name="SendEmail", - ) - def send_email(self, input: str) -> str: - return f"Sent email to: . Body: {input}" - - @sk_function( - description="Lookup an email address for a person given a name", - name="GetEmailAddress", - ) - def get_email_address(self, input: str) -> str: - if input == "": - return "johndoe1234@example.com" - return f"{input}@example.com" - - @sk_function(description="Write a short poem for an e-mail", name="WritePoem") - def write_poem(self, input: str) -> str: - return f"Roses are red, violets are blue, {input} is hard, so is this test." diff --git a/python/tests/integration/fakes/fun_plugin_fake.py b/python/tests/integration/fakes/fun_plugin_fake.py new file mode 100644 index 000000000000..215861c7e0da --- /dev/null +++ b/python/tests/integration/fakes/fun_plugin_fake.py @@ -0,0 +1,15 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.plugin_definition.kernel_function_decorator import kernel_function + + +# TODO: this fake plugin is temporal usage. +# C# supports import plugin from samples dir by using test helper and python should do the same +# `semantic-kernel/dotnet/src/IntegrationTests/TestHelpers.cs` +class FunPluginFake: + @kernel_function( + description="Write a joke", + name="WriteJoke", + ) + def write_joke(self) -> str: + return "WriteJoke" diff --git a/python/tests/integration/fakes/fun_skill_fake.py b/python/tests/integration/fakes/fun_skill_fake.py deleted file mode 100644 index 034c4a0b2923..000000000000 --- a/python/tests/integration/fakes/fun_skill_fake.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.skill_definition.sk_function_decorator import sk_function - - -# TODO: this fake skill is temporal usage. 
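[Editorial note: the fake-plugin files added and removed here show the decorator move from `sk_function` to `kernel_function` and from `import_skill` to `import_plugin`. As an illustration only, the plugin class and name below are hypothetical and not part of this PR; the decorator and import calls are taken from the `+` lines of this diff.]

from semantic_kernel import Kernel
from semantic_kernel.plugin_definition import kernel_function

class GreetingPluginFake:
    # Hypothetical example plugin; mirrors the shape of the fakes added in this diff.
    @kernel_function(description="Greet a person by name", name="Greet")
    def greet(self, input: str) -> str:
        return f"Hello, {input}!"

kernel = Kernel()
# import_plugin replaces the import_skill call used by the deleted *_skill_fake files.
kernel.import_plugin(GreetingPluginFake(), "greeting")
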
-# C# supports import skill from samples dir by using test helper and python should do the same -# `semantic-kernel/dotnet/src/IntegrationTests/TestHelpers.cs` -class FunSkillFake: - @sk_function( - description="Write a joke", - name="WriteJoke", - ) - def write_joke(self) -> str: - return "WriteJoke" diff --git a/python/tests/integration/fakes/summarize_plugin_fake.py b/python/tests/integration/fakes/summarize_plugin_fake.py new file mode 100644 index 000000000000..8d20c88c4618 --- /dev/null +++ b/python/tests/integration/fakes/summarize_plugin_fake.py @@ -0,0 +1,16 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.plugin_definition.kernel_function_decorator import kernel_function + +# TODO: this fake plugin is temporal usage. +# C# supports import plugin from samples dir by using test helper and python should do the same +# `semantic-kernel/dotnet/src/IntegrationTests/TestHelpers.cs` + + +class SummarizePluginFake: + @kernel_function( + description="Summarize", + name="Summarize", + ) + def translate(self) -> str: + return "Summarize" diff --git a/python/tests/integration/fakes/summarize_skill_fake.py b/python/tests/integration/fakes/summarize_skill_fake.py deleted file mode 100644 index 0d0857a55be4..000000000000 --- a/python/tests/integration/fakes/summarize_skill_fake.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.skill_definition.sk_function_decorator import sk_function - -# TODO: this fake skill is temporal usage. -# C# supports import skill from samples dir by using test helper and python should do the same -# `semantic-kernel/dotnet/src/IntegrationTests/TestHelpers.cs` - - -class SummarizeSkillFake: - @sk_function( - description="Summarize", - name="Summarize", - ) - def translate(self) -> str: - return "Summarize" diff --git a/python/tests/integration/fakes/writer_plugin_fake.py b/python/tests/integration/fakes/writer_plugin_fake.py new file mode 100644 index 000000000000..548e1859db35 --- /dev/null +++ b/python/tests/integration/fakes/writer_plugin_fake.py @@ -0,0 +1,25 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter + +# TODO: this fake plugin is temporal usage. +# C# supports import plugin from samples dir by using test helper and python should do the same +# `semantic-kernel/dotnet/src/IntegrationTests/TestHelpers.cs` + + +class WriterPluginFake: + @kernel_function( + description="Translate", + name="Translate", + ) + def translate(self, language: str) -> str: + return f"Translate: {language}" + + @kernel_function(description="Write an outline for a novel", name="NovelOutline") + @kernel_function_context_parameter( + name="endMarker", + description="The marker to use to end each chapter.", + default_value="", + ) + def write_novel_outline(self, input: str) -> str: + return f"Novel outline: {input}" diff --git a/python/tests/integration/fakes/writer_skill_fake.py b/python/tests/integration/fakes/writer_skill_fake.py deleted file mode 100644 index c19f8bf3c855..000000000000 --- a/python/tests/integration/fakes/writer_skill_fake.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter - -# TODO: this fake skill is temporal usage. 
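[Editorial note: WriterPluginFake above also renames the context-parameter decorator. A hedged sketch of the same pattern on a hypothetical plugin (the class, function, and default marker below are illustrative; the decorator arguments mirror write_novel_outline in this diff).]

from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter

class OutlinePluginFake:
    # Hypothetical variant of WriterPluginFake.write_novel_outline:
    # kernel_function_context_parameter advertises an extra context variable with a default value.
    @kernel_function(description="Write an outline for a short story", name="StoryOutline")
    @kernel_function_context_parameter(
        name="endMarker",
        description="The marker to use to end each chapter.",
        default_value="",
    )
    def write_story_outline(self, input: str) -> str:
        return f"Story outline: {input}"
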
-# C# supports import skill from samples dir by using test helper and python should do the same -# `semantic-kernel/dotnet/src/IntegrationTests/TestHelpers.cs` - - -class WriterSkillFake: - @sk_function( - description="Translate", - name="Translate", - ) - def translate(self, language: str) -> str: - return f"Translate: {language}" - - @sk_function(description="Write an outline for a novel", name="NovelOutline") - @sk_function_context_parameter( - name="endMarker", - description="The marker to use to end each chapter.", - default_value="", - ) - def write_novel_outline(self, input: str) -> str: - return f"Novel outline: {input}" diff --git a/python/tests/integration/planning/sequential_planner/test_sequential_plan_parser.py b/python/tests/integration/planning/sequential_planner/test_sequential_plan_parser.py index 9f1c0371f178..6361493d1f26 100644 --- a/python/tests/integration/planning/sequential_planner/test_sequential_plan_parser.py +++ b/python/tests/integration/planning/sequential_planner/test_sequential_plan_parser.py @@ -7,9 +7,9 @@ from semantic_kernel.planning.sequential_planner.sequential_planner_parser import ( SequentialPlanParser, ) -from tests.integration.fakes.email_skill_fake import EmailSkillFake -from tests.integration.fakes.summarize_skill_fake import SummarizeSkillFake -from tests.integration.fakes.writer_skill_fake import WriterSkillFake +from tests.integration.fakes.email_plugin_fake import EmailPluginFake +from tests.integration.fakes.summarize_plugin_fake import SummarizePluginFake +from tests.integration.fakes.writer_plugin_fake import WriterPluginFake @pytest.mark.asyncio @@ -20,16 +20,20 @@ async def test_can_call_to_plan_from_xml(get_aoai_config): # Configure LLM service kernel.add_text_completion_service( "text_completion", - sk_oai.AzureChatCompletion(deployment_name, endpoint, api_key), + sk_oai.AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + ), ) - kernel.import_skill(EmailSkillFake(), "email") - kernel.import_skill(SummarizeSkillFake(), "SummarizeSkill") - kernel.import_skill(WriterSkillFake(), "WriterSkill") + kernel.import_plugin(EmailPluginFake(), "email") + kernel.import_plugin(SummarizePluginFake(), "SummarizePlugin") + kernel.import_plugin(WriterPluginFake(), "WriterPlugin") plan_string = """ - - + + @@ -39,34 +43,31 @@ async def test_can_call_to_plan_from_xml(get_aoai_config): plan = SequentialPlanParser.to_plan_from_xml( plan_string, goal, - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) assert plan is not None - assert ( - plan.description - == "Summarize an input, translate to french, and e-mail to John Doe" - ) + assert plan.description == "Summarize an input, translate to french, and e-mail to John Doe" assert len(plan._steps) == 4 step = plan._steps[0] - assert step.skill_name == "SummarizeSkill" + assert step.plugin_name == "SummarizePlugin" assert step.name == "Summarize" step = plan._steps[1] - assert step.skill_name == "WriterSkill" + assert step.plugin_name == "WriterPlugin" assert step.name == "Translate" assert step.parameters["language"] == "French" assert "TRANSLATED_SUMMARY" in step._outputs step = plan._steps[2] - assert step.skill_name == "email" + assert step.plugin_name == "email" assert step.name == "GetEmailAddress" assert step.parameters["input"] == "John Doe" assert "EMAIL_ADDRESS" in step._outputs step = plan._steps[3] - assert step.skill_name == "email" + assert step.plugin_name 
== "email" assert step.name == "SendEmail" assert step.parameters["input"] == "$TRANSLATED_SUMMARY" assert step.parameters["email_address"] == "$EMAIL_ADDRESS" diff --git a/python/tests/integration/planning/sequential_planner/test_sequential_planner.py b/python/tests/integration/planning/sequential_planner/test_sequential_planner.py index 36f9be9e6156..2d3fdac632a2 100644 --- a/python/tests/integration/planning/sequential_planner/test_sequential_planner.py +++ b/python/tests/integration/planning/sequential_planner/test_sequential_planner.py @@ -11,9 +11,9 @@ from semantic_kernel.planning.sequential_planner.sequential_planner_config import ( SequentialPlannerConfig, ) -from tests.integration.fakes.email_skill_fake import EmailSkillFake -from tests.integration.fakes.fun_skill_fake import FunSkillFake -from tests.integration.fakes.writer_skill_fake import WriterSkillFake +from tests.integration.fakes.email_plugin_fake import EmailPluginFake +from tests.integration.fakes.fun_plugin_fake import FunPluginFake +from tests.integration.fakes.writer_plugin_fake import WriterPluginFake async def retry(func, retries=3): @@ -36,67 +36,74 @@ def initialize_kernel(get_aoai_config, use_embeddings=False, use_chat_model=Fals if use_chat_model: kernel.add_chat_service( "chat_completion", - sk_oai.AzureChatCompletion("gpt-35-turbo", endpoint, api_key), + sk_oai.AzureChatCompletion( + deployment_name="gpt-35-turbo", + endpoint=endpoint, + api_key=api_key, + ), ) else: kernel.add_text_completion_service( "text_completion", - sk_oai.AzureChatCompletion("gpt-35-turbo", endpoint, api_key), + sk_oai.AzureChatCompletion( + deployment_name="gpt-35-turbo", + endpoint=endpoint, + api_key=api_key, + ), ) if use_embeddings: kernel.add_text_embedding_generation_service( "text_embedding", - sk_oai.AzureTextEmbedding("text-embedding-ada-002", endpoint, api_key), + sk_oai.AzureTextEmbedding( + deployment_name="text-embedding-ada-002", + endpoint=endpoint, + api_key=api_key, + ), ) return kernel @pytest.mark.parametrize( - "use_chat_model, prompt, expected_function, expected_skill", + "use_chat_model, prompt, expected_function, expected_plugin", [ ( False, "Write a joke and send it in an e-mail to Kai.", "SendEmail", - "_GLOBAL_FUNCTIONS_", + "email_plugin_fake", ), ( True, "Write a joke and send it in an e-mail to Kai.", "SendEmail", - "_GLOBAL_FUNCTIONS_", + "email_plugin_fake", ), ], ) @pytest.mark.asyncio -async def test_create_plan_function_flow_async( - get_aoai_config, use_chat_model, prompt, expected_function, expected_skill -): +async def test_create_plan_function_flow(get_aoai_config, use_chat_model, prompt, expected_function, expected_plugin): # Arrange kernel = initialize_kernel(get_aoai_config, False, use_chat_model) - kernel.import_skill(EmailSkillFake()) - kernel.import_skill(FunSkillFake()) + kernel.import_plugin(EmailPluginFake(), "email_plugin_fake") + kernel.import_plugin(FunPluginFake(), "fun_plugin_fake") planner = SequentialPlanner(kernel) # Act - plan = await planner.create_plan_async(prompt) + plan = await planner.create_plan(prompt) # Assert - assert any( - step.name == expected_function and step.skill_name == expected_skill - for step in plan._steps - ) + assert any(step.name == expected_function and step.plugin_name == expected_plugin for step in plan._steps) @pytest.mark.parametrize( - "prompt, expected_function, expected_skill, expected_default", + "prompt, expected_function, expected_plugin, expected_default", [ ( "Write a novel outline.", "NovelOutline", - "WriterSkill", + "WriterPlugin", "", ) 
], @@ -106,35 +113,33 @@ async def test_create_plan_function_flow_async( raises=semantic_kernel.planning.planning_exception.PlanningException, reason="Test is known to occasionally produce unexpected results.", ) -async def test_create_plan_with_defaults_async( - get_aoai_config, prompt, expected_function, expected_skill, expected_default -): +async def test_create_plan_with_defaults(get_aoai_config, prompt, expected_function, expected_plugin, expected_default): # Arrange kernel = initialize_kernel(get_aoai_config) - kernel.import_skill(EmailSkillFake()) - kernel.import_skill(WriterSkillFake(), "WriterSkill") + kernel.import_plugin(EmailPluginFake(), "email_plugin_fake") + kernel.import_plugin(WriterPluginFake(), "WriterPlugin") planner = SequentialPlanner(kernel) # Act - plan = await retry(lambda: planner.create_plan_async(prompt)) + plan = await retry(lambda: planner.create_plan(prompt)) # Assert assert any( step.name == expected_function - and step.skill_name == expected_skill + and step.plugin_name == expected_plugin and step.parameters["endMarker"] == expected_default for step in plan._steps ) @pytest.mark.parametrize( - "prompt, expected_function, expected_skill", + "prompt, expected_function, expected_plugin", [ ( "Write a poem or joke and send it in an e-mail to Kai.", "SendEmail", - "_GLOBAL_FUNCTIONS_", + "email_plugin_fake", ) ], ) @@ -143,14 +148,12 @@ async def test_create_plan_with_defaults_async( raises=semantic_kernel.planning.planning_exception.PlanningException, reason="Test is known to occasionally produce unexpected results.", ) -async def test_create_plan_goal_relevant_async( - get_aoai_config, prompt, expected_function, expected_skill -): +async def test_create_plan_goal_relevant(get_aoai_config, prompt, expected_function, expected_plugin): # Arrange kernel = initialize_kernel(get_aoai_config, use_embeddings=True) - kernel.import_skill(EmailSkillFake()) - kernel.import_skill(FunSkillFake()) - kernel.import_skill(WriterSkillFake()) + kernel.import_plugin(EmailPluginFake(), "email_plugin_fake") + kernel.import_plugin(FunPluginFake(), "fun_plugin_fake") + kernel.import_plugin(WriterPluginFake(), "writer_plugin_fake") planner = SequentialPlanner( kernel, @@ -158,10 +161,7 @@ async def test_create_plan_goal_relevant_async( ) # Act - plan = await retry(lambda: planner.create_plan_async(prompt)) + plan = await retry(lambda: planner.create_plan(prompt)) # Assert - assert any( - step.name == expected_function and step.skill_name == expected_skill - for step in plan._steps - ) + assert any(step.name == expected_function and step.plugin_name == expected_plugin for step in plan._steps) diff --git a/python/tests/integration/planning/stepwise_planner/test_stepwise_planner.py b/python/tests/integration/planning/stepwise_planner/test_stepwise_planner.py index 0e48e2dc3ce8..18c4d4e0245b 100644 --- a/python/tests/integration/planning/stepwise_planner/test_stepwise_planner.py +++ b/python/tests/integration/planning/stepwise_planner/test_stepwise_planner.py @@ -8,43 +8,41 @@ import semantic_kernel as sk import semantic_kernel.connectors.ai.open_ai as sk_oai from semantic_kernel.connectors.search_engine import BingConnector -from semantic_kernel.core_skills.math_skill import MathSkill -from semantic_kernel.core_skills.time_skill import TimeSkill +from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.core_plugins.time_plugin import TimePlugin from semantic_kernel.kernel import Kernel -from semantic_kernel.orchestration.sk_context import SKContext +from 
semantic_kernel.orchestration.kernel_context import KernelContext from semantic_kernel.planning import StepwisePlanner from semantic_kernel.planning.stepwise_planner.stepwise_planner_config import ( StepwisePlannerConfig, ) -from semantic_kernel.skill_definition import sk_function, sk_function_context_parameter +from semantic_kernel.plugin_definition import kernel_function, kernel_function_context_parameter -class TempWebSearchEngineSkill: +class TempWebSearchEnginePlugin: """ - TODO: replace this class with semantic_kernel.core_skills.web_search_engine_skill.WebSearchEngineSkill + TODO: replace this class with semantic_kernel.core_plugins.web_search_engine_plugin.WebSearchEnginePlugin - SKFunction.describe() does not contains info for arguments. + KernelFunction.describe() does not contains info for arguments. so that `query: str` is not shown in the function description, BUT this argument must be passed to planner to work appropriately. This function temporarily add `query` as parameter by using @sk_function_context_parameter. - original file is here: semantic-kernel/python/semantic_kernel/core_skills/web_search_engine_skill.py + original file is here: semantic-kernel/python/semantic_kernel/core_plugins/web_search_engine_plugin.py """ def __init__(self, connector) -> None: self._connector = connector - @sk_function( - description="Performs a web search for a given query", name="searchAsync" - ) - @sk_function_context_parameter( + @kernel_function(description="Performs a web search for a given query", name="searchAsync") + @kernel_function_context_parameter( name="query", description="The search query", ) - async def search_async(self, query: str, context: SKContext) -> str: + async def search(self, query: str, context: KernelContext) -> str: query = query or context.variables.get("query") - result = await self._connector.search_async(query, num_results=5, offset=0) + result = await self._connector.search(query, num_results=5, offset=0) return str(result) @@ -66,24 +64,32 @@ def initialize_kernel(get_aoai_config, use_embeddings=False, use_chat_model=Fals if use_chat_model: kernel.add_chat_service( "chat_completion", - sk_oai.AzureChatCompletion("gpt-35-turbo", endpoint, api_key), + sk_oai.AzureChatCompletion(deployment_name="gpt-35-turbo", endpoint=endpoint, api_key=api_key), ) else: kernel.add_text_completion_service( "text_completion", - sk_oai.AzureChatCompletion("gpt-35-turbo", endpoint, api_key), + sk_oai.AzureChatCompletion( + deployment_name="gpt-35-turbo", + endpoint=endpoint, + api_key=api_key, + ), ) if use_embeddings: kernel.add_text_embedding_generation_service( "text_embedding", - sk_oai.AzureTextEmbedding("text-embedding-ada-002", endpoint, api_key), + sk_oai.AzureTextEmbedding( + deployment_name="text-embedding-ada-002", + endpoint=endpoint, + api_key=api_key, + ), ) return kernel @pytest.mark.parametrize( - "use_chat_model, prompt, expected_function, expected_skill", + "use_chat_model, prompt, expected_function, expected_plugin", [ ( False, @@ -106,28 +112,23 @@ async def test_can_create_stepwise_plan( use_chat_model, prompt, expected_function, - expected_skill, + expected_plugin, ): # Arrange use_embeddings = False kernel = initialize_kernel(get_aoai_config, use_embeddings, use_chat_model) bing_connector = BingConnector(api_key=get_bing_config) - web_search_engine_skill = TempWebSearchEngineSkill(bing_connector) - kernel.import_skill(web_search_engine_skill, "WebSearch") - kernel.import_skill(TimeSkill(), "time") + web_search_engine_plugin = 
TempWebSearchEnginePlugin(bing_connector) + kernel.import_plugin(web_search_engine_plugin, "WebSearch") + kernel.import_plugin(TimePlugin(), "time") - planner = StepwisePlanner( - kernel, StepwisePlannerConfig(max_iterations=10, min_iteration_time_ms=1000) - ) + planner = StepwisePlanner(kernel, StepwisePlannerConfig(max_iterations=10, min_iteration_time_ms=1000)) # Act plan = planner.create_plan(prompt) # Assert - assert any( - step.name == expected_function and step.skill_name == expected_skill - for step in plan._steps - ) + assert any(step.name == expected_function and step.plugin_name == expected_plugin for step in plan._steps) @pytest.mark.parametrize( @@ -140,6 +141,9 @@ async def test_can_create_stepwise_plan( ], ) @pytest.mark.asyncio +@pytest.mark.xfail( + reason="Test is known to occasionally produce unexpected results.", +) async def test_can_execute_stepwise_plan( get_aoai_config, get_bing_config, @@ -150,18 +154,16 @@ async def test_can_execute_stepwise_plan( use_embeddings = False kernel = initialize_kernel(get_aoai_config, use_embeddings, use_chat_model) bing_connector = BingConnector(api_key=get_bing_config) - web_search_engine_skill = TempWebSearchEngineSkill(bing_connector) - kernel.import_skill(web_search_engine_skill, "WebSearch") - kernel.import_skill(TimeSkill(), "time") - kernel.import_skill(MathSkill(), "math") + web_search_engine_plugin = TempWebSearchEnginePlugin(bing_connector) + kernel.import_plugin(web_search_engine_plugin, "WebSearch") + kernel.import_plugin(TimePlugin(), "time") + kernel.import_plugin(MathPlugin(), "math") - planner = StepwisePlanner( - kernel, StepwisePlannerConfig(max_iterations=10, min_iteration_time_ms=1000) - ) + planner = StepwisePlanner(kernel, StepwisePlannerConfig(max_iterations=10, min_iteration_time_ms=1000)) # Act plan = planner.create_plan(prompt) - result = await plan.invoke_async() + result = await plan.invoke() steps_taken_string = result.variables["steps_taken"] assert steps_taken_string is not None diff --git a/python/tests/template_engine/prompt_template_e2e_tests.py b/python/tests/template_engine/prompt_template_e2e_tests.py index a73534e15bc9..b6efa7ecce10 100644 --- a/python/tests/template_engine/prompt_template_e2e_tests.py +++ b/python/tests/template_engine/prompt_template_e2e_tests.py @@ -6,7 +6,7 @@ from pytest import mark, raises from semantic_kernel import Kernel -from semantic_kernel.skill_definition import sk_function +from semantic_kernel.plugin_definition import kernel_function from semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine @@ -33,12 +33,12 @@ def _get_template_language_tests() -> List[Tuple[str, str]]: return test_data -class MySkill: - @sk_function() +class MyPlugin: + @kernel_function() def check123(self, input: str) -> str: return "123 ok" if input == "123" else f"{input} != 123" - @sk_function() + @kernel_function() def asis(self, input: str) -> str: return input @@ -48,7 +48,7 @@ def setup_method(self): self.target = PromptTemplateEngine() @mark.asyncio - async def test_it_supports_variables_async(self): + async def test_it_supports_variables(self): # Arrange input = "template tests" winner = "SK" @@ -60,16 +60,14 @@ async def test_it_supports_variables_async(self): context["winner"] = winner # Act - result = await self.target.render_async(template, context) + result = await self.target.render(template, context) # Assert - expected = template.replace("{{$input}}", input).replace( - "{{ $winner }}", winner - ) + expected = template.replace("{{$input}}", 
input).replace("{{ $winner }}", winner) assert expected == result @mark.asyncio - async def test_it_supports_values_async(self): + async def test_it_supports_values(self): # Arrange template = "And the winner\n of {{'template\ntests'}} \nis: {{ \"SK\" }}!" expected = "And the winner\n of template\ntests \nis: SK!" @@ -78,86 +76,82 @@ async def test_it_supports_values_async(self): context = kernel.create_new_context() # Act - result = await self.target.render_async(template, context) + result = await self.target.render(template, context) # Assert assert expected == result @mark.asyncio - async def test_it_allows_to_pass_variables_to_functions_async(self): + async def test_it_allows_to_pass_variables_to_functions(self): # Arrange template = "== {{my.check123 $call}} ==" kernel = Kernel() - kernel.import_skill(MySkill(), "my") + kernel.import_plugin(MyPlugin(), "my") context = kernel.create_new_context() context["call"] = "123" # Act - result = await self.target.render_async(template, context) + result = await self.target.render(template, context) # Assert assert "== 123 ok ==" == result @mark.asyncio - async def test_it_allows_to_pass_values_to_functions_async(self): + async def test_it_allows_to_pass_values_to_functions(self): # Arrange template = "== {{my.check123 '234'}} ==" kernel = Kernel() - kernel.import_skill(MySkill(), "my") + kernel.import_plugin(MyPlugin(), "my") context = kernel.create_new_context() # Act - result = await self.target.render_async(template, context) + result = await self.target.render(template, context) # Assert assert "== 234 != 123 ==" == result @mark.asyncio - async def test_it_allows_to_pass_escaped_values1_to_functions_async(self): + async def test_it_allows_to_pass_escaped_values1_to_functions(self): # Arrange template = "== {{my.check123 'a\\'b'}} ==" kernel = Kernel() - kernel.import_skill(MySkill(), "my") + kernel.import_plugin(MyPlugin(), "my") context = kernel.create_new_context() # Act - result = await self.target.render_async(template, context) + result = await self.target.render(template, context) # Assert assert "== a'b != 123 ==" == result @mark.asyncio - async def test_it_allows_to_pass_escaped_values2_to_functions_async(self): + async def test_it_allows_to_pass_escaped_values2_to_functions(self): # Arrange template = '== {{my.check123 "a\\"b"}} ==' kernel = Kernel() - kernel.import_skill(MySkill(), "my") + kernel.import_plugin(MyPlugin(), "my") context = kernel.create_new_context() # Act - result = await self.target.render_async(template, context) + result = await self.target.render(template, context) # Assert assert '== a"b != 123 ==' == result @mark.asyncio - @mark.parametrize( - "template,expected_result", [(t, r) for t, r in _get_template_language_tests()] - ) - async def test_it_handle_edge_cases_async( - self, template: str, expected_result: str - ): + @mark.parametrize("template,expected_result", [(t, r) for t, r in _get_template_language_tests()]) + async def test_it_handle_edge_cases(self, template: str, expected_result: str): # Arrange kernel = Kernel() - kernel.import_skill(MySkill()) + kernel.import_plugin(MyPlugin(), "my_plugin") context = kernel.create_new_context() # Act if expected_result.startswith("ERROR"): with raises(ValueError): - await self.target.render_async(template, context) + await self.target.render(template, context) else: - result = await self.target.render_async(template, context) + result = await self.target.render(template, context) # Assert assert expected_result == result diff --git 
a/python/tests/test_native_plugins/TestNativePlugin/native_function.py b/python/tests/test_native_plugins/TestNativePlugin/native_function.py new file mode 100644 index 000000000000..0510b52005a6 --- /dev/null +++ b/python/tests/test_native_plugins/TestNativePlugin/native_function.py @@ -0,0 +1,26 @@ +from semantic_kernel.plugin_definition import kernel_function + + +class TestNativeEchoBotPlugin: + """ + Description: Test Native Plugin for testing purposes + """ + + @kernel_function( + description="Echo for input text", + name="echoAsync", + input_description="The text to echo", + ) + async def echo(self, text: str) -> str: + """ + Echo for input text + + Example: + "hello world" => "hello world" + Args: + text -- The text to echo + + Returns: + input text + """ + return text diff --git a/python/tests/test_native_skills/TestNativeSkill/native_function.py b/python/tests/test_native_skills/TestNativeSkill/native_function.py deleted file mode 100644 index 0b690bc6e113..000000000000 --- a/python/tests/test_native_skills/TestNativeSkill/native_function.py +++ /dev/null @@ -1,26 +0,0 @@ -from semantic_kernel.skill_definition import sk_function - - -class TestNativeEchoBotSkill: - """ - Description: Test Native Skill for testing purposes - """ - - @sk_function( - description="Echo for input text", - name="echoAsync", - input_description="The text to echo", - ) - async def echo(self, text: str) -> str: - """ - Echo for input text - - Example: - "hello world" => "hello world" - Args: - text -- The text to echo - - Returns: - input text - """ - return text diff --git a/python/tests/test_skills/TestSkill/TestFunction/config.json b/python/tests/test_plugins/TestPlugin/TestFunction/config.json similarity index 100% rename from python/tests/test_skills/TestSkill/TestFunction/config.json rename to python/tests/test_plugins/TestPlugin/TestFunction/config.json diff --git a/python/tests/test_skills/TestSkill/TestFunction/skprompt.txt b/python/tests/test_plugins/TestPlugin/TestFunction/skprompt.txt similarity index 100% rename from python/tests/test_skills/TestSkill/TestFunction/skprompt.txt rename to python/tests/test_plugins/TestPlugin/TestFunction/skprompt.txt diff --git a/python/tests/unit/ai/google_palm/services/test_palm_chat_completion.py b/python/tests/unit/ai/google_palm/services/test_palm_chat_completion.py index d22f19e2d992..207bbe59437e 100644 --- a/python/tests/unit/ai/google_palm/services/test_palm_chat_completion.py +++ b/python/tests/unit/ai/google_palm/services/test_palm_chat_completion.py @@ -5,78 +5,86 @@ from unittest.mock import MagicMock, patch import pytest - -from semantic_kernel.connectors.ai.chat_request_settings import ( - ChatRequestSettings, -) +from pydantic import ValidationError if sys.version_info >= (3, 9): + from google.generativeai.types import ChatResponse, MessageDict + + from semantic_kernel.connectors.ai.google_palm import ( + GooglePalmChatPromptExecutionSettings, + ) from semantic_kernel.connectors.ai.google_palm.services.gp_chat_completion import ( GooglePalmChatCompletion, ) -pytestmark = pytest.mark.skipif( - sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater" -) +pytestmark = pytest.mark.skipif(sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater") def test_google_palm_chat_completion_init() -> None: - model_id = "test_model_id" + ai_model_id = "test_model_id" api_key = "test_api_key" gp_chat_completion = GooglePalmChatCompletion( - model_id=model_id, + ai_model_id=ai_model_id, api_key=api_key, ) - assert 
gp_chat_completion._model_id == model_id - assert gp_chat_completion._api_key == api_key + assert gp_chat_completion.ai_model_id == ai_model_id + assert gp_chat_completion.api_key == api_key assert isinstance(gp_chat_completion, GooglePalmChatCompletion) def test_google_palm_chat_completion_init_with_empty_api_key() -> None: - model_id = "test_model_id" + ai_model_id = "test_model_id" # api_key = "test_api_key" - with pytest.raises( - ValueError, match="The Google PaLM API key cannot be `None` or empty" - ): + with pytest.raises(ValidationError, match="api_key"): GooglePalmChatCompletion( - model_id=model_id, + ai_model_id=ai_model_id, api_key="", ) @pytest.mark.asyncio -async def test_google_palm_text_completion_complete_chat_async_call_with_parameters() -> None: +async def test_google_palm_text_completion_complete_chat_call_with_parameters() -> None: + class MockChatResponse(ChatResponse): + def last(self): + return "" + + def reply(self): + return self + + gp_response = MockChatResponse() + gp_response.candidates = [MessageDict(content="Example response", author="assistant")] + gp_response.filters = None mock_response = MagicMock() mock_response.last = asyncio.Future() - mock_response.last.set_result("Example response") + mock_response.last.set_result(gp_response) mock_gp = MagicMock() - mock_gp.chat.return_value = mock_response + mock_gp.chat.return_value = gp_response with patch( "semantic_kernel.connectors.ai.google_palm.services.gp_chat_completion.palm", new=mock_gp, ): - model_id = "test_model_id" + ai_model_id = "test_model_id" api_key = "test_api_key" - prompt = [("user", "hello world")] + prompt = [{"role": "user", "content": "hello world"}] + rewritten_prompt = [{"author": "user", "content": "hello world"}] gp_chat_completion = GooglePalmChatCompletion( - model_id=model_id, + ai_model_id=ai_model_id, api_key=api_key, ) - settings = ChatRequestSettings() - response = await gp_chat_completion.complete_chat_async(prompt, settings) - assert isinstance(response.result(), str) and len(response.result()) > 0 + settings = GooglePalmChatPromptExecutionSettings() + response = await gp_chat_completion.complete_chat(prompt, settings) + assert isinstance(response[0].content, str) and len(response) > 0 + print(mock_gp.chat) mock_gp.chat.assert_called_once_with( - model=model_id, - context="", - examples=None, + model=ai_model_id, temperature=settings.temperature, - candidate_count=settings.number_of_responses, top_p=settings.top_p, - prompt=None, - messages=prompt[-1][1], + top_k=settings.top_k, + candidate_count=settings.candidate_count, + messages=rewritten_prompt, ) diff --git a/python/tests/unit/ai/google_palm/services/test_palm_text_completion.py b/python/tests/unit/ai/google_palm/services/test_palm_text_completion.py index 3a9ede666f58..431da1294702 100644 --- a/python/tests/unit/ai/google_palm/services/test_palm_text_completion.py +++ b/python/tests/unit/ai/google_palm/services/test_palm_text_completion.py @@ -1,81 +1,80 @@ # Copyright (c) Microsoft. All rights reserved. 
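[Editorial note: the PaLM connector hunks follow one pattern: constructor kwargs become pydantic fields (`ai_model_id` replaces `model_id`) and an empty key now fails pydantic validation instead of raising a manual ValueError. A condensed sketch of just that behaviour, reusing the placeholder values from these tests; per the skipif markers, this requires Python 3.9+.]

import pytest
from pydantic import ValidationError

from semantic_kernel.connectors.ai.google_palm.services.gp_chat_completion import (
    GooglePalmChatCompletion,
)

# Fields are exposed as plain attributes rather than private _model_id/_api_key.
service = GooglePalmChatCompletion(ai_model_id="test_model_id", api_key="test_api_key")
assert service.ai_model_id == "test_model_id"
assert service.api_key == "test_api_key"

# Empty api_key is rejected by pydantic validation, matching on the field name.
with pytest.raises(ValidationError, match="api_key"):
    GooglePalmChatCompletion(ai_model_id="test_model_id", api_key="")
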
-import asyncio import sys from unittest.mock import MagicMock, patch import pytest - -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, -) +from pydantic import ValidationError if sys.version_info >= (3, 9): + from google.generativeai.types import Completion + from google.generativeai.types.text_types import TextCompletion + + from semantic_kernel.connectors.ai.google_palm import ( + GooglePalmTextPromptExecutionSettings, + ) from semantic_kernel.connectors.ai.google_palm.services.gp_text_completion import ( GooglePalmTextCompletion, ) -pytestmark = pytest.mark.skipif( - sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater" -) +pytestmark = pytest.mark.skipif(sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater") def test_google_palm_text_completion_init() -> None: - model_id = "test_model_id" + ai_model_id = "test_model_id" api_key = "test_api_key" # Test successful initialization gp_text_completion = GooglePalmTextCompletion( - model_id=model_id, + ai_model_id=ai_model_id, api_key=api_key, ) - assert gp_text_completion._model_id == model_id - assert gp_text_completion._api_key == api_key + assert gp_text_completion.ai_model_id == ai_model_id + assert gp_text_completion.api_key == api_key assert isinstance(gp_text_completion, GooglePalmTextCompletion) def test_google_palm_text_completion_init_with_empty_api_key() -> None: - model_id = "test_model_id" + ai_model_id = "test_model_id" # api_key = "test_api_key" - with pytest.raises( - ValueError, match="The Google PaLM API key cannot be `None` or empty" - ): + with pytest.raises(ValidationError, match="api_key"): GooglePalmTextCompletion( - model_id=model_id, + ai_model_id=ai_model_id, api_key="", ) @pytest.mark.asyncio -async def test_google_palm_text_completion_complete_async_call_with_parameters() -> None: - mock_response = MagicMock() - mock_response.result = asyncio.Future() - mock_response.result.set_result("Example response") +async def test_google_palm_text_completion_complete_call_with_parameters() -> None: + gp_completion = Completion() + gp_completion.candidates = [TextCompletion(output="Example response")] + gp_completion.filters = None + gp_completion.safety_feedback = None mock_gp = MagicMock() - mock_gp.generate_text.return_value = mock_response + mock_gp.generate_text.return_value = gp_completion with patch( "semantic_kernel.connectors.ai.google_palm.services.gp_text_completion.palm", new=mock_gp, ): - model_id = "test_model_id" + ai_model_id = "test_model_id" api_key = "test_api_key" prompt = "hello world" gp_text_completion = GooglePalmTextCompletion( - model_id=model_id, + ai_model_id=ai_model_id, api_key=api_key, ) - settings = CompleteRequestSettings() - response = await gp_text_completion.complete_async(prompt, settings) - assert isinstance(response.result(), str) and len(response.result()) > 0 + settings = GooglePalmTextPromptExecutionSettings() + response = await gp_text_completion.complete(prompt, settings) + assert isinstance(response[0].text, str) and len(response) > 0 mock_gp.generate_text.assert_called_once_with( - model=model_id, + model=ai_model_id, prompt=prompt, temperature=settings.temperature, - max_output_tokens=settings.max_tokens, - stop_sequences=None, - candidate_count=settings.number_of_responses, + max_output_tokens=settings.max_output_tokens, + candidate_count=settings.candidate_count, top_p=settings.top_p, + top_k=settings.top_k, ) diff --git 
a/python/tests/unit/ai/google_palm/services/test_palm_text_embedding.py b/python/tests/unit/ai/google_palm/services/test_palm_text_embedding.py index add7507acba1..6e9f99df47b8 100644 --- a/python/tests/unit/ai/google_palm/services/test_palm_text_embedding.py +++ b/python/tests/unit/ai/google_palm/services/test_palm_text_embedding.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock, patch import pytest +from pydantic import ValidationError if sys.version_info >= (3, 9): from semantic_kernel.connectors.ai.google_palm.services.gp_text_embedding import ( @@ -11,35 +12,31 @@ ) -pytestmark = pytest.mark.skipif( - sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater" -) +pytestmark = pytest.mark.skipif(sys.version_info < (3, 9), reason="Google Palm requires Python 3.9 or greater") def test_google_palm_text_embedding_init() -> None: - model_id = "test_model_id" + ai_model_id = "test_model_id" api_key = "test_api_key" # Test successful initialization gp_text_embed = GooglePalmTextEmbedding( - model_id=model_id, + ai_model_id=ai_model_id, api_key=api_key, ) - assert gp_text_embed._model_id == model_id - assert gp_text_embed._api_key == api_key + assert gp_text_embed.ai_model_id == ai_model_id + assert gp_text_embed.api_key == api_key assert isinstance(gp_text_embed, GooglePalmTextEmbedding) def test_google_palm_text_embedding_init_with_empty_api_key() -> None: - model_id = "test_model_id" + ai_model_id = "test_model_id" # api_key = "test_api_key" - with pytest.raises( - ValueError, match="The Google PaLM API key cannot be `None` or empty" - ): + with pytest.raises(ValidationError, match="api_key"): GooglePalmTextEmbedding( - model_id=model_id, + ai_model_id=ai_model_id, api_key="", ) @@ -52,19 +49,19 @@ async def test_google_palm_text_embedding_calls_with_parameters() -> None: "semantic_kernel.connectors.ai.google_palm.services.gp_text_embedding.palm", new=mock_gp, ): - model_id = "test_model_id" + ai_model_id = "test_model_id" api_key = "test_api_key" texts = ["hello world"] text = "hello world" gp_text_embedding = GooglePalmTextEmbedding( - model_id=model_id, + ai_model_id=ai_model_id, api_key=api_key, ) - await gp_text_embedding.generate_embeddings_async(texts) + await gp_text_embedding.generate_embeddings(texts) mock_gp.generate_embeddings.assert_called_once_with( - model=model_id, + model=ai_model_id, text=text, ) diff --git a/python/tests/unit/ai/hugging_face/test_hf_local_text_completions.py b/python/tests/unit/ai/hugging_face/test_hf_local_text_completions.py new file mode 100644 index 000000000000..024f0a5e7042 --- /dev/null +++ b/python/tests/unit/ai/hugging_face/test_hf_local_text_completions.py @@ -0,0 +1,105 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest + +import semantic_kernel.connectors.ai.hugging_face as sk_hf +from semantic_kernel.kernel import Kernel + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + ("model_name", "task", "input_str"), + [ + ( + "patrickvonplaten/t5-tiny-random", + "text2text-generation", + "translate English to Dutch: Hello, how are you?", + ), + ( + "jotamunz/billsum_tiny_summarization", + "summarization", + """ + Summarize: Whales are fully aquatic, open-ocean animals: + they can feed, mate, give birth, suckle and raise their young at sea. + Whales range in size from the 2.6 metres (8.5 ft) and 135 kilograms (298 lb) + dwarf sperm whale to the 29.9 metres (98 ft) and 190 tonnes (210 short tons) blue whale, + which is the largest known animal that has ever lived. 
The sperm whale is the largest
+            toothed predator on Earth. Several whale species exhibit sexual dimorphism,
+            in that the females are larger than males.
+            """,
+        ),
+        ("HuggingFaceM4/tiny-random-LlamaForCausalLM", "text-generation", "Hello, I like sleeping and "),
+    ],
+    ids=["text2text-generation", "summarization", "text-generation"],
+)
+async def test_text_completion(model_name, task, input_str):
+    kernel = Kernel()
+
+    # Configure LLM service
+    kernel.add_text_completion_service(
+        service_id=model_name,
+        service=sk_hf.HuggingFaceTextCompletion(ai_model_id=model_name, task=task),
+    )
+
+    # Define semantic function using SK prompt template language
+    sk_prompt = "{{$input}}"
+
+    # Create the semantic function
+    function = kernel.create_semantic_function(sk_prompt, max_tokens=25, temperature=0.7, top_p=0.5)
+
+    summary = await kernel.run(function, input_str=input_str)
+
+    output = str(summary).strip()
+    print(f"Completion using input string: '{output}'")
+    assert len(output) > 0
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    ("model_name", "task", "input_str"),
+    [
+        (
+            "patrickvonplaten/t5-tiny-random",
+            "text2text-generation",
+            "translate English to Dutch: Hello, how are you?",
+        ),
+        (
+            "jotamunz/billsum_tiny_summarization",
+            "summarization",
+            """
+            Summarize: Whales are fully aquatic, open-ocean animals:
+            they can feed, mate, give birth, suckle and raise their young at sea.
+            Whales range in size from the 2.6 metres (8.5 ft) and 135 kilograms (298 lb)
+            dwarf sperm whale to the 29.9 metres (98 ft) and 190 tonnes (210 short tons) blue whale,
+            which is the largest known animal that has ever lived. The sperm whale is the largest
+            toothed predator on Earth. Several whale species exhibit sexual dimorphism,
+            in that the females are larger than males.
+ """, + ), + # skipped for now, as it takes too long + ("HuggingFaceM4/tiny-random-LlamaForCausalLM", "text-generation", "Hello, I like sleeping and "), + ], + ids=["text2text-generation", "summarization", "text-generation"], +) +async def test_text_completion_stream(model_name, task, input_str): + kernel = Kernel() + + # Configure LLM service + kernel.add_text_completion_service( + service_id=model_name, + service=sk_hf.HuggingFaceTextCompletion(ai_model_id=model_name, task=task), + ) + + # Define semantic function using SK prompt template language + sk_prompt = "{{$input}}" + + # Create the semantic function + function = kernel.create_semantic_function(sk_prompt, max_tokens=25, temperature=0.7, top_p=0.5) + + summary = "" + async for text in kernel.run_stream(function, input_str=input_str): + summary += str(text[0]) + + output = str(summary).strip() + print(f"Completion using input string: '{output}'") + assert len(output) > 0 diff --git a/python/tests/unit/ai/ollama/services/test_ollama_chat_completion.py b/python/tests/unit/ai/ollama/services/test_ollama_chat_completion.py new file mode 100644 index 000000000000..f299a952df3b --- /dev/null +++ b/python/tests/unit/ai/ollama/services/test_ollama_chat_completion.py @@ -0,0 +1,96 @@ +from unittest.mock import patch + +import pytest + +from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import ( + OllamaChatPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.ollama.services.ollama_chat_completion import ( + OllamaChatCompletion, +) +from tests.unit.ai.ollama.utils import MockResponse + + +def test_settings(): + ollama = OllamaChatCompletion(ai_model_id="test_model") + settings = ollama.get_prompt_execution_settings_class() + assert settings == OllamaChatPromptExecutionSettings + + +@pytest.mark.asyncio +@patch("aiohttp.ClientSession.post") +async def test_complete_chat(mock_post): + mock_post.return_value = MockResponse(response={"message": {"content": "test_response"}}) + ollama = OllamaChatCompletion(ai_model_id="test_model") + response = await ollama.complete_chat( + [{"role": "user", "content": "test_prompt"}], + OllamaChatPromptExecutionSettings(ai_model_id="test_model", options={"test": "test"}), + ) + assert response[0].content == "test_response" + mock_post.assert_called_once_with( + "http://localhost:11434/api/chat", + json={ + "model": "test_model", + "messages": [{"role": "user", "content": "test_prompt"}], + "options": {"test": "test"}, + "stream": False, + }, + ) + + +@pytest.mark.asyncio +@patch("aiohttp.ClientSession.post") +async def test_complete(mock_post): + mock_post.return_value = MockResponse(response={"message": {"content": "test_response"}}) + ollama = OllamaChatCompletion(ai_model_id="test_model") + response = await ollama.complete( + "test_prompt", + OllamaChatPromptExecutionSettings(ai_model_id="test-model", options={"test": "test"}), + ) + assert response[0].text == "test_response" + + +@pytest.mark.asyncio +@patch("aiohttp.ClientSession.post") +async def test_complete_chat_stream(mock_post): + mock_post.return_value = MockResponse(response={"message": {"content": "test_response"}}) + ollama = OllamaChatCompletion(ai_model_id="test_model") + response = ollama.complete_chat_stream( + [{"role": "user", "content": "test_prompt"}], + OllamaChatPromptExecutionSettings(ai_model_id="test_model", options={"test": "test"}), + ) + async for line in response: + if line: + assert line[0].content == "test_response" + mock_post.assert_called_once_with( + "http://localhost:11434/api/chat", + 
json={ + "model": "test_model", + "messages": [{"role": "user", "content": "test_prompt"}], + "options": {"test": "test"}, + "stream": True, + }, + ) + + +@pytest.mark.asyncio +@patch("aiohttp.ClientSession.post") +async def test_complete_stream(mock_post): + mock_post.return_value = MockResponse(response={"message": {"content": "test_response"}}) + ollama = OllamaChatCompletion(ai_model_id="test_model") + response = ollama.complete_stream( + "test_prompt", + OllamaChatPromptExecutionSettings(ai_model_id="test_model", options={"test": "test"}), + ) + async for line in response: + if line: + assert line[0].text == "test_response" + mock_post.assert_called_once_with( + "http://localhost:11434/api/chat", + json={ + "model": "test_model", + "options": {"test": "test"}, + "stream": True, + "messages": [{"role": "user", "content": "test_prompt"}], + }, + ) diff --git a/python/tests/unit/ai/ollama/services/test_ollama_test_completion.py b/python/tests/unit/ai/ollama/services/test_ollama_test_completion.py new file mode 100644 index 000000000000..e34e56737cbc --- /dev/null +++ b/python/tests/unit/ai/ollama/services/test_ollama_test_completion.py @@ -0,0 +1,53 @@ +from unittest.mock import patch + +import pytest + +from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import ( + OllamaTextPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.ollama.services.ollama_text_completion import ( + OllamaTextCompletion, +) +from tests.unit.ai.ollama.utils import MockResponse + + +def test_settings(): + ollama = OllamaTextCompletion(ai_model_id="test_model") + settings = ollama.get_prompt_execution_settings_class() + assert settings == OllamaTextPromptExecutionSettings + + +@pytest.mark.asyncio +@patch("aiohttp.ClientSession.post") +async def test_complete(mock_post): + mock_post.return_value = MockResponse(response="test_response") + ollama = OllamaTextCompletion(ai_model_id="test_model") + response = await ollama.complete( + "test_prompt", + OllamaTextPromptExecutionSettings(ai_model_id="test-model", options={"test": "test"}), + ) + assert response[0].text == "test_response" + + +@pytest.mark.asyncio +@patch("aiohttp.ClientSession.post") +async def test_complete_stream(mock_post): + mock_post.return_value = MockResponse(response={"response": "test_response"}) + ollama = OllamaTextCompletion(ai_model_id="test_model") + response = ollama.complete_stream( + "test_prompt", + OllamaTextPromptExecutionSettings(ai_model_id="test_model", options={"test": "test"}), + ) + async for line in response: + if line: + assert line[0].text == "test_response" + mock_post.assert_called_once_with( + "http://localhost:11434/api/generate", + json={ + "model": "test_model", + "options": {"test": "test"}, + "stream": True, + "prompt": "test_prompt", + "raw": False, + }, + ) diff --git a/python/tests/unit/ai/ollama/services/test_ollama_text_embedding.py b/python/tests/unit/ai/ollama/services/test_ollama_text_embedding.py new file mode 100644 index 000000000000..da71ba53857c --- /dev/null +++ b/python/tests/unit/ai/ollama/services/test_ollama_text_embedding.py @@ -0,0 +1,28 @@ +from unittest.mock import patch + +import pytest +from numpy import array + +from semantic_kernel.connectors.ai.ollama.services.ollama_text_embedding import ( + OllamaTextEmbedding, +) +from tests.unit.ai.ollama.utils import MockResponse + + +@pytest.mark.asyncio +@patch("aiohttp.ClientSession.post") +async def test_embedding(mock_post): + mock_post.return_value = MockResponse(response=[0.1, 0.2, 0.3]) + ollama = 
OllamaTextEmbedding(ai_model_id="test_model") + response = await ollama.generate_embeddings( + ["test_prompt"], + ) + assert response.all() == array([0.1, 0.2, 0.3]).all() + mock_post.assert_called_once_with( + "http://localhost:11434/api/embeddings", + json={ + "model": "test_model", + "texts": ["test_prompt"], + "options": {}, + }, + ) diff --git a/python/tests/unit/ai/ollama/utils.py b/python/tests/unit/ai/ollama/utils.py new file mode 100644 index 000000000000..98958f996c12 --- /dev/null +++ b/python/tests/unit/ai/ollama/utils.py @@ -0,0 +1,27 @@ +import json + + +class MockResponse: + def __init__(self, response, status=200): + self._response = response + self.status = status + + async def text(self): + return self._response + + async def json(self): + return self._response + + def raise_for_status(self): + pass + + @property + async def content(self): + yield json.dumps(self._response).encode("utf-8") + yield json.dumps({"done": True}).encode("utf-8") + + async def __aexit__(self, exc_type, exc, tb): + pass + + async def __aenter__(self): + return self diff --git a/python/tests/unit/ai/open_ai/models/chat/test_function_call.py b/python/tests/unit/ai/open_ai/models/chat/test_function_call.py index e0200ff37d1f..9d72bba34751 100644 --- a/python/tests/unit/ai/open_ai/models/chat/test_function_call.py +++ b/python/tests/unit/ai/open_ai/models/chat/test_function_call.py @@ -6,9 +6,10 @@ def test_function_call(): # Test initialization with default values - fc = FunctionCall(name="Test-Function", arguments="""{"input": "world"}""") + fc = FunctionCall(name="Test-Function", arguments="""{"input": "world"}""", id="1234") assert fc.name == "Test-Function" assert fc.arguments == """{"input": "world"}""" + assert fc.id == "1234" @pytest.mark.asyncio @@ -19,6 +20,7 @@ async def test_function_call_to_content_variables(create_kernel): func_call = FunctionCall( name="Test-Function", arguments="""{"input": "world", "input2": "world2"}""", + id="1234", ) context = kernel.create_new_context() assert isinstance(func_call.to_context_variables(), ContextVariables) diff --git a/python/tests/unit/ai/open_ai/services/test_azure_chat_completion.py b/python/tests/unit/ai/open_ai/services/test_azure_chat_completion.py index 8ef3429232e1..3290c5260ea2 100644 --- a/python/tests/unit/ai/open_ai/services/test_azure_chat_completion.py +++ b/python/tests/unit/ai/open_ai/services/test_azure_chat_completion.py @@ -1,18 +1,34 @@ # Copyright (c) Microsoft. All rights reserved. 
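The MockResponse helper added in python/tests/unit/ai/ollama/utils.py above only has to provide the small slice of an aiohttp response that these tests exercise: the async context-manager protocol plus awaitable json()/text() and a streaming content property. A minimal sketch of how it stands in for ClientSession.post, assuming the repository's python/ directory is importable; fake_post and read_json are illustrative names, not part of this change:

    import asyncio

    from tests.unit.ai.ollama.utils import MockResponse


    def fake_post(url, **kwargs):
        # Stands in for the patched aiohttp.ClientSession.post used in the tests above.
        return MockResponse(response={"done": True})


    async def read_json(post_callable):
        # The services call post(...) inside "async with", so the replacement only needs
        # __aenter__/__aexit__ and an awaitable json() method, which MockResponse provides.
        async with post_callable("http://localhost:11434/api/chat", json={}) as resp:
            return await resp.json()


    assert asyncio.run(read_json(fake_post)) == {"done": True}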
-from logging import Logger from unittest.mock import AsyncMock, patch +import openai import pytest - -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from httpx import Request, Response +from openai import AsyncAzureOpenAI +from openai.resources.chat.completions import AsyncCompletions as AsyncChatCompletions +from pydantic import ValidationError + +from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.chat_completion_client_base import ( + ChatCompletionClientBase, ) -from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import ( +from semantic_kernel.connectors.ai.open_ai import ( AzureChatCompletion, ) -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import ( - OpenAIChatCompletion, +from semantic_kernel.connectors.ai.open_ai.const import ( + USER_AGENT, +) +from semantic_kernel.connectors.ai.open_ai.exceptions.content_filter_ai_exception import ( + ContentFilterAIException, + ContentFilterCodes, + ContentFilterResultSeverity, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureAISearchDataSources, + AzureChatPromptExecutionSettings, + AzureDataSources, + ExtraBody, ) @@ -21,7 +37,6 @@ def test_azure_chat_completion_init() -> None: endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") # Test successful initialization azure_chat_completion = AzureChatCompletion( @@ -29,13 +44,38 @@ def test_azure_chat_completion_init() -> None: endpoint=endpoint, api_key=api_key, api_version=api_version, - logger=logger, ) - assert azure_chat_completion._endpoint == endpoint - assert azure_chat_completion._api_version == api_version - assert azure_chat_completion._api_type == "azure" - assert isinstance(azure_chat_completion, OpenAIChatCompletion) + assert azure_chat_completion.client is not None + assert isinstance(azure_chat_completion.client, AsyncAzureOpenAI) + assert azure_chat_completion.ai_model_id == deployment_name + assert isinstance(azure_chat_completion, ChatCompletionClientBase) + + +def test_azure_chat_completion_init_base_url() -> None: + deployment_name = "test_deployment" + base_url = "https://test-endpoint.com/openai/deployment/test_deployment" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + + # Custom header for testing + default_headers = {"X-Unit-Test": "test-guid"} + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + base_url=base_url, + api_key=api_key, + api_version=api_version, + default_headers=default_headers, + ) + + assert azure_chat_completion.client is not None + assert isinstance(azure_chat_completion.client, AsyncAzureOpenAI) + assert azure_chat_completion.ai_model_id == deployment_name + assert isinstance(azure_chat_completion, ChatCompletionClientBase) + for key, value in default_headers.items(): + assert key in azure_chat_completion.client.default_headers + assert azure_chat_completion.client.default_headers[key] == value def test_azure_chat_completion_init_with_empty_deployment_name() -> None: @@ -43,17 +83,13 @@ def test_azure_chat_completion_init_with_empty_deployment_name() -> None: endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The deployment name cannot be `None` or empty" - ): + with 
pytest.raises(ValidationError, match="ai_model_id"): AzureChatCompletion( deployment_name="", endpoint=endpoint, api_key=api_key, api_version=api_version, - logger=logger, ) @@ -62,17 +98,13 @@ def test_azure_chat_completion_init_with_empty_api_key() -> None: endpoint = "https://test-endpoint.com" # api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The Azure API key cannot be `None` or empty`" - ): + with pytest.raises(AIException, match="api_key"): AzureChatCompletion( deployment_name=deployment_name, endpoint=endpoint, api_key="", api_version=api_version, - logger=logger, ) @@ -81,17 +113,13 @@ def test_azure_chat_completion_init_with_empty_endpoint() -> None: # endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The Azure endpoint cannot be `None` or empty" - ): + with pytest.raises(ValidationError, match="url"): AzureChatCompletion( deployment_name=deployment_name, endpoint="", api_key=api_key, api_version=api_version, - logger=logger, ) @@ -100,160 +128,436 @@ def test_azure_chat_completion_init_with_invalid_endpoint() -> None: endpoint = "http://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises(ValueError, match="The Azure endpoint must start with https://"): + with pytest.raises(ValidationError, match="url"): AzureChatCompletion( deployment_name=deployment_name, endpoint=endpoint, api_key=api_key, api_version=api_version, - logger=logger, ) -@pytest.mark.asyncio -async def test_azure_chat_completion_call_with_parameters() -> None: - mock_openai = AsyncMock() - with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion.openai", - new=mock_openai, - ): - deployment_name = "test_deployment" - endpoint = "https://test-endpoint.com" - api_key = "test_api_key" - api_type = "azure" - api_version = "2023-03-15-preview" - logger = Logger("test_logger") - prompt = "hello world" - messages = [{"role": "user", "content": prompt}] - complete_request_settings = CompleteRequestSettings() - - azure_chat_completion = AzureChatCompletion( +def test_azure_chat_completion_init_with_base_url() -> None: + deployment_name = "test_deployment" + base_url = "http://test-endpoint.com/openai/deployment/test_deployment" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + + with pytest.raises(ValidationError, match="url"): + AzureChatCompletion( deployment_name=deployment_name, - endpoint=endpoint, + base_url=base_url, api_key=api_key, api_version=api_version, - logger=logger, ) - await azure_chat_completion.complete_async(prompt, complete_request_settings) - mock_openai.ChatCompletion.acreate.assert_called_once_with( - engine=deployment_name, - api_key=api_key, - api_type=api_type, - api_base=endpoint, - api_version=api_version, - organization=None, - messages=messages, - temperature=complete_request_settings.temperature, - top_p=complete_request_settings.top_p, - n=complete_request_settings.number_of_responses, - stream=False, - stop=None, - max_tokens=complete_request_settings.max_tokens, - presence_penalty=complete_request_settings.presence_penalty, - frequency_penalty=complete_request_settings.frequency_penalty, - logit_bias={}, - ) +@pytest.mark.asyncio +@patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) +async def 
test_azure_chat_completion_call_with_parameters(mock_create) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + messages = [{"role": "user", "content": "hello world"}] + complete_prompt_execution_settings = AzureChatPromptExecutionSettings(service_id="test_service_id") + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_version=api_version, + api_key=api_key, + ) + await azure_chat_completion.complete_chat(messages=messages, settings=complete_prompt_execution_settings) + mock_create.assert_awaited_once_with( + model=deployment_name, + frequency_penalty=complete_prompt_execution_settings.frequency_penalty, + logit_bias={}, + max_tokens=complete_prompt_execution_settings.max_tokens, + n=complete_prompt_execution_settings.number_of_responses, + presence_penalty=complete_prompt_execution_settings.presence_penalty, + stream=False, + temperature=complete_prompt_execution_settings.temperature, + top_p=complete_prompt_execution_settings.top_p, + messages=messages, + ) @pytest.mark.asyncio -async def test_azure_chat_completion_call_with_parameters_and_Logit_Bias_Defined() -> None: - mock_openai = AsyncMock() - with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion.openai", - new=mock_openai, - ): - deployment_name = "test_deployment" - endpoint = "https://test-endpoint.com" - api_key = "test_api_key" - api_type = "azure" - api_version = "2023-03-15-preview" - logger = Logger("test_logger") - prompt = "hello world" - messages = [{"role": "user", "content": prompt}] - complete_request_settings = CompleteRequestSettings() - - token_bias = {1: -100} - complete_request_settings.token_selection_biases = token_bias - - azure_chat_completion = AzureChatCompletion( - deployment_name=deployment_name, - endpoint=endpoint, - api_key=api_key, - api_version=api_version, - logger=logger, - ) +@patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) +async def test_azure_chat_completion_call_with_parameters_and_Logit_Bias_Defined( + mock_create, +) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" - await azure_chat_completion.complete_async(prompt, complete_request_settings) + prompt = "hello world" + messages = [{"role": "user", "content": prompt}] + complete_prompt_execution_settings = AzureChatPromptExecutionSettings() - mock_openai.ChatCompletion.acreate.assert_called_once_with( - engine=deployment_name, - api_key=api_key, - api_type=api_type, - api_base=endpoint, - api_version=api_version, - organization=None, - messages=messages, - temperature=complete_request_settings.temperature, - top_p=complete_request_settings.top_p, - n=complete_request_settings.number_of_responses, - stream=False, - stop=None, - max_tokens=complete_request_settings.max_tokens, - presence_penalty=complete_request_settings.presence_penalty, - frequency_penalty=complete_request_settings.frequency_penalty, - logit_bias=token_bias, - ) + token_bias = {"1": -100} + complete_prompt_execution_settings.logit_bias = token_bias + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + ) + + await azure_chat_completion.complete_chat(messages=messages, settings=complete_prompt_execution_settings) + + mock_create.assert_awaited_once_with( + 
model=deployment_name, + messages=messages, + temperature=complete_prompt_execution_settings.temperature, + top_p=complete_prompt_execution_settings.top_p, + n=complete_prompt_execution_settings.number_of_responses, + stream=False, + max_tokens=complete_prompt_execution_settings.max_tokens, + presence_penalty=complete_prompt_execution_settings.presence_penalty, + frequency_penalty=complete_prompt_execution_settings.frequency_penalty, + logit_bias=token_bias, + ) @pytest.mark.asyncio -async def test_azure_chat_completion_call_with_parameters_and_Stop_Defined() -> None: - mock_openai = AsyncMock() - with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion.openai", - new=mock_openai, - ): - deployment_name = "test_deployment" - endpoint = "https://test-endpoint.com" - api_key = "test_api_key" - api_type = "azure" - api_version = "2023-03-15-preview" - logger = Logger("test_logger") - prompt = "hello world" - messages = [{"role": "user", "content": prompt}] - complete_request_settings = CompleteRequestSettings() - - stop = ["!"] - complete_request_settings.stop_sequences = stop - - azure_chat_completion = AzureChatCompletion( - deployment_name=deployment_name, - endpoint=endpoint, - api_key=api_key, - api_version=api_version, - logger=logger, - ) +@patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) +async def test_azure_chat_completion_call_with_parameters_and_Stop_Defined( + mock_create, +) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" - await azure_chat_completion.complete_async(prompt, complete_request_settings) + prompt = "hello world" + messages = [{"role": "user", "content": prompt}] + complete_prompt_execution_settings = AzureChatPromptExecutionSettings() - mock_openai.ChatCompletion.acreate.assert_called_once_with( - api_key=api_key, - api_type=api_type, - api_base=endpoint, - api_version=api_version, - organization=None, - engine=deployment_name, - messages=messages, - temperature=complete_request_settings.temperature, - top_p=complete_request_settings.top_p, - n=complete_request_settings.number_of_responses, - stream=False, - stop=complete_request_settings.stop_sequences, - max_tokens=complete_request_settings.max_tokens, - presence_penalty=complete_request_settings.presence_penalty, - frequency_penalty=complete_request_settings.frequency_penalty, - logit_bias={}, - ) + stop = ["!"] + complete_prompt_execution_settings.stop = stop + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + ) + + await azure_chat_completion.complete(prompt, complete_prompt_execution_settings) + + mock_create.assert_awaited_once_with( + model=deployment_name, + messages=messages, + temperature=complete_prompt_execution_settings.temperature, + top_p=complete_prompt_execution_settings.top_p, + n=complete_prompt_execution_settings.number_of_responses, + stream=False, + stop=complete_prompt_execution_settings.stop, + max_tokens=complete_prompt_execution_settings.max_tokens, + presence_penalty=complete_prompt_execution_settings.presence_penalty, + frequency_penalty=complete_prompt_execution_settings.frequency_penalty, + logit_bias={}, + ) + + +def test_azure_chat_completion_serialize() -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + default_headers = {"X-Test": 
"test"} + + settings = { + "deployment_name": deployment_name, + "endpoint": endpoint, + "api_key": api_key, + "api_version": api_version, + "default_headers": default_headers, + } + + azure_chat_completion = AzureChatCompletion.from_dict(settings) + dumped_settings = azure_chat_completion.to_dict() + assert dumped_settings["ai_model_id"] == settings["deployment_name"] + assert settings["endpoint"] in str(dumped_settings["base_url"]) + assert settings["deployment_name"] in str(dumped_settings["base_url"]) + assert settings["api_key"] == dumped_settings["api_key"] + assert settings["api_version"] == dumped_settings["api_version"] + + # Assert that the default header we added is present in the dumped_settings default headers + for key, value in default_headers.items(): + assert key in dumped_settings["default_headers"] + assert dumped_settings["default_headers"][key] == value + + # Assert that the 'User-agent' header is not present in the dumped_settings default headers + assert USER_AGENT not in dumped_settings["default_headers"] + + +@pytest.mark.asyncio +@patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) +async def test_azure_chat_completion_with_data_call_with_parameters( + mock_create, +) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + prompt = "hello world" + messages_in = [{"role": "user", "content": prompt}] + messages_out = [{"role": "user", "content": prompt}] + + expected_data_settings = { + "dataSources": [ + { + "type": "AzureCognitiveSearch", + "parameters": { + "indexName": "test_index", + "endpoint": "https://test-endpoint-search.com", + "key": "test_key", + }, + } + ] + } + + complete_prompt_execution_settings = AzureChatPromptExecutionSettings(extra_body=expected_data_settings) + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_version=api_version, + api_key=api_key, + use_extensions=True, + ) + + await azure_chat_completion.complete_chat(messages=messages_in, settings=complete_prompt_execution_settings) + + mock_create.assert_awaited_once_with( + model=deployment_name, + messages=messages_out, + temperature=complete_prompt_execution_settings.temperature, + frequency_penalty=complete_prompt_execution_settings.frequency_penalty, + presence_penalty=complete_prompt_execution_settings.presence_penalty, + logit_bias={}, + top_p=complete_prompt_execution_settings.top_p, + n=complete_prompt_execution_settings.number_of_responses, + stream=False, + max_tokens=complete_prompt_execution_settings.max_tokens, + extra_body=expected_data_settings, + ) + + +@pytest.mark.asyncio +@patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) +async def test_azure_chat_completion_call_with_data_parameters_and_function_calling( + mock_create, +) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + prompt = "hello world" + messages = [{"role": "user", "content": prompt}] + + ai_source = AzureAISearchDataSources(indexName="test-index", endpoint="test-endpoint", key="test-key") + extra = ExtraBody(data_sources=[AzureDataSources(type="AzureCognitiveSearch", parameters=ai_source)]) + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + use_extensions=True, + ) + + functions = [{"name": "test-function", 
"description": "test-description"}] + complete_prompt_execution_settings = AzureChatPromptExecutionSettings( + function_call="test-function", + functions=functions, + extra_body=extra, + ) + + await azure_chat_completion.complete_chat( + messages=messages, + settings=complete_prompt_execution_settings, + ) + + expected_data_settings = extra.model_dump(exclude_none=True, by_alias=True) + + mock_create.assert_awaited_once_with( + model=deployment_name, + messages=messages, + temperature=complete_prompt_execution_settings.temperature, + top_p=complete_prompt_execution_settings.top_p, + n=complete_prompt_execution_settings.number_of_responses, + stream=False, + max_tokens=complete_prompt_execution_settings.max_tokens, + presence_penalty=complete_prompt_execution_settings.presence_penalty, + frequency_penalty=complete_prompt_execution_settings.frequency_penalty, + logit_bias=complete_prompt_execution_settings.logit_bias, + extra_body=expected_data_settings, + functions=functions, + function_call=complete_prompt_execution_settings.function_call, + ) + + +@pytest.mark.asyncio +@patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) +async def test_azure_chat_completion_call_with_data_with_parameters_and_Stop_Defined( + mock_create, +) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + messages = [{"role": "user", "content": "hello world"}] + complete_prompt_execution_settings = AzureChatPromptExecutionSettings() + + stop = ["!"] + complete_prompt_execution_settings.stop = stop + + ai_source = AzureAISearchDataSources(indexName="test-index", endpoint="test-endpoint", key="test-key") + extra = ExtraBody(data_sources=[AzureDataSources(type="AzureCognitiveSearch", parameters=ai_source)]) + + complete_prompt_execution_settings.extra_body = extra + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + use_extensions=True, + ) + + await azure_chat_completion.complete_chat(messages, complete_prompt_execution_settings) + + expected_data_settings = extra.model_dump(exclude_none=True, by_alias=True) + + mock_create.assert_awaited_once_with( + model=deployment_name, + messages=messages, + temperature=complete_prompt_execution_settings.temperature, + top_p=complete_prompt_execution_settings.top_p, + n=complete_prompt_execution_settings.number_of_responses, + stream=False, + stop=complete_prompt_execution_settings.stop, + max_tokens=complete_prompt_execution_settings.max_tokens, + presence_penalty=complete_prompt_execution_settings.presence_penalty, + frequency_penalty=complete_prompt_execution_settings.frequency_penalty, + logit_bias={}, + extra_body=expected_data_settings, + ) + + +CONTENT_FILTERED_ERROR_MESSAGE = ( + "The response was filtered due to the prompt triggering Azure OpenAI's content management policy. Please " + "modify your prompt and retry. 
To learn more about our content filtering policies please read our " + "documentation: https://go.microsoft.com/fwlink/?linkid=2198766" +) +CONTENT_FILTERED_ERROR_FULL_MESSAGE = ( + "Error code: 400 - {'error': {'message': \"%s\", 'type': null, 'param': 'prompt', 'code': 'content_filter', " + "'status': 400, 'innererror': {'code': 'ResponsibleAIPolicyViolation', 'content_filter_result': {'hate': " + "{'filtered': True, 'severity': 'high'}, 'self_harm': {'filtered': False, 'severity': 'safe'}, 'sexual': " + "{'filtered': False, 'severity': 'safe'}, 'violence': {'filtered': False, 'severity': 'safe'}}}}}" +) % CONTENT_FILTERED_ERROR_MESSAGE + + +@pytest.mark.asyncio +@patch.object(AsyncChatCompletions, "create") +async def test_azure_chat_completion_content_filtering_raises_correct_exception( + mock_create, +) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + prompt = "some prompt that would trigger the content filtering" + messages = [{"role": "user", "content": prompt}] + complete_prompt_execution_settings = AzureChatPromptExecutionSettings() + + mock_create.side_effect = openai.BadRequestError( + CONTENT_FILTERED_ERROR_FULL_MESSAGE, + response=Response(400, request=Request("POST", endpoint)), + body={ + "message": CONTENT_FILTERED_ERROR_MESSAGE, + "type": None, + "param": "prompt", + "code": "content_filter", + "status": 400, + "innererror": { + "code": "ResponsibleAIPolicyViolation", + "content_filter_result": { + "hate": {"filtered": True, "severity": "high"}, + "self_harm": {"filtered": False, "severity": "safe"}, + "sexual": {"filtered": False, "severity": "safe"}, + "violence": {"filtered": False, "severity": "safe"}, + }, + }, + }, + ) + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + ) + + with pytest.raises(ContentFilterAIException, match="service encountered a content error") as exc_info: + await azure_chat_completion.complete_chat(messages, complete_prompt_execution_settings) + + content_filter_exc = exc_info.value + assert content_filter_exc.param == "prompt" + assert content_filter_exc.content_filter_code == ContentFilterCodes.RESPONSIBLE_AI_POLICY_VIOLATION + assert content_filter_exc.content_filter_result["hate"].filtered + assert content_filter_exc.content_filter_result["hate"].severity == ContentFilterResultSeverity.HIGH + + +@pytest.mark.asyncio +@patch.object(AsyncChatCompletions, "create") +async def test_azure_chat_completion_content_filtering_without_response_code_raises_with_default_code( + mock_create, +) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + prompt = "some prompt that would trigger the content filtering" + messages = [{"role": "user", "content": prompt}] + complete_prompt_execution_settings = AzureChatPromptExecutionSettings() + + mock_create.side_effect = openai.BadRequestError( + CONTENT_FILTERED_ERROR_FULL_MESSAGE, + response=Response(400, request=Request("POST", endpoint)), + body={ + "message": CONTENT_FILTERED_ERROR_MESSAGE, + "type": None, + "param": "prompt", + "code": "content_filter", + "status": 400, + "innererror": { + "content_filter_result": { + "hate": {"filtered": True, "severity": "high"}, + "self_harm": {"filtered": False, "severity": "safe"}, + "sexual": {"filtered": False, "severity": "safe"}, + "violence": {"filtered": False, 
"severity": "safe"}, + }, + }, + }, + ) + + azure_chat_completion = AzureChatCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + ) + + with pytest.raises(ContentFilterAIException, match="service encountered a content error") as exc_info: + await azure_chat_completion.complete_chat(messages, complete_prompt_execution_settings) + + content_filter_exc = exc_info.value + assert content_filter_exc.content_filter_code == ContentFilterCodes.RESPONSIBLE_AI_POLICY_VIOLATION diff --git a/python/tests/unit/ai/open_ai/services/test_azure_text_completion.py b/python/tests/unit/ai/open_ai/services/test_azure_text_completion.py index 1fed027969bf..3303be04d443 100644 --- a/python/tests/unit/ai/open_ai/services/test_azure_text_completion.py +++ b/python/tests/unit/ai/open_ai/services/test_azure_text_completion.py @@ -1,19 +1,20 @@ # Copyright (c) Microsoft. All rights reserved. -from logging import Logger from unittest.mock import AsyncMock, patch import pytest - -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, +from openai import AsyncAzureOpenAI +from openai.resources.completions import AsyncCompletions +from pydantic import ValidationError + +from semantic_kernel.connectors.ai import TextCompletionClientBase +from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAITextPromptExecutionSettings, ) from semantic_kernel.connectors.ai.open_ai.services.azure_text_completion import ( AzureTextCompletion, ) -from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion import ( - OpenAITextCompletion, -) def test_azure_text_completion_init() -> None: @@ -21,7 +22,6 @@ def test_azure_text_completion_init() -> None: endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") # Test successful initialization azure_text_completion = AzureTextCompletion( @@ -29,13 +29,39 @@ def test_azure_text_completion_init() -> None: endpoint=endpoint, api_key=api_key, api_version=api_version, - logger=logger, ) - assert azure_text_completion._endpoint == endpoint - assert azure_text_completion._api_version == api_version - assert azure_text_completion._api_type == "azure" - assert isinstance(azure_text_completion, OpenAITextCompletion) + assert azure_text_completion.client is not None + assert isinstance(azure_text_completion.client, AsyncAzureOpenAI) + assert azure_text_completion.ai_model_id == deployment_name + assert isinstance(azure_text_completion, TextCompletionClientBase) + + +def test_azure_text_completion_init_with_custom_header() -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + + # Custom header for testing + default_headers = {"X-Unit-Test": "test-guid"} + + # Test successful initialization + azure_text_completion = AzureTextCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + default_headers=default_headers, + ) + + assert azure_text_completion.client is not None + assert isinstance(azure_text_completion.client, AsyncAzureOpenAI) + assert azure_text_completion.ai_model_id == deployment_name + assert isinstance(azure_text_completion, TextCompletionClientBase) + for key, value in default_headers.items(): + assert key in 
azure_text_completion.client.default_headers + assert azure_text_completion.client.default_headers[key] == value def test_azure_text_completion_init_with_empty_deployment_name() -> None: @@ -43,17 +69,13 @@ def test_azure_text_completion_init_with_empty_deployment_name() -> None: endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The deployment name cannot be `None` or empty" - ): + with pytest.raises(ValidationError, match="ai_model_id"): AzureTextCompletion( deployment_name="", endpoint=endpoint, api_key=api_key, api_version=api_version, - logger=logger, ) @@ -62,17 +84,13 @@ def test_azure_text_completion_init_with_empty_api_key() -> None: endpoint = "https://test-endpoint.com" # api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The Azure API key cannot be `None` or empty`" - ): + with pytest.raises(AIException, match="api_key"): AzureTextCompletion( deployment_name=deployment_name, endpoint=endpoint, api_key="", api_version=api_version, - logger=logger, ) @@ -81,17 +99,13 @@ def test_azure_text_completion_init_with_empty_endpoint() -> None: # endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The Azure endpoint cannot be `None` or empty" - ): + with pytest.raises(ValidationError, match="endpoint"): AzureTextCompletion( deployment_name=deployment_name, endpoint="", api_key=api_key, api_version=api_version, - logger=logger, ) @@ -100,107 +114,114 @@ def test_azure_text_completion_init_with_invalid_endpoint() -> None: endpoint = "http://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises(ValueError, match="The Azure endpoint must start with https://"): + with pytest.raises(ValidationError, match="https"): AzureTextCompletion( deployment_name=deployment_name, endpoint=endpoint, api_key=api_key, api_version=api_version, - logger=logger, ) @pytest.mark.asyncio -async def test_azure_text_completion_call_with_parameters() -> None: - mock_openai = AsyncMock() - with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion.openai", - new=mock_openai, - ): - deployment_name = "test_deployment" - endpoint = "https://test-endpoint.com" - api_key = "test_api_key" - api_type = "azure" - api_version = "2023-03-15-preview" - logger = Logger("test_logger") - prompt = "hello world" - complete_request_settings = CompleteRequestSettings() - azure_text_completion = AzureTextCompletion( - deployment_name=deployment_name, - endpoint=endpoint, - api_key=api_key, - api_version=api_version, - logger=logger, - ) +@patch.object(AsyncCompletions, "create", new_callable=AsyncMock) +async def test_azure_text_completion_call_with_parameters(mock_create) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" - await azure_text_completion.complete_async(prompt, complete_request_settings) + prompt = "hello world" + complete_prompt_execution_settings = OpenAITextPromptExecutionSettings() + azure_text_completion = AzureTextCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + ) - mock_openai.Completion.acreate.assert_called_once_with( - 
engine=deployment_name, - api_key=api_key, - api_type=api_type, - api_base=endpoint, - api_version=api_version, - organization=None, - prompt=prompt, - temperature=complete_request_settings.temperature, - max_tokens=complete_request_settings.max_tokens, - top_p=complete_request_settings.top_p, - presence_penalty=complete_request_settings.presence_penalty, - frequency_penalty=complete_request_settings.frequency_penalty, - stop=None, - n=complete_request_settings.number_of_responses, - stream=False, - logit_bias={}, - ) + await azure_text_completion.complete(prompt, complete_prompt_execution_settings) + + mock_create.assert_awaited_once_with( + model=deployment_name, + frequency_penalty=complete_prompt_execution_settings.frequency_penalty, + logit_bias={}, + max_tokens=complete_prompt_execution_settings.max_tokens, + n=complete_prompt_execution_settings.number_of_responses, + presence_penalty=complete_prompt_execution_settings.presence_penalty, + stream=False, + temperature=complete_prompt_execution_settings.temperature, + top_p=complete_prompt_execution_settings.top_p, + prompt=prompt, + echo=False, + ) @pytest.mark.asyncio -async def test_azure_text_completion_call_with_parameters_logit_bias_not_none() -> None: - mock_openai = AsyncMock() - with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion.openai", - new=mock_openai, - ): - deployment_name = "test_deployment" - endpoint = "https://test-endpoint.com" - api_key = "test_api_key" - api_type = "azure" - api_version = "2023-03-15-preview" - logger = Logger("test_logger") - prompt = "hello world" - complete_request_settings = CompleteRequestSettings() - - token_bias = {200: 100} - complete_request_settings.token_selection_biases = token_bias - - azure_text_completion = AzureTextCompletion( - deployment_name=deployment_name, - endpoint=endpoint, - api_key=api_key, - api_version=api_version, - logger=logger, - ) +@patch.object(AsyncCompletions, "create", new_callable=AsyncMock) +async def test_azure_text_completion_call_with_parameters_logit_bias_not_none( + mock_create, +) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" - await azure_text_completion.complete_async(prompt, complete_request_settings) + prompt = "hello world" + complete_prompt_execution_settings = OpenAITextPromptExecutionSettings() - mock_openai.Completion.acreate.assert_called_once_with( - engine=deployment_name, - api_key=api_key, - api_type=api_type, - api_base=endpoint, - api_version=api_version, - organization=None, - prompt=prompt, - temperature=complete_request_settings.temperature, - max_tokens=complete_request_settings.max_tokens, - top_p=complete_request_settings.top_p, - presence_penalty=complete_request_settings.presence_penalty, - frequency_penalty=complete_request_settings.frequency_penalty, - stop=None, - n=complete_request_settings.number_of_responses, - stream=False, - logit_bias=token_bias, - ) + token_bias = {"200": 100} + complete_prompt_execution_settings.logit_bias = token_bias + + azure_text_completion = AzureTextCompletion( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + ) + + await azure_text_completion.complete(prompt, complete_prompt_execution_settings) + + mock_create.assert_awaited_once_with( + model=deployment_name, + frequency_penalty=complete_prompt_execution_settings.frequency_penalty, + logit_bias=complete_prompt_execution_settings.logit_bias, + 
max_tokens=complete_prompt_execution_settings.max_tokens, + n=complete_prompt_execution_settings.number_of_responses, + presence_penalty=complete_prompt_execution_settings.presence_penalty, + stream=False, + temperature=complete_prompt_execution_settings.temperature, + top_p=complete_prompt_execution_settings.top_p, + prompt=prompt, + echo=False, + ) + + +def test_azure_text_completion_serialize() -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + default_headers = {"X-Test": "test"} + + settings = { + "deployment_name": deployment_name, + "endpoint": endpoint, + "api_key": api_key, + "api_version": api_version, + "default_headers": default_headers, + } + + azure_text_completion = AzureTextCompletion.from_dict(settings) + dumped_settings = azure_text_completion.to_dict() + assert dumped_settings["ai_model_id"] == settings["deployment_name"] + assert settings["endpoint"] in str(dumped_settings["base_url"]) + assert settings["deployment_name"] in str(dumped_settings["base_url"]) + assert settings["api_key"] == dumped_settings["api_key"] + assert settings["api_version"] == dumped_settings["api_version"] + + # Assert that the default header we added is present in the dumped_settings default headers + for key, value in default_headers.items(): + assert key in dumped_settings["default_headers"] + assert dumped_settings["default_headers"][key] == value diff --git a/python/tests/unit/ai/open_ai/services/test_azure_text_embedding.py b/python/tests/unit/ai/open_ai/services/test_azure_text_embedding.py index af7b4b94c937..76930f4724ac 100644 --- a/python/tests/unit/ai/open_ai/services/test_azure_text_embedding.py +++ b/python/tests/unit/ai/open_ai/services/test_azure_text_embedding.py @@ -1,16 +1,19 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger from unittest.mock import AsyncMock, call, patch import pytest +from openai import AsyncAzureOpenAI +from openai.resources.embeddings import AsyncEmbeddings +from pydantic import ValidationError +from semantic_kernel.connectors.ai.ai_exception import AIException +from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import ( + EmbeddingGeneratorBase, +) from semantic_kernel.connectors.ai.open_ai.services.azure_text_embedding import ( AzureTextEmbedding, ) -from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import ( - OpenAITextEmbedding, -) def test_azure_text_embedding_init() -> None: @@ -18,7 +21,6 @@ def test_azure_text_embedding_init() -> None: endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") # Test successful initialization azure_text_embedding = AzureTextEmbedding( @@ -26,13 +28,12 @@ def test_azure_text_embedding_init() -> None: endpoint=endpoint, api_key=api_key, api_version=api_version, - logger=logger, ) - assert azure_text_embedding._endpoint == endpoint - assert azure_text_embedding._api_version == api_version - assert azure_text_embedding._api_type == "azure" - assert isinstance(azure_text_embedding, OpenAITextEmbedding) + assert azure_text_embedding.client is not None + assert isinstance(azure_text_embedding.client, AsyncAzureOpenAI) + assert azure_text_embedding.ai_model_id == deployment_name + assert isinstance(azure_text_embedding, EmbeddingGeneratorBase) def test_azure_text_embedding_init_with_empty_deployment_name() -> None: @@ -40,17 +41,13 @@ def test_azure_text_embedding_init_with_empty_deployment_name() -> None: endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The deployment name cannot be `None` or empty" - ): + with pytest.raises(ValidationError, match="ai_model_id"): AzureTextEmbedding( deployment_name="", endpoint=endpoint, api_key=api_key, api_version=api_version, - logger=logger, ) @@ -59,17 +56,13 @@ def test_azure_text_embedding_init_with_empty_api_key() -> None: endpoint = "https://test-endpoint.com" # api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The Azure API key cannot be `None` or empty`" - ): + with pytest.raises(AIException, match="api_key"): AzureTextEmbedding( deployment_name=deployment_name, endpoint=endpoint, api_key="", api_version=api_version, - logger=logger, ) @@ -78,17 +71,13 @@ def test_azure_text_embedding_init_with_empty_endpoint() -> None: # endpoint = "https://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises( - ValueError, match="The Azure endpoint cannot be `None` or empty" - ): + with pytest.raises(ValidationError, match="endpoint"): AzureTextEmbedding( deployment_name=deployment_name, endpoint="", api_key=api_key, api_version=api_version, - logger=logger, ) @@ -97,101 +86,98 @@ def test_azure_text_embedding_init_with_invalid_endpoint() -> None: endpoint = "http://test-endpoint.com" api_key = "test_api_key" api_version = "2023-03-15-preview" - logger = Logger("test_logger") - with pytest.raises(ValueError, match="The Azure endpoint must start with https://"): + with pytest.raises(ValidationError, match="https"): AzureTextEmbedding( deployment_name=deployment_name, endpoint=endpoint, 
api_key=api_key, api_version=api_version, - logger=logger, ) +def test_azure_text_embedding_init_with_from_dict() -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + default_headers = {"test_header": "test_value"} + + settings = { + "deployment_name": deployment_name, + "endpoint": endpoint, + "api_key": api_key, + "api_version": api_version, + "default_headers": default_headers, + } + + azure_text_embedding = AzureTextEmbedding.from_dict(settings=settings) + + assert azure_text_embedding.client is not None + assert isinstance(azure_text_embedding.client, AsyncAzureOpenAI) + assert azure_text_embedding.ai_model_id == deployment_name + assert isinstance(azure_text_embedding, EmbeddingGeneratorBase) + assert endpoint in str(azure_text_embedding.client.base_url) + assert azure_text_embedding.client.api_key == api_key + + # Assert that the default header we added is present in the client's default headers + for key, value in default_headers.items(): + assert key in azure_text_embedding.client.default_headers + assert azure_text_embedding.client.default_headers[key] == value + + @pytest.mark.asyncio -async def test_azure_text_embedding_calls_with_parameters() -> None: - mock_openai = AsyncMock() - with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding.openai", - new=mock_openai, - ): - deployment_name = "test_deployment" - endpoint = "https://test-endpoint.com" - api_key = "test_api_key" - api_type = "azure" - api_version = "2023-03-15-preview" - logger = Logger("test_logger") - texts = ["hello world", "goodbye world"] - - azure_text_embedding = AzureTextEmbedding( - deployment_name=deployment_name, - endpoint=endpoint, - api_key=api_key, - api_version=api_version, - logger=logger, - ) +@patch.object(AsyncEmbeddings, "create", new_callable=AsyncMock) +async def test_azure_text_embedding_calls_with_parameters(mock_create) -> None: + deployment_name = "test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + texts = ["hello world", "goodbye world"] + + azure_text_embedding = AzureTextEmbedding( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + ) - await azure_text_embedding.generate_embeddings_async(texts) + await azure_text_embedding.generate_embeddings(texts) - mock_openai.Embedding.acreate.assert_called_once_with( - engine=deployment_name, - api_key=api_key, - api_type=api_type, - api_base=endpoint, - api_version=api_version, - organization=None, - input=texts, - ) + mock_create.assert_awaited_once_with( + input=texts, + model=deployment_name, + ) @pytest.mark.asyncio -async def test_azure_text_embedding_calls_with_batches() -> None: - mock_openai = AsyncMock() - with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding.openai", - new=mock_openai, - ): - deployment_name = "test_deployment" - endpoint = "https://test-endpoint.com" - api_key = "test_api_key" - api_type = "azure" - api_version = "2023-03-15-preview" - logger = Logger("test_logger") - texts = [i for i in range(0, 5)] - - azure_text_embedding = AzureTextEmbedding( - deployment_name=deployment_name, - endpoint=endpoint, - api_key=api_key, - api_version=api_version, - logger=logger, - ) +@patch.object(AsyncEmbeddings, "create", new_callable=AsyncMock) +async def test_azure_text_embedding_calls_with_batches(mock_create) -> None: + deployment_name = 
"test_deployment" + endpoint = "https://test-endpoint.com" + api_key = "test_api_key" + api_version = "2023-03-15-preview" + texts = [i for i in range(0, 5)] - await azure_text_embedding.generate_embeddings_async(texts, batch_size=3) - - mock_openai.assert_has_calls( - [ - call.Embedding.acreate( - engine=deployment_name, - api_key=api_key, - api_type=api_type, - api_base=endpoint, - api_version=api_version, - organization=None, - input=texts[0:3], - ), - call.Embedding.acreate().__getitem__("data"), - call.Embedding.acreate().__getitem__().__iter__(), - call.Embedding.acreate( - engine=deployment_name, - api_key=api_key, - api_type=api_type, - api_base=endpoint, - api_version=api_version, - organization=None, - input=texts[3:5], - ), - ], - any_order=False, - ) + azure_text_embedding = AzureTextEmbedding( + deployment_name=deployment_name, + endpoint=endpoint, + api_key=api_key, + api_version=api_version, + ) + + await azure_text_embedding.generate_embeddings(texts, batch_size=3) + + mock_create.assert_has_awaits( + [ + call( + model=deployment_name, + input=texts[0:3], + ), + call( + model=deployment_name, + input=texts[3:5], + ), + ], + any_order=False, + ) diff --git a/python/tests/unit/ai/open_ai/services/test_openai_chat_completion.py b/python/tests/unit/ai/open_ai/services/test_openai_chat_completion.py new file mode 100644 index 000000000000..7df14f3013f8 --- /dev/null +++ b/python/tests/unit/ai/open_ai/services/test_openai_chat_completion.py @@ -0,0 +1,113 @@ +# Copyright (c) Microsoft. All rights reserved. + + +import pytest +from pydantic import ValidationError + +from semantic_kernel.connectors.ai import ChatCompletionClientBase +from semantic_kernel.connectors.ai.open_ai.const import ( + USER_AGENT, +) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import ( + OpenAIChatCompletion, +) + + +def test_open_ai_chat_completion_init() -> None: + ai_model_id = "test_model_id" + api_key = "test_api_key" + + # Test successful initialization + open_ai_chat_completion = OpenAIChatCompletion( + ai_model_id=ai_model_id, + api_key=api_key, + ) + + assert open_ai_chat_completion.ai_model_id == ai_model_id + assert isinstance(open_ai_chat_completion, ChatCompletionClientBase) + + +def test_open_ai_chat_completion_init_with_default_header() -> None: + ai_model_id = "test_model_id" + api_key = "test_api_key" + default_headers = {"X-Unit-Test": "test-guid"} + + # Test successful initialization + open_ai_chat_completion = OpenAIChatCompletion( + ai_model_id=ai_model_id, + api_key=api_key, + default_headers=default_headers, + ) + + assert open_ai_chat_completion.ai_model_id == ai_model_id + assert isinstance(open_ai_chat_completion, ChatCompletionClientBase) + + # Assert that the default header we added is present in the client's default headers + for key, value in default_headers.items(): + assert key in open_ai_chat_completion.client.default_headers + assert open_ai_chat_completion.client.default_headers[key] == value + + +def test_open_ai_chat_completion_init_with_empty_model_id() -> None: + # ai_model_id = "test_model_id" + api_key = "test_api_key" + + with pytest.raises(ValidationError, match="ai_model_id"): + OpenAIChatCompletion( + ai_model_id="", + api_key=api_key, + ) + + +def test_open_ai_chat_completion_init_with_empty_api_key() -> None: + ai_model_id = "test_model_id" + # api_key = "test_api_key" + + with pytest.raises(ValidationError, match="api_key"): + OpenAIChatCompletion( + ai_model_id=ai_model_id, + api_key="", + ) + + +def 
test_open_ai_chat_completion_serialize() -> None: + ai_model_id = "test_model_id" + api_key = "test_api_key" + default_headers = {"X-Unit-Test": "test-guid"} + + settings = { + "ai_model_id": ai_model_id, + "api_key": api_key, + "default_headers": default_headers, + } + + open_ai_chat_completion = OpenAIChatCompletion.from_dict(settings) + dumped_settings = open_ai_chat_completion.to_dict() + assert dumped_settings["ai_model_id"] == ai_model_id + assert dumped_settings["api_key"] == api_key + # Assert that the default header we added is present in the dumped_settings default headers + for key, value in default_headers.items(): + assert key in dumped_settings["default_headers"] + assert dumped_settings["default_headers"][key] == value + # Assert that the 'User-agent' header is not present in the dumped_settings default headers + assert USER_AGENT not in dumped_settings["default_headers"] + + +def test_open_ai_chat_completion_serialize_with_org_id() -> None: + ai_model_id = "test_model_id" + api_key = "test_api_key" + org_id = "test_org_id" + + settings = { + "ai_model_id": ai_model_id, + "api_key": api_key, + "org_id": org_id, + } + + open_ai_chat_completion = OpenAIChatCompletion.from_dict(settings) + dumped_settings = open_ai_chat_completion.to_dict() + assert dumped_settings["ai_model_id"] == ai_model_id + assert dumped_settings["api_key"] == api_key + assert dumped_settings["org_id"] == org_id + # Assert that the 'User-agent' header is not present in the dumped_settings default headers + assert USER_AGENT not in dumped_settings["default_headers"] diff --git a/python/tests/unit/ai/open_ai/services/test_openai_text_completion.py b/python/tests/unit/ai/open_ai/services/test_openai_text_completion.py new file mode 100644 index 000000000000..78a30e6b2204 --- /dev/null +++ b/python/tests/unit/ai/open_ai/services/test_openai_text_completion.py @@ -0,0 +1,104 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ + +import pytest +from pydantic import ValidationError + +from semantic_kernel.connectors.ai import TextCompletionClientBase +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion import ( + OpenAITextCompletion, +) + + +def test_open_ai_text_completion_init() -> None: + ai_model_id = "test_model_id" + api_key = "test_api_key" + + # Test successful initialization + open_ai_text_completion = OpenAITextCompletion( + ai_model_id=ai_model_id, + api_key=api_key, + ) + + assert open_ai_text_completion.ai_model_id == ai_model_id + assert isinstance(open_ai_text_completion, TextCompletionClientBase) + + +def test_open_ai_text_completion_init_with_default_header() -> None: + ai_model_id = "test_model_id" + api_key = "test_api_key" + default_headers = {"X-Unit-Test": "test-guid"} + + # Test successful initialization + open_ai_text_completion = OpenAITextCompletion( + ai_model_id=ai_model_id, + api_key=api_key, + default_headers=default_headers, + ) + + assert open_ai_text_completion.ai_model_id == ai_model_id + assert isinstance(open_ai_text_completion, TextCompletionClientBase) + for key, value in default_headers.items(): + assert key in open_ai_text_completion.client.default_headers + assert open_ai_text_completion.client.default_headers[key] == value + + +def test_open_ai_text_completion_init_with_empty_model_id() -> None: + # ai_model_id = "test_model_id" + api_key = "test_api_key" + + with pytest.raises(ValidationError, match="ai_model_id"): + OpenAITextCompletion( + ai_model_id="", + api_key=api_key, + ) + + +def test_open_ai_text_completion_init_with_empty_api_key() -> None: + ai_model_id = "test_model_id" + # api_key = "test_api_key" + + with pytest.raises(ValidationError, match="api_key"): + OpenAITextCompletion( + ai_model_id=ai_model_id, + api_key="", + ) + + +def test_open_ai_text_completion_serialize() -> None: + ai_model_id = "test_model_id" + api_key = "test_api_key" + default_headers = {"X-Unit-Test": "test-guid"} + + settings = { + "ai_model_id": ai_model_id, + "api_key": api_key, + "default_headers": default_headers, + } + + open_ai_text_completion = OpenAITextCompletion.from_dict(settings) + dumped_settings = open_ai_text_completion.to_dict() + assert dumped_settings["ai_model_id"] == ai_model_id + assert dumped_settings["api_key"] == api_key + # Assert that the default header we added is present in the dumped_settings default headers + for key, value in default_headers.items(): + assert key in dumped_settings["default_headers"] + assert dumped_settings["default_headers"][key] == value + + +def test_open_ai_text_completion_serialize_with_org_id() -> None: + ai_model_id = "test_model_id" + api_key = "test_api_key" + org_id = "test_org_id" + + settings = { + "ai_model_id": ai_model_id, + "api_key": api_key, + "org_id": org_id, + } + + open_ai_text_completion = OpenAITextCompletion.from_dict(settings) + dumped_settings = open_ai_text_completion.to_dict() + assert dumped_settings["ai_model_id"] == ai_model_id + assert dumped_settings["api_key"] == api_key + assert dumped_settings["org_id"] == org_id diff --git a/python/tests/unit/ai/open_ai/test_openai_request_settings.py b/python/tests/unit/ai/open_ai/test_openai_request_settings.py new file mode 100644 index 000000000000..44e21cc4cd0a --- /dev/null +++ b/python/tests/unit/ai/open_ai/test_openai_request_settings.py @@ -0,0 +1,261 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import pytest +from pydantic import ValidationError + +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( + AzureAISearchDataSources, + AzureChatPromptExecutionSettings, + AzureDataSources, + ExtraBody, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, + OpenAITextPromptExecutionSettings, +) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + + +def test_default_openai_chat_prompt_execution_settings(): + settings = OpenAIChatPromptExecutionSettings() + assert settings.temperature == 0.0 + assert settings.top_p == 1.0 + assert settings.presence_penalty == 0.0 + assert settings.frequency_penalty == 0.0 + assert settings.max_tokens == 256 + assert settings.stop is None + assert settings.number_of_responses == 1 + assert settings.logit_bias == {} + assert settings.messages is None + + +def test_custom_openai_chat_prompt_execution_settings(): + settings = OpenAIChatPromptExecutionSettings( + temperature=0.5, + top_p=0.5, + presence_penalty=0.5, + frequency_penalty=0.5, + max_tokens=128, + stop="\n", + number_of_responses=2, + logit_bias={"1": 1}, + messages=[{"role": "system", "content": "Hello"}], + ) + assert settings.temperature == 0.5 + assert settings.top_p == 0.5 + assert settings.presence_penalty == 0.5 + assert settings.frequency_penalty == 0.5 + assert settings.max_tokens == 128 + assert settings.stop == "\n" + assert settings.number_of_responses == 2 + assert settings.logit_bias == {"1": 1} + assert settings.messages == [{"role": "system", "content": "Hello"}] + + +def test_openai_chat_prompt_execution_settings_from_default_completion_config(): + settings = PromptExecutionSettings(service_id="test_service") + chat_settings = OpenAIChatPromptExecutionSettings.from_prompt_execution_settings(settings) + assert chat_settings.service_id == "test_service" + assert chat_settings.temperature == 0.0 + assert chat_settings.top_p == 1.0 + assert chat_settings.presence_penalty == 0.0 + assert chat_settings.frequency_penalty == 0.0 + assert chat_settings.max_tokens == 256 + assert chat_settings.stop is None + assert chat_settings.number_of_responses == 1 + assert chat_settings.logit_bias == {} + + +def test_openai_chat_prompt_execution_settings_from_openai_prompt_execution_settings(): + chat_settings = OpenAIChatPromptExecutionSettings(service_id="test_service", temperature=1.0) + new_settings = OpenAIChatPromptExecutionSettings(service_id="test_2", temperature=0.0) + chat_settings.update_from_prompt_execution_settings(new_settings) + assert chat_settings.service_id == "test_2" + assert chat_settings.temperature == 0.0 + + +def test_openai_text_prompt_execution_settings_validation(): + with pytest.raises(ValidationError, match="best_of must be greater than number_of_responses"): + OpenAITextPromptExecutionSettings(best_of=1, number_of_responses=2) + + +def test_openai_text_prompt_execution_settings_validation_manual(): + text_oai = OpenAITextPromptExecutionSettings(best_of=1, number_of_responses=1) + with pytest.raises(ValidationError, match="best_of must be greater than number_of_responses"): + text_oai.number_of_responses = 2 + + +def test_openai_chat_prompt_execution_settings_from_custom_completion_config(): + settings = PromptExecutionSettings( + service_id="test_service", + extension_data={ + "temperature": 0.5, + "top_p": 0.5, + "presence_penalty": 0.5, + "frequency_penalty": 0.5, + 
"max_tokens": 128, + "stop": ["\n"], + "number_of_responses": 2, + "logprobs": 1, + "logit_bias": {"1": 1}, + "messages": [{"role": "system", "content": "Hello"}], + }, + ) + chat_settings = OpenAIChatPromptExecutionSettings.from_prompt_execution_settings(settings) + assert chat_settings.temperature == 0.5 + assert chat_settings.top_p == 0.5 + assert chat_settings.presence_penalty == 0.5 + assert chat_settings.frequency_penalty == 0.5 + assert chat_settings.max_tokens == 128 + assert chat_settings.stop == ["\n"] + assert chat_settings.number_of_responses == 2 + assert chat_settings.logit_bias == {"1": 1} + + +def test_openai_chat_prompt_execution_settings_from_custom_completion_config_with_none(): + settings = PromptExecutionSettings( + service_id="test_service", + extension_data={ + "temperature": 0.5, + "top_p": 0.5, + "presence_penalty": 0.5, + "frequency_penalty": 0.5, + "max_tokens": 128, + "stop": ["\n"], + "number_of_responses": 2, + "functions": None, + "logit_bias": {"1": 1}, + "messages": [{"role": "system", "content": "Hello"}], + }, + ) + chat_settings = OpenAIChatPromptExecutionSettings.from_prompt_execution_settings(settings) + assert chat_settings.temperature == 0.5 + assert chat_settings.top_p == 0.5 + assert chat_settings.presence_penalty == 0.5 + assert chat_settings.frequency_penalty == 0.5 + assert chat_settings.max_tokens == 128 + assert chat_settings.stop == ["\n"] + assert chat_settings.number_of_responses == 2 + assert chat_settings.logit_bias == {"1": 1} + assert chat_settings.functions is None + + +def test_openai_chat_prompt_execution_settings_from_custom_completion_config_with_functions(): + settings = PromptExecutionSettings( + service_id="test_service", + extension_data={ + "temperature": 0.5, + "top_p": 0.5, + "presence_penalty": 0.5, + "frequency_penalty": 0.5, + "max_tokens": 128, + "stop": ["\n"], + "number_of_responses": 2, + "functions": [{}], + "function_call": "auto", + "logit_bias": {"1": 1}, + "messages": [{"role": "system", "content": "Hello"}], + }, + ) + chat_settings = OpenAIChatPromptExecutionSettings.from_prompt_execution_settings(settings) + assert chat_settings.temperature == 0.5 + assert chat_settings.top_p == 0.5 + assert chat_settings.presence_penalty == 0.5 + assert chat_settings.frequency_penalty == 0.5 + assert chat_settings.max_tokens == 128 + assert chat_settings.stop == ["\n"] + assert chat_settings.number_of_responses == 2 + assert chat_settings.logit_bias == {"1": 1} + assert chat_settings.functions == [{}] + + +def test_create_options(): + settings = OpenAIChatPromptExecutionSettings( + temperature=0.5, + top_p=0.5, + presence_penalty=0.5, + frequency_penalty=0.5, + max_tokens=128, + stop=["\n"], + number_of_responses=2, + logit_bias={"1": 1}, + messages=[{"role": "system", "content": "Hello"}], + function_call="auto", + ) + options = settings.prepare_settings_dict() + assert options["temperature"] == 0.5 + assert options["top_p"] == 0.5 + assert options["presence_penalty"] == 0.5 + assert options["frequency_penalty"] == 0.5 + assert options["max_tokens"] == 128 + assert options["stop"] == ["\n"] + assert options["n"] == 2 + assert options["logit_bias"] == {"1": 1} + assert not options["stream"] + + +def test_create_options_azure_data(): + az_source = AzureAISearchDataSources(indexName="test-index", endpoint="test-endpoint", key="test-key") + az_data = AzureDataSources(type="AzureCognitiveSearch", parameters=az_source) + extra = ExtraBody(dataSources=[az_data]) + settings = AzureChatPromptExecutionSettings(extra_body=extra) + 
options = settings.prepare_settings_dict() + assert options["extra_body"] == extra.model_dump(exclude_none=True, by_alias=True) + + +def test_azure_open_ai_chat_prompt_execution_settings_with_cosmosdb_data_sources(): # noqa: E501 + input_dict = { + "messages": [{"role": "system", "content": "Hello"}], + "extra_body": { + "dataSources": [ + { + "type": "AzureCosmosDB", + "parameters": { + "authentication": { + "type": "ConnectionString", + "connectionString": "mongodb+srv://onyourdatatest:{password}$@{cluster-name}.mongocluster.cosmos.azure.com/?tls=true&authMechanism=SCRAM-SHA-256&retrywrites=false&maxIdleTimeMS=120000", + }, + "databaseName": "vectordb", + "containerName": "azuredocs", + "indexName": "azuredocindex", + "embeddingDependency": { + "type": "DeploymentName", + "deploymentName": "{embedding deployment name}", + }, + "fieldsMapping": {"vectorFields": ["contentvector"]}, + }, + } + ] + }, + } + settings = AzureChatPromptExecutionSettings.model_validate(input_dict, strict=True, from_attributes=True) + assert settings.extra_body["dataSources"][0]["type"] == "AzureCosmosDB" + + +def test_azure_open_ai_chat_prompt_execution_settings_with_aisearch_data_sources(): # noqa: E501 + input_dict = { + "messages": [{"role": "system", "content": "Hello"}], + "extra_body": { + "dataSources": [ + { + "type": "AzureCognitiveSearch", + "parameters": { + "authentication": { + "type": "APIKey", + "key": "****", + }, + "endpoint": "https://****.search.windows.net/", + "indexName": "azuredocindex", + "queryType": "vector", + "embeddingDependency": { + "type": "DeploymentName", + "deploymentName": "{embedding deployment name}", + }, + "fieldsMapping": {"vectorFields": ["contentvector"]}, + }, + } + ] + }, + } + settings = AzureChatPromptExecutionSettings.model_validate(input_dict, strict=True, from_attributes=True) + assert settings.extra_body["dataSources"][0]["type"] == "AzureCognitiveSearch" diff --git a/python/tests/unit/ai/test_ai_request_settings.py b/python/tests/unit/ai/test_ai_request_settings.py new file mode 100644 index 000000000000..5b8b5f974570 --- /dev/null +++ b/python/tests/unit/ai/test_ai_request_settings.py @@ -0,0 +1,18 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.connectors.ai import ( + PromptExecutionSettings, +) + + +def test_default_complete_prompt_execution_settings(): + settings = PromptExecutionSettings() + assert settings.service_id is None + assert settings.extension_data == {} + + +def test_custom_complete_prompt_execution_settings(): + ext_data = {"test": "test"} + settings = PromptExecutionSettings(service_id="test", extension_data=ext_data) + assert settings.service_id == "test" + assert settings.extension_data["test"] == "test" diff --git a/python/tests/unit/ai/test_request_settings.py b/python/tests/unit/ai/test_request_settings.py deleted file mode 100644 index 9d1a8d6701c9..000000000000 --- a/python/tests/unit/ai/test_request_settings.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from semantic_kernel.connectors.ai.chat_request_settings import ChatRequestSettings -from semantic_kernel.connectors.ai.complete_request_settings import ( - CompleteRequestSettings, -) - - -def test_default_complete_request_settings(): - settings = CompleteRequestSettings() - assert settings.temperature == 0.0 - assert settings.top_p == 1.0 - assert settings.presence_penalty == 0.0 - assert settings.frequency_penalty == 0.0 - assert settings.max_tokens == 256 - assert settings.stop_sequences == [] - assert settings.number_of_responses == 1 - assert settings.logprobs == 0 - assert settings.token_selection_biases == {} - assert settings.chat_system_prompt == "Assistant is a large language model." - - -def test_custom_complete_request_settings(): - settings = CompleteRequestSettings( - temperature=0.5, - top_p=0.5, - presence_penalty=0.5, - frequency_penalty=0.5, - max_tokens=128, - stop_sequences=["\n"], - number_of_responses=2, - logprobs=1, - token_selection_biases={1: 1}, - chat_system_prompt="Hello", - ) - assert settings.temperature == 0.5 - assert settings.top_p == 0.5 - assert settings.presence_penalty == 0.5 - assert settings.frequency_penalty == 0.5 - assert settings.max_tokens == 128 - assert settings.stop_sequences == ["\n"] - assert settings.number_of_responses == 2 - assert settings.logprobs == 1 - assert settings.token_selection_biases == {1: 1} - assert settings.chat_system_prompt == "Hello" - - -def test_default_chat_request_settings(): - settings = ChatRequestSettings() - assert settings.temperature == 0.0 - assert settings.top_p == 1.0 - assert settings.presence_penalty == 0.0 - assert settings.frequency_penalty == 0.0 - assert settings.max_tokens == 256 - assert settings.stop_sequences == [] - assert settings.number_of_responses == 1 - assert settings.token_selection_biases == {} - - -def test_complete_request_settings_from_default_completion_config(): - settings = CompleteRequestSettings() - chat_settings = ChatRequestSettings.from_completion_config(settings) - chat_settings = ChatRequestSettings() - assert chat_settings.temperature == 0.0 - assert chat_settings.top_p == 1.0 - assert chat_settings.presence_penalty == 0.0 - assert chat_settings.frequency_penalty == 0.0 - assert chat_settings.max_tokens == 256 - assert chat_settings.stop_sequences == [] - assert chat_settings.number_of_responses == 1 - assert chat_settings.token_selection_biases == {} - - -def test_chat_request_settings_from_custom_completion_config(): - settings = CompleteRequestSettings( - temperature=0.5, - top_p=0.5, - presence_penalty=0.5, - frequency_penalty=0.5, - max_tokens=128, - stop_sequences=["\n"], - number_of_responses=2, - logprobs=1, - token_selection_biases={1: 1}, - chat_system_prompt="Hello", - ) - chat_settings = ChatRequestSettings.from_completion_config(settings) - assert chat_settings.temperature == 0.5 - assert chat_settings.top_p == 0.5 - assert chat_settings.presence_penalty == 0.5 - assert chat_settings.frequency_penalty == 0.5 - assert chat_settings.max_tokens == 128 - assert chat_settings.stop_sequences == ["\n"] - assert chat_settings.number_of_responses == 2 - assert chat_settings.token_selection_biases == {1: 1} diff --git a/python/tests/unit/core_plugins/test_file_io_plugin.py b/python/tests/unit/core_plugins/test_file_io_plugin.py new file mode 100644 index 000000000000..46ee463dd564 --- /dev/null +++ b/python/tests/unit/core_plugins/test_file_io_plugin.py @@ -0,0 +1,96 @@ +import os +import tempfile + +import pytest + +from semantic_kernel import Kernel +from 
semantic_kernel.core_plugins.file_io_plugin import FileIOPlugin +from semantic_kernel.orchestration.context_variables import ContextVariables + + +def test_can_be_instantiated(): + plugin = FileIOPlugin() + assert plugin is not None + + +def test_can_be_imported(): + kernel = Kernel() + assert kernel.import_plugin(FileIOPlugin(), "file") + assert kernel.plugins["file"] is not None + assert kernel.plugins["file"].name == "file" + assert kernel.plugins["file"]["readAsync"] is not None + + +@pytest.mark.asyncio +async def test_can_read(): + plugin = FileIOPlugin() + fp = None + try: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as fp: + fp.write("Hello, world!") + fp.flush() + + content = await plugin.read(fp.name) + assert content == "Hello, world!" + finally: + if fp is not None: + os.remove(fp.name) + + +@pytest.mark.asyncio +async def test_cannot_read(): + plugin = FileIOPlugin() + filepath = None + with tempfile.NamedTemporaryFile(mode="w", delete=True) as fp: + fp.write("Hello, world!") + filepath = fp.name + + with pytest.raises(AssertionError): + await plugin.read(filepath) + + +@pytest.mark.asyncio +async def test_can_write(context_factory): + plugin = FileIOPlugin() + fp = None + try: + with tempfile.NamedTemporaryFile(mode="r", delete=False) as fp: + context_variables = ContextVariables() + + context_variables.set("path", fp.name) + context_variables.set("content", "Hello, world!") + + context = context_factory(context_variables) + + await plugin.write(context) + + content = fp.read() + + assert content == "Hello, world!" + finally: + if fp is not None: + os.remove(fp.name) + + +@pytest.mark.asyncio +async def test_cannot_write(context_factory): + plugin = FileIOPlugin() + fp = None + try: + with tempfile.NamedTemporaryFile(mode="r", delete=False) as fp: + os.chmod(fp.name, 0o500) + + context_variables = ContextVariables() + + context_variables.set("path", fp.name) + context_variables.set("content", "Hello, world!") + + context = context_factory(context_variables) + + with pytest.raises(PermissionError): + await plugin.write(context) + + os.chmod(fp.name, 0o777) + finally: + if fp is not None: + os.remove(fp.name) diff --git a/python/tests/unit/core_plugins/test_http_plugin.py b/python/tests/unit/core_plugins/test_http_plugin.py new file mode 100644 index 000000000000..7820c38ddfc6 --- /dev/null +++ b/python/tests/unit/core_plugins/test_http_plugin.py @@ -0,0 +1,109 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import patch + +import pytest + +from semantic_kernel import Kernel +from semantic_kernel.core_plugins import HttpPlugin +from semantic_kernel.orchestration.context_variables import ContextVariables + + +@pytest.mark.asyncio +async def test_it_can_be_instantiated(): + plugin = HttpPlugin() + assert plugin is not None + + +@pytest.mark.asyncio +async def test_it_can_be_imported(): + kernel = Kernel() + plugin = HttpPlugin() + assert kernel.import_plugin(plugin, "http") + assert kernel.plugins["http"] is not None + assert kernel.plugins["http"].name == "http" + assert kernel.plugins["http"]["getAsync"] is not None + assert kernel.plugins["http"]["postAsync"] is not None + + +@patch("aiohttp.ClientSession.get") +@pytest.mark.asyncio +async def test_get(mock_get): + mock_get.return_value.__aenter__.return_value.text.return_value = "Hello" + mock_get.return_value.__aenter__.return_value.status = 200 + + plugin = HttpPlugin() + response = await plugin.get("https://example.org/get") + assert response == "Hello" + + +@pytest.mark.asyncio +async def test_get_none_url(): + plugin = HttpPlugin() + with pytest.raises(ValueError): + await plugin.get(None) + + +@patch("aiohttp.ClientSession.post") +@pytest.mark.asyncio +async def test_post(mock_post, context_factory): + mock_post.return_value.__aenter__.return_value.text.return_value = "Hello World !" + mock_post.return_value.__aenter__.return_value.status = 200 + + plugin = HttpPlugin() + context_variables = ContextVariables() + context_variables.set("body", "{message: 'Hello, world!'}") + context = context_factory(context_variables) + response = await plugin.post("https://example.org/post", context) + assert response == "Hello World !" + + +@patch("aiohttp.ClientSession.post") +@pytest.mark.asyncio +async def test_post_nobody(mock_post, context_factory): + mock_post.return_value.__aenter__.return_value.text.return_value = "Hello World !" + mock_post.return_value.__aenter__.return_value.status = 200 + + plugin = HttpPlugin() + context_variables = ContextVariables() + context = context_factory(context_variables) + response = await plugin.post("https://example.org/post", context) + assert response == "Hello World !" + + +@patch("aiohttp.ClientSession.put") +@pytest.mark.asyncio +async def test_put(mock_put, context_factory): + mock_put.return_value.__aenter__.return_value.text.return_value = "Hello World !" + mock_put.return_value.__aenter__.return_value.status = 200 + + plugin = HttpPlugin() + context_variables = ContextVariables() + context_variables.set("body", "{message: 'Hello, world!'}") + context = context_factory(context_variables) + response = await plugin.put("https://example.org/put", context) + assert response == "Hello World !" + + +@patch("aiohttp.ClientSession.put") +@pytest.mark.asyncio +async def test_put_nobody(mock_put, context_factory): + mock_put.return_value.__aenter__.return_value.text.return_value = "Hello World !" + mock_put.return_value.__aenter__.return_value.status = 200 + + plugin = HttpPlugin() + context_variables = ContextVariables() + context = context_factory(context_variables) + response = await plugin.put("https://example.org/put", context) + assert response == "Hello World !" + + +@patch("aiohttp.ClientSession.delete") +@pytest.mark.asyncio +async def test_delete(mock_delete): + mock_delete.return_value.__aenter__.return_value.text.return_value = "Hello World !" 
+ mock_delete.return_value.__aenter__.return_value.status = 200 + + plugin = HttpPlugin() + response = await plugin.delete("https://example.org/delete") + assert response == "Hello World !" diff --git a/python/tests/unit/core_plugins/test_math_plugin.py b/python/tests/unit/core_plugins/test_math_plugin.py new file mode 100644 index 000000000000..bbffbf8203b7 --- /dev/null +++ b/python/tests/unit/core_plugins/test_math_plugin.py @@ -0,0 +1,196 @@ +# Copyright (c) Microsoft. All rights reserved. + +import pytest + +from semantic_kernel import Kernel +from semantic_kernel.core_plugins import MathPlugin +from semantic_kernel.orchestration.context_variables import ContextVariables + + +def test_can_be_instantiated(): + plugin = MathPlugin() + assert plugin is not None + + +def test_can_be_imported(): + kernel = Kernel() + assert kernel.import_plugin(MathPlugin(), "math") + assert kernel.plugins["math"] is not None + assert kernel.plugins["math"].name == "math" + assert kernel.plugins["math"]["Add"] is not None + assert kernel.plugins["math"]["Subtract"] is not None + + +@pytest.mark.parametrize( + "initial_Value, amount, expectedResult", + [ + ("10", "10", "20"), + ("0", "10", "10"), + ("0", "-10", "-10"), + ("10", "0", "10"), + ("-1", "10", "9"), + ("-10", "10", "0"), + ("-192", "13", "-179"), + ("-192", "-13", "-205"), + ], +) +def test_add_when_valid_parameters_should_succeed(initial_Value, amount, expectedResult): + # Arrange + context = ContextVariables() + context["Amount"] = amount + plugin = MathPlugin() + + # Act + result = plugin.add(initial_Value, context) + + # Assert + assert result == expectedResult + + +@pytest.mark.parametrize( + "initial_Value, amount, expectedResult", + [ + ("10", "10", "0"), + ("0", "10", "-10"), + ("10", "0", "10"), + ("100", "-10", "110"), + ("100", "102", "-2"), + ("-1", "10", "-11"), + ("-10", "10", "-20"), + ("-192", "13", "-205"), + ], +) +def test_subtract_when_valid_parameters_should_succeed(initial_Value, amount, expectedResult): + # Arrange + context = ContextVariables() + context["Amount"] = amount + plugin = MathPlugin() + + # Act + result = plugin.subtract(initial_Value, context) + + # Assert + assert result == expectedResult + + +@pytest.mark.parametrize( + "initial_Value", + [ + "$0", + "one hundred", + "20..,,2,1", + ".2,2.1", + "0.1.0", + "00-099", + "¹²¹", + "2²", + "zero", + "-100 units", + "1 banana", + ], +) +def test_add_when_invalid_initial_value_should_throw(initial_Value): + # Arrange + context = ContextVariables() + context["Amount"] = "1" + plugin = MathPlugin() + + # Act + with pytest.raises(ValueError) as exception: + plugin.add(initial_Value, context) + + # Assert + assert str(exception.value) == f"Initial value provided is not in numeric format: {initial_Value}" + assert exception.type == ValueError + + +@pytest.mark.parametrize( + "amount", + [ + "$0", + "one hundred", + "20..,,2,1", + ".2,2.1", + "0.1.0", + "00-099", + "¹²¹", + "2²", + "zero", + "-100 units", + "1 banana", + ], +) +def test_add_when_invalid_amount_should_throw(amount): + # Arrange + context = ContextVariables() + context["Amount"] = amount + plugin = MathPlugin() + + # Act / Assert + with pytest.raises(ValueError) as exception: + plugin.add("1", context) + + assert str(exception.value) == f"Context amount provided is not in numeric format: {amount}" + assert exception.type == ValueError + + +@pytest.mark.parametrize( + "initial_value", + [ + "$0", + "one hundred", + "20..,,2,1", + ".2,2.1", + "0.1.0", + "00-099", + "¹²¹", + "2²", + "zero", + "-100 units", + "1 
banana", + ], +) +def test_subtract_when_invalid_initial_value_should_throw(initial_value): + # Arrange + context = ContextVariables() + context["Amount"] = "1" + plugin = MathPlugin() + + # Act / Assert + with pytest.raises(ValueError) as exception: + plugin.subtract(initial_value, context) + + # Assert + assert str(exception.value) == f"Initial value provided is not in numeric format: {initial_value}" + assert exception.type == ValueError + + +@pytest.mark.parametrize( + "amount", + [ + "$0", + "one hundred", + "20..,,2,1", + ".2,2.1", + "0.1.0", + "00-099", + "¹²¹", + "2²", + "zero", + "-100 units", + "1 banana", + ], +) +def test_subtract_when_invalid_amount_should_throw(amount): + # Arrange + context = ContextVariables() + context["Amount"] = amount + plugin = MathPlugin() + + # Act / Assert + with pytest.raises(ValueError) as exception: + plugin.subtract("1", context) + + # Assert + assert str(exception.value) == f"Context amount provided is not in numeric format: {amount}" + assert exception.type == ValueError diff --git a/python/tests/unit/core_plugins/test_text_plugin.py b/python/tests/unit/core_plugins/test_text_plugin.py new file mode 100644 index 000000000000..36dfc438fe9c --- /dev/null +++ b/python/tests/unit/core_plugins/test_text_plugin.py @@ -0,0 +1,48 @@ +import semantic_kernel as sk +from semantic_kernel.core_plugins.text_plugin import TextPlugin + + +def test_can_be_instantiated(): + assert TextPlugin() + + +def test_can_be_imported(): + kernel = sk.Kernel() + assert kernel.import_plugin(TextPlugin(), "text_plugin") + assert not kernel.plugins["text_plugin"]["trim"].is_semantic + + +def test_can_be_imported_with_name(): + kernel = sk.Kernel() + assert kernel.import_plugin(TextPlugin(), "text") + assert not kernel.plugins["text"]["trim"].is_semantic + + +def test_can_trim(): + text_plugin = TextPlugin() + result = text_plugin.trim(" hello world ") + assert result == "hello world" + + +def test_can_trim_start(): + text_plugin = TextPlugin() + result = text_plugin.trim_start(" hello world ") + assert result == "hello world " + + +def test_can_trim_end(): + text_plugin = TextPlugin() + result = text_plugin.trim_end(" hello world ") + assert result == " hello world" + + +def test_can_lower(): + text_plugin = TextPlugin() + result = text_plugin.lowercase(" HELLO WORLD ") + assert result == " hello world " + + +def test_can_upper(): + text_plugin = TextPlugin() + result = text_plugin.uppercase(" hello world ") + assert result == " HELLO WORLD " diff --git a/python/tests/unit/core_plugins/test_time_plugin.py b/python/tests/unit/core_plugins/test_time_plugin.py new file mode 100644 index 000000000000..40e58b2bcd3e --- /dev/null +++ b/python/tests/unit/core_plugins/test_time_plugin.py @@ -0,0 +1,148 @@ +import datetime +from unittest import mock + +import semantic_kernel as sk +from semantic_kernel.core_plugins.time_plugin import TimePlugin + +test_mock_now = datetime.datetime(2031, 1, 12, 12, 24, 56, tzinfo=datetime.timezone.utc) +test_mock_today = datetime.date(2031, 1, 12) + + +def test_can_be_instantiated(): + assert TimePlugin() + + +def test_can_be_imported(): + kernel = sk.Kernel() + assert kernel.import_plugin(TimePlugin(), "time") + assert kernel.plugins["time"] is not None + assert kernel.plugins["time"].name == "time" + assert kernel.plugins["time"]["now"] is not None + + +def test_date(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.date() == "Sunday, 12 January, 
2031" + + +def test_now(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.now() == "Sunday, January 12, 2031 12:24 PM" + + +def test_days_ago(): + plugin = TimePlugin() + + with mock.patch("datetime.date", wraps=datetime.date) as dt: + dt.today.return_value = test_mock_today + assert plugin.days_ago(1) == "Saturday, 11 January, 2031" + + +def test_date_matching_last_day_name(): + plugin = TimePlugin() + + with mock.patch("datetime.date", wraps=datetime.date) as dt: + dt.today.return_value = test_mock_today + assert plugin.date_matching_last_day_name("Friday") == "Friday, 10 January, 2031" + + +def test_utc_now(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.utcnow.return_value = test_mock_now + assert plugin.utc_now() == "Sunday, January 12, 2031 12:24 PM" + + +def test_time(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.time() == "12:24:56 PM" + + +def test_year(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.year() == "2031" + + +def test_month(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.month() == "January" + + +def test_month_number(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.month_number() == "01" + + +def test_day(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.day() == "12" + + +def test_day_of_week(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.day_of_week() == "Sunday" + + +def test_hour(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.hour() == "12 PM" + + +def test_minute(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.minute() == "24" + + +def test_second(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.second() == "56" + + +def test_time_zone_offset(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.time_zone_offset() == "+0000" + + +def test_time_zone_name(): + plugin = TimePlugin() + + with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: + dt.now.return_value = test_mock_now + assert plugin.time_zone_name() == "UTC" diff --git a/python/tests/unit/core_plugins/test_wait_plugin.py b/python/tests/unit/core_plugins/test_wait_plugin.py new file mode 100644 index 000000000000..481f3e24e00c --- /dev/null +++ b/python/tests/unit/core_plugins/test_wait_plugin.py @@ -0,0 +1,55 @@ +import pytest + +from semantic_kernel.core_plugins.wait_plugin import WaitPlugin + +test_data_good = [ + "0", + "1", + "2.1", + "0.1", + "0.01", + "0.001", + "0.0001", + "-0.0001", + 
"-10000", +] + +test_data_bad = [ + "$0", + "one hundred", + "20..,,2,1", + ".2,2.1", + "0.1.0", + "00-099", + "¹²¹", + "2²", + "zero", + "-100 seconds", + "1 second", +] + + +def test_can_be_instantiated(): + plugin = WaitPlugin() + assert plugin is not None + + +@pytest.mark.asyncio +@pytest.mark.parametrize("wait_time", test_data_good) +async def test_wait_valid_params(wait_time): + plugin = WaitPlugin() + + await plugin.wait(wait_time) + + assert True + + +@pytest.mark.asyncio +@pytest.mark.parametrize("wait_time", test_data_bad) +async def test_wait_invalid_params(wait_time): + plugin = WaitPlugin() + + with pytest.raises(ValueError) as exc_info: + await plugin.wait("wait_time") + + assert exc_info.value.args[0] == "seconds text must be a number" diff --git a/python/tests/unit/core_skills/test_file_io_skill.py b/python/tests/unit/core_skills/test_file_io_skill.py deleted file mode 100644 index 910e14a9deb9..000000000000 --- a/python/tests/unit/core_skills/test_file_io_skill.py +++ /dev/null @@ -1,94 +0,0 @@ -import os -import tempfile - -import pytest - -from semantic_kernel import Kernel -from semantic_kernel.core_skills.file_io_skill import FileIOSkill -from semantic_kernel.orchestration.context_variables import ContextVariables - - -def test_can_be_instantiated(): - skill = FileIOSkill() - assert skill is not None - - -def test_can_be_imported(): - kernel = Kernel() - assert kernel.import_skill(FileIOSkill(), "file") - assert kernel.skills.has_native_function("file", "readAsync") - - -@pytest.mark.asyncio -async def test_can_read_async(): - skill = FileIOSkill() - fp = None - try: - with tempfile.NamedTemporaryFile(mode="w", delete=False) as fp: - fp.write("Hello, world!") - fp.flush() - - content = await skill.read_async(fp.name) - assert content == "Hello, world!" - finally: - if fp is not None: - os.remove(fp.name) - - -@pytest.mark.asyncio -async def test_cannot_read_async(): - skill = FileIOSkill() - filepath = None - with tempfile.NamedTemporaryFile(mode="w", delete=True) as fp: - fp.write("Hello, world!") - filepath = fp.name - - with pytest.raises(AssertionError): - await skill.read_async(filepath) - - -@pytest.mark.asyncio -async def test_can_write(context_factory): - skill = FileIOSkill() - fp = None - try: - with tempfile.NamedTemporaryFile(mode="r", delete=False) as fp: - context_variables = ContextVariables() - - context_variables.set("path", fp.name) - context_variables.set("content", "Hello, world!") - - context = context_factory(context_variables) - - await skill.write_async(context) - - content = fp.read() - - assert content == "Hello, world!" - finally: - if fp is not None: - os.remove(fp.name) - - -@pytest.mark.asyncio -async def test_cannot_write(context_factory): - skill = FileIOSkill() - fp = None - try: - with tempfile.NamedTemporaryFile(mode="r", delete=False) as fp: - os.chmod(fp.name, 0o500) - - context_variables = ContextVariables() - - context_variables.set("path", fp.name) - context_variables.set("content", "Hello, world!") - - context = context_factory(context_variables) - - with pytest.raises(PermissionError): - await skill.write_async(context) - - os.chmod(fp.name, 0o777) - finally: - if fp is not None: - os.remove(fp.name) diff --git a/python/tests/unit/core_skills/test_http_skill.py b/python/tests/unit/core_skills/test_http_skill.py deleted file mode 100644 index 3f369924a992..000000000000 --- a/python/tests/unit/core_skills/test_http_skill.py +++ /dev/null @@ -1,107 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import patch - -import pytest - -from semantic_kernel import Kernel -from semantic_kernel.core_skills import HttpSkill -from semantic_kernel.orchestration.context_variables import ContextVariables - - -@pytest.mark.asyncio -async def test_it_can_be_instantiated(): - skill = HttpSkill() - assert skill is not None - - -@pytest.mark.asyncio -async def test_it_can_be_imported(): - kernel = Kernel() - skill = HttpSkill() - assert kernel.import_skill(skill, "http") - assert kernel.skills.has_native_function("http", "getAsync") - assert kernel.skills.has_native_function("http", "postAsync") - - -@patch("aiohttp.ClientSession.get") -@pytest.mark.asyncio -async def test_get(mock_get): - mock_get.return_value.__aenter__.return_value.text.return_value = "Hello" - mock_get.return_value.__aenter__.return_value.status = 200 - - skill = HttpSkill() - response = await skill.get_async("https://example.org/get") - assert response == "Hello" - - -@pytest.mark.asyncio -async def test_get_none_url(): - skill = HttpSkill() - with pytest.raises(ValueError): - await skill.get_async(None) - - -@patch("aiohttp.ClientSession.post") -@pytest.mark.asyncio -async def test_post(mock_post, context_factory): - mock_post.return_value.__aenter__.return_value.text.return_value = "Hello World !" - mock_post.return_value.__aenter__.return_value.status = 200 - - skill = HttpSkill() - context_variables = ContextVariables() - context_variables.set("body", "{message: 'Hello, world!'}") - context = context_factory(context_variables) - response = await skill.post_async("https://example.org/post", context) - assert response == "Hello World !" - - -@patch("aiohttp.ClientSession.post") -@pytest.mark.asyncio -async def test_post_nobody(mock_post, context_factory): - mock_post.return_value.__aenter__.return_value.text.return_value = "Hello World !" - mock_post.return_value.__aenter__.return_value.status = 200 - - skill = HttpSkill() - context_variables = ContextVariables() - context = context_factory(context_variables) - response = await skill.post_async("https://example.org/post", context) - assert response == "Hello World !" - - -@patch("aiohttp.ClientSession.put") -@pytest.mark.asyncio -async def test_put(mock_put, context_factory): - mock_put.return_value.__aenter__.return_value.text.return_value = "Hello World !" - mock_put.return_value.__aenter__.return_value.status = 200 - - skill = HttpSkill() - context_variables = ContextVariables() - context_variables.set("body", "{message: 'Hello, world!'}") - context = context_factory(context_variables) - response = await skill.put_async("https://example.org/put", context) - assert response == "Hello World !" - - -@patch("aiohttp.ClientSession.put") -@pytest.mark.asyncio -async def test_put_nobody(mock_put, context_factory): - mock_put.return_value.__aenter__.return_value.text.return_value = "Hello World !" - mock_put.return_value.__aenter__.return_value.status = 200 - - skill = HttpSkill() - context_variables = ContextVariables() - context = context_factory(context_variables) - response = await skill.put_async("https://example.org/put", context) - assert response == "Hello World !" - - -@patch("aiohttp.ClientSession.delete") -@pytest.mark.asyncio -async def test_delete(mock_delete): - mock_delete.return_value.__aenter__.return_value.text.return_value = "Hello World !" - mock_delete.return_value.__aenter__.return_value.status = 200 - - skill = HttpSkill() - response = await skill.delete_async("https://example.org/delete") - assert response == "Hello World !" 
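A minimal sketch of the aiohttp mocking pattern that both the new HttpPlugin tests and the removed HttpSkill tests above rely on: ClientSession.get(...) yields an async context manager, so the canned response has to be attached to __aenter__.return_value. This is an illustration only, not part of the patch; fetch_text is a hypothetical stand-in for the plugin method under test, and it assumes aiohttp is installed and Python 3.8+ (for async magic-method support in unittest.mock).

import asyncio
from unittest.mock import AsyncMock, patch

import aiohttp


async def fetch_text(url: str) -> str:
    # ClientSession.get(...) returns an async context manager, so the
    # response object is whatever __aenter__ yields.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.text()


async def main() -> None:
    with patch("aiohttp.ClientSession.get") as mock_get:
        # Attach the canned response one level down, on __aenter__.return_value,
        # mirroring the pattern used in the HTTP plugin/skill tests above.
        mock_response = mock_get.return_value.__aenter__.return_value
        mock_response.text = AsyncMock(return_value="Hello")
        mock_response.status = 200

        assert await fetch_text("https://example.org/get") == "Hello"


asyncio.run(main())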
diff --git a/python/tests/unit/core_skills/test_math_skill.py b/python/tests/unit/core_skills/test_math_skill.py deleted file mode 100644 index 5b48908a5134..000000000000 --- a/python/tests/unit/core_skills/test_math_skill.py +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import pytest - -from semantic_kernel import Kernel -from semantic_kernel.core_skills import MathSkill -from semantic_kernel.orchestration.context_variables import ContextVariables - - -def test_can_be_instantiated(): - skill = MathSkill() - assert skill is not None - - -def test_can_be_imported(): - kernel = Kernel() - assert kernel.import_skill(MathSkill(), "math") - assert kernel.skills.has_native_function("math", "add") - assert kernel.skills.has_native_function("math", "subtract") - - -@pytest.mark.parametrize( - "initial_Value, amount, expectedResult", - [ - ("10", "10", "20"), - ("0", "10", "10"), - ("0", "-10", "-10"), - ("10", "0", "10"), - ("-1", "10", "9"), - ("-10", "10", "0"), - ("-192", "13", "-179"), - ("-192", "-13", "-205"), - ], -) -def test_add_when_valid_parameters_should_succeed( - initial_Value, amount, expectedResult -): - # Arrange - context = ContextVariables() - context["Amount"] = amount - skill = MathSkill() - - # Act - result = skill.add(initial_Value, context) - - # Assert - assert result == expectedResult - - -@pytest.mark.parametrize( - "initial_Value, amount, expectedResult", - [ - ("10", "10", "0"), - ("0", "10", "-10"), - ("10", "0", "10"), - ("100", "-10", "110"), - ("100", "102", "-2"), - ("-1", "10", "-11"), - ("-10", "10", "-20"), - ("-192", "13", "-205"), - ], -) -def test_subtract_when_valid_parameters_should_succeed( - initial_Value, amount, expectedResult -): - # Arrange - context = ContextVariables() - context["Amount"] = amount - skill = MathSkill() - - # Act - result = skill.subtract(initial_Value, context) - - # Assert - assert result == expectedResult - - -@pytest.mark.parametrize( - "initial_Value", - [ - "$0", - "one hundred", - "20..,,2,1", - ".2,2.1", - "0.1.0", - "00-099", - "¹²¹", - "2²", - "zero", - "-100 units", - "1 banana", - ], -) -def test_add_when_invalid_initial_value_should_throw(initial_Value): - # Arrange - context = ContextVariables() - context["Amount"] = "1" - skill = MathSkill() - - # Act - with pytest.raises(ValueError) as exception: - skill.add(initial_Value, context) - - # Assert - assert ( - str(exception.value) - == f"Initial value provided is not in numeric format: {initial_Value}" - ) - assert exception.type == ValueError - - -@pytest.mark.parametrize( - "amount", - [ - "$0", - "one hundred", - "20..,,2,1", - ".2,2.1", - "0.1.0", - "00-099", - "¹²¹", - "2²", - "zero", - "-100 units", - "1 banana", - ], -) -def test_add_when_invalid_amount_should_throw(amount): - # Arrange - context = ContextVariables() - context["Amount"] = amount - skill = MathSkill() - - # Act / Assert - with pytest.raises(ValueError) as exception: - skill.add("1", context) - - assert ( - str(exception.value) - == f"Context amount provided is not in numeric format: {amount}" - ) - assert exception.type == ValueError - - -@pytest.mark.parametrize( - "initial_value", - [ - "$0", - "one hundred", - "20..,,2,1", - ".2,2.1", - "0.1.0", - "00-099", - "¹²¹", - "2²", - "zero", - "-100 units", - "1 banana", - ], -) -def test_subtract_when_invalid_initial_value_should_throw(initial_value): - # Arrange - context = ContextVariables() - context["Amount"] = "1" - skill = MathSkill() - - # Act / Assert - with pytest.raises(ValueError) as exception: - 
skill.subtract(initial_value, context) - - # Assert - assert ( - str(exception.value) - == f"Initial value provided is not in numeric format: {initial_value}" - ) - assert exception.type == ValueError - - -@pytest.mark.parametrize( - "amount", - [ - "$0", - "one hundred", - "20..,,2,1", - ".2,2.1", - "0.1.0", - "00-099", - "¹²¹", - "2²", - "zero", - "-100 units", - "1 banana", - ], -) -def test_subtract_when_invalid_amount_should_throw(amount): - # Arrange - context = ContextVariables() - context["Amount"] = amount - skill = MathSkill() - - # Act / Assert - with pytest.raises(ValueError) as exception: - skill.subtract("1", context) - - # Assert - assert ( - str(exception.value) - == f"Context amount provided is not in numeric format: {amount}" - ) - assert exception.type == ValueError diff --git a/python/tests/unit/core_skills/test_text_skill.py b/python/tests/unit/core_skills/test_text_skill.py deleted file mode 100644 index 5a8280b785d1..000000000000 --- a/python/tests/unit/core_skills/test_text_skill.py +++ /dev/null @@ -1,49 +0,0 @@ -import semantic_kernel as sk -from semantic_kernel.core_skills.text_skill import TextSkill -from semantic_kernel.skill_definition.skill_collection import SkillCollection - - -def test_can_be_instantiated(): - assert TextSkill() - - -def test_can_be_imported(): - kernel = sk.Kernel() - assert kernel.import_skill(TextSkill()) - assert kernel.skills.has_native_function(SkillCollection.GLOBAL_SKILL, "trim") - - -def test_can_be_imported_with_name(): - kernel = sk.Kernel() - assert kernel.import_skill(TextSkill(), "text") - assert kernel.skills.has_native_function("text", "trim") - - -def test_can_trim(): - text_skill = TextSkill() - result = text_skill.trim(" hello world ") - assert result == "hello world" - - -def test_can_trim_start(): - text_skill = TextSkill() - result = text_skill.trim_start(" hello world ") - assert result == "hello world " - - -def test_can_trim_end(): - text_skill = TextSkill() - result = text_skill.trim_end(" hello world ") - assert result == " hello world" - - -def test_can_lower(): - text_skill = TextSkill() - result = text_skill.lowercase(" HELLO WORLD ") - assert result == " hello world " - - -def test_can_upper(): - text_skill = TextSkill() - result = text_skill.uppercase(" hello world ") - assert result == " HELLO WORLD " diff --git a/python/tests/unit/core_skills/test_time_skill.py b/python/tests/unit/core_skills/test_time_skill.py deleted file mode 100644 index 80433a5b5dff..000000000000 --- a/python/tests/unit/core_skills/test_time_skill.py +++ /dev/null @@ -1,146 +0,0 @@ -import datetime -from unittest import mock - -import semantic_kernel as sk -from semantic_kernel.core_skills.time_skill import TimeSkill - -test_mock_now = datetime.datetime(2031, 1, 12, 12, 24, 56, tzinfo=datetime.timezone.utc) -test_mock_today = datetime.date(2031, 1, 12) - - -def test_can_be_instantiated(): - assert TimeSkill() - - -def test_can_be_imported(): - kernel = sk.Kernel() - assert kernel.import_skill(TimeSkill(), "time") - assert kernel.skills.has_native_function("time", "now") - - -def test_date(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.date() == "Sunday, 12 January, 2031" - - -def test_now(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.now() == "Sunday, January 12, 2031 12:24 PM" - - -def test_days_ago(): - skill = TimeSkill() - - 
with mock.patch("datetime.date", wraps=datetime.date) as dt: - dt.today.return_value = test_mock_today - assert skill.days_ago(1) == "Saturday, 11 January, 2031" - - -def test_date_matching_last_day_name(): - skill = TimeSkill() - - with mock.patch("datetime.date", wraps=datetime.date) as dt: - dt.today.return_value = test_mock_today - assert skill.date_matching_last_day_name("Friday") == "Friday, 10 January, 2031" - - -def test_utc_now(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.utcnow.return_value = test_mock_now - assert skill.utc_now() == "Sunday, January 12, 2031 12:24 PM" - - -def test_time(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.time() == "12:24:56 PM" - - -def test_year(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.year() == "2031" - - -def test_month(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.month() == "January" - - -def test_month_number(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.month_number() == "01" - - -def test_day(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.day() == "12" - - -def test_day_of_week(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.day_of_week() == "Sunday" - - -def test_hour(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.hour() == "12 PM" - - -def test_minute(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.minute() == "24" - - -def test_second(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.second() == "56" - - -def test_time_zone_offset(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.time_zone_offset() == "+0000" - - -def test_time_zone_name(): - skill = TimeSkill() - - with mock.patch("datetime.datetime", wraps=datetime.datetime) as dt: - dt.now.return_value = test_mock_now - assert skill.time_zone_name() == "UTC" diff --git a/python/tests/unit/core_skills/test_wait_skill.py b/python/tests/unit/core_skills/test_wait_skill.py deleted file mode 100644 index 411a192e73d8..000000000000 --- a/python/tests/unit/core_skills/test_wait_skill.py +++ /dev/null @@ -1,55 +0,0 @@ -import pytest - -from semantic_kernel.core_skills.wait_skill import WaitSkill - -test_data_good = [ - "0", - "1", - "2.1", - "0.1", - "0.01", - "0.001", - "0.0001", - "-0.0001", - "-10000", -] - -test_data_bad = [ - "$0", - "one hundred", - "20..,,2,1", - ".2,2.1", - "0.1.0", - "00-099", - "¹²¹", - "2²", - "zero", - "-100 seconds", - "1 second", -] - - -def test_can_be_instantiated(): - skill = WaitSkill() - assert skill is not None - - -@pytest.mark.asyncio -@pytest.mark.parametrize("wait_time", 
test_data_good) -async def test_wait_valid_params(wait_time): - skill = WaitSkill() - - await skill.wait(wait_time) - - assert True - - -@pytest.mark.asyncio -@pytest.mark.parametrize("wait_time", test_data_bad) -async def test_wait_invalid_params(wait_time): - skill = WaitSkill() - - with pytest.raises(ValueError) as exc_info: - await skill.wait("wait_time") - - assert exc_info.value.args[0] == "seconds text must be a number" diff --git a/python/tests/unit/kernel_extensions/test_import_plugins.py b/python/tests/unit/kernel_extensions/test_import_plugins.py new file mode 100644 index 000000000000..acd03d823517 --- /dev/null +++ b/python/tests/unit/kernel_extensions/test_import_plugins.py @@ -0,0 +1,98 @@ +# Copyright (c) Microsoft. All rights reserved. + +import os +import random + +import semantic_kernel as sk +import semantic_kernel.connectors.ai.open_ai as sk_oai +from semantic_kernel.plugin_definition import kernel_function + + +def test_plugin_can_be_imported(): + # create a kernel + kernel = sk.Kernel() + api_key = "test-api-key" + org_id = "test-org-id" + kernel.add_text_completion_service( + "test-completion-service", + sk_oai.OpenAITextCompletion("text-davinci-003", api_key, org_id), + ) + + # import plugins + plugins_directory = os.path.join(os.path.dirname(__file__), "../..", "test_plugins") + # path to plugins directory + plugin = kernel.import_semantic_plugin_from_directory(plugins_directory, "TestPlugin") + + assert plugin is not None + assert len(plugin.functions) == 1 + assert plugin.functions.get("TestFunction") is not None + + +def test_native_plugin_can_be_imported(): + # create a kernel + kernel = sk.Kernel() + + # import plugins + plugins_directory = os.path.join(os.path.dirname(__file__), "../..", "test_native_plugins") + # path to plugins directory + plugin = kernel.import_native_plugin_from_directory(plugins_directory, "TestNativePlugin") + + assert plugin is not None + assert len(plugin.functions) == 1 + assert plugin.functions.get("echoAsync") is not None + plugin_config = plugin.functions["echoAsync"] + assert plugin_config.name == "echoAsync" + assert plugin_config.description == "Echo for input text" + + +def test_create_semantic_function_succeeds(): + # create a kernel + kernel = sk.Kernel() + + kernel.add_chat_service( + "test-completion-service", + sk_oai.OpenAIChatCompletion("test", "test", ""), + ) + + class GenerateNamesPlugin: + @kernel_function(description="Generate character names", name="generate_names") + def generate_names(self) -> str: + """ + Generate two names. + Returns: + str + """ + names = {"Hoagie", "Hamilton", "Bacon", "Pizza", "Boots", "Shorts", "Tuna"} + first_name = random.choice(list(names)) + names.remove(first_name) + second_name = random.choice(list(names)) + return f"{first_name}, {second_name}" + + # import plugins + _ = kernel.import_plugin(GenerateNamesPlugin(), plugin_name="GenerateNames") + + sk_prompt = """ + Write a short story about two Corgis on an adventure. 
+ The story must be: + - G rated + - Have a positive message + - No sexism, racism or other bias/bigotry + - Be exactly {{$paragraph_count}} paragraphs long + - Be written in this language: {{$language}} + - The two names of the corgis are {{GenerateNames.generate_names}} + """ + + print(sk_prompt) + + test_func = kernel.create_semantic_function( + prompt_template=sk_prompt, + function_name="TestFunction", + plugin_name="TestPlugin", + description="Write a short story.", + max_tokens=500, + temperature=0.5, + top_p=0.5, + ) + + assert len(test_func.plugins) > 0 + assert test_func.plugins["GenerateNames"] is not None diff --git a/python/tests/unit/kernel_extensions/test_import_skills.py b/python/tests/unit/kernel_extensions/test_import_skills.py deleted file mode 100644 index acca6f13994b..000000000000 --- a/python/tests/unit/kernel_extensions/test_import_skills.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import os - -import semantic_kernel as sk -import semantic_kernel.connectors.ai.open_ai as sk_oai - - -def test_skill_can_be_imported(): - # create a kernel - kernel = sk.Kernel() - api_key = "test-api-key" - org_id = "test-org-id" - kernel.add_text_completion_service( - "test-completion-service", - sk_oai.OpenAITextCompletion("text-davinci-003", api_key, org_id), - ) - - # import skills - skills_directory = os.path.join(os.path.dirname(__file__), "../..", "test_skills") - # path to skills directory - skill_config_dict = kernel.import_semantic_skill_from_directory( - skills_directory, "TestSkill" - ) - - assert skill_config_dict is not None - assert len(skill_config_dict) == 1 - assert "TestFunction" in skill_config_dict - skill_config = skill_config_dict["TestFunction"] - assert skill_config.name == "TestFunction" - assert skill_config.description == "Test Description" - - -def test_native_skill_can_be_imported(): - # create a kernel - kernel = sk.Kernel() - - # import skills - skills_directory = os.path.join( - os.path.dirname(__file__), "../..", "test_native_skills" - ) - # path to skills directory - skill_config_dict = kernel.import_native_skill_from_directory( - skills_directory, "TestNativeSkill" - ) - - assert skill_config_dict is not None - assert len(skill_config_dict) == 1 - assert "echoAsync" in skill_config_dict - skill_config = skill_config_dict["echoAsync"] - assert skill_config.name == "echoAsync" - assert skill_config.description == "Echo for input text" diff --git a/python/tests/unit/kernel_extensions/test_register_functions.py b/python/tests/unit/kernel_extensions/test_register_functions.py index 772f94a8f700..decfbb8bd482 100644 --- a/python/tests/unit/kernel_extensions/test_register_functions.py +++ b/python/tests/unit/kernel_extensions/test_register_functions.py @@ -5,57 +5,50 @@ from semantic_kernel import Kernel from semantic_kernel.kernel_exception import KernelException -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase -from semantic_kernel.skill_definition.sk_function_decorator import sk_function -from semantic_kernel.skill_definition.skill_collection import SkillCollection +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.kernel_function_decorator import kernel_function def not_decorated_native_function(arg1: str) -> str: return "test" -@sk_function(name="getLightStatus") +@kernel_function(name="getLightStatus") def decorated_native_function(arg1: str) -> str: return "test" -def test_register_valid_native_function(): 
+@pytest.mark.asyncio +async def test_register_valid_native_function(): kernel = Kernel() - registered_func = kernel.register_native_function( - "TestSkill", decorated_native_function - ) + registered_func = kernel.register_native_function("TestPlugin", decorated_native_function) - assert isinstance(registered_func, SKFunctionBase) - assert ( - kernel.skills.get_native_function("TestSkill", "getLightStatus") - == registered_func - ) - assert registered_func.invoke("testtest").result == "test" + assert isinstance(registered_func, KernelFunction) + assert kernel.plugins["TestPlugin"]["getLightStatus"] == registered_func + func_result = await registered_func.invoke("testtest") + assert func_result.result == "test" def test_register_undecorated_native_function(): kernel = Kernel() with pytest.raises(KernelException): - kernel.register_native_function("TestSkill", not_decorated_native_function) + kernel.register_native_function("TestPlugin", not_decorated_native_function) -def test_register_with_none_skill_name(): +def test_register_with_none_plugin_name(): kernel = Kernel() registered_func = kernel.register_native_function(None, decorated_native_function) - assert registered_func.skill_name == SkillCollection.GLOBAL_SKILL + assert registered_func.plugin_name is not None + assert registered_func.plugin_name.startswith("p_") def test_register_overloaded_native_function(): kernel = Kernel() - kernel.register_native_function("TestSkill", decorated_native_function) + kernel.register_native_function("TestPlugin", decorated_native_function) with pytest.raises(KernelException): - kernel.register_native_function("TestSkill", decorated_native_function) - - -if __name__ == "__main__": - pytest.main([__file__]) + kernel.register_native_function("TestPlugin", decorated_native_function) diff --git a/python/tests/unit/memory/test_azure_cognitive_search_memory_store.py b/python/tests/unit/memory/test_azure_cognitive_search_memory_store.py new file mode 100644 index 000000000000..cb204ee16712 --- /dev/null +++ b/python/tests/unit/memory/test_azure_cognitive_search_memory_store.py @@ -0,0 +1,74 @@ +from unittest.mock import AsyncMock, patch + +import pytest +from azure.core.credentials import AzureKeyCredential +from azure.core.exceptions import ResourceNotFoundError +from azure.search.documents.indexes.models import SearchIndex, SearchResourceEncryptionKey + +from semantic_kernel.connectors.memory.azure_cognitive_search import AzureCognitiveSearchMemoryStore + + +@pytest.fixture +def azure_cognitive_search_memory_store(): + """Fixture to instantiate AzureCognitiveSearchMemoryStore with basic configuration.""" + store = AzureCognitiveSearchMemoryStore( + 1536, "https://test.search.windows.net", azure_credentials=AzureKeyCredential("test_key") + ) + return store + + +@pytest.fixture +def mock_search_index_client(): + """Fixture to patch 'SearchIndexClient' and its 'create_index' method.""" + with patch("azure.search.documents.indexes.aio.SearchIndexClient.create_index") as mock_create_index: + # Setup the mock to return a specific SearchIndex instance when called + mock_create_index.return_value = SearchIndex(name="testIndexWithEncryption", fields=[]) + yield mock_create_index + + +@pytest.fixture +def mock_encryption_key(): + """Fixture to provide a mock encryption key.""" + return SearchResourceEncryptionKey( + key_name="mockKeyName", key_version="mockKeyVersion", vault_uri="https://mockvault.vault.azure.net/" + ) + + +@pytest.fixture +def mock_get_index_client(): + """Fixture to patch 
'SearchIndexClient.get_index' method to raise ResourceNotFoundError.""" + with patch("azure.search.documents.indexes.aio.SearchIndexClient.get_index", new_callable=AsyncMock) as mock: + mock.side_effect = ResourceNotFoundError("The specified index was not found.") + yield mock + + +@pytest.mark.asyncio +async def test_create_collection_without_encryption_key( + azure_cognitive_search_memory_store, mock_search_index_client, mock_get_index_client +): + mock_search_index_client.return_value = SearchIndex(name="testIndex", fields=[]) + await azure_cognitive_search_memory_store.create_collection("testIndex") + + mock_search_index_client.assert_called_once() + args, kwargs = mock_search_index_client.call_args + created_index: SearchIndex = args[0] + + assert created_index.encryption_key is None, "Encryption key should be None" + + +@pytest.mark.asyncio +async def test_create_collection_with_encryption_key( + azure_cognitive_search_memory_store, mock_search_index_client, mock_encryption_key, mock_get_index_client +): + mock_search_index_client.return_value = SearchIndex( + name="testIndexWithEncryption", fields=[], search_resource_encryption_key=mock_encryption_key + ) + await azure_cognitive_search_memory_store.create_collection( + "testIndexWithEncryption", search_resource_encryption_key=mock_encryption_key + ) + + mock_search_index_client.assert_called_once() + args, kwargs = mock_search_index_client.call_args + created_index: SearchIndex = args[0] + + assert created_index.encryption_key == mock_encryption_key, "Encryption key was not set correctly" diff --git a/python/tests/unit/memory/test_volatile_memory_store.py b/python/tests/unit/memory/test_volatile_memory_store.py index 7bd0a4f99375..f340eeade4fb 100644 --- a/python/tests/unit/memory/test_volatile_memory_store.py +++ b/python/tests/unit/memory/test_volatile_memory_store.py @@ -27,9 +27,7 @@ async def test_cosine_similarity_zero_query(): query_embedding = np.array([0, 0, 0]) collection_embeddings = np.array([[1, 0, 1], [0, 1, 0]]) with raises(ValueError): - _ = volatile_memory_store.compute_similarity_scores( - query_embedding, collection_embeddings - ) + _ = volatile_memory_store.compute_similarity_scores(query_embedding, collection_embeddings) @mark.asyncio @@ -39,9 +37,7 @@ async def test_cosine_similarity_zero_collection(): query_embedding = np.array([1, 0, 1]) collection_embeddings = np.array([[0, 0, 0], [0, 0, 0]]) with raises(ValueError): - _ = volatile_memory_store.compute_similarity_scores( - query_embedding, collection_embeddings - ) + _ = volatile_memory_store.compute_similarity_scores(query_embedding, collection_embeddings) @mark.asyncio @@ -51,7 +47,5 @@ async def test_cosine_similarity_partial_zero_collection(): query_embedding = np.array([1, 0, 1]) collection_embeddings = np.array([[1, 0, 1], [0, 0, 0]]) expected_scores = np.array([1.0, -1.0]) - scores = volatile_memory_store.compute_similarity_scores( - query_embedding, collection_embeddings - ) + scores = volatile_memory_store.compute_similarity_scores(query_embedding, collection_embeddings) assert np.allclose(expected_scores, scores) diff --git a/python/tests/unit/models/chat/test_chat_message.py b/python/tests/unit/models/chat/test_chat_message.py index 375e412725e1..e09e1e7504c8 100644 --- a/python/tests/unit/models/chat/test_chat_message.py +++ b/python/tests/unit/models/chat/test_chat_message.py @@ -21,12 +21,8 @@ async def test_chat_message_rendering(create_kernel): # Test initialization with custom values kernel = create_kernel expected_content = "Hello, world!" 
- prompt_config = PromptTemplateConfig.from_completion_parameters( - max_tokens=2000, temperature=0.7, top_p=0.8 - ) - content_template = PromptTemplate( - "Hello, {{$input}}!", kernel.prompt_template_engine, prompt_config - ) + prompt_config = PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.8) + content_template = PromptTemplate("Hello, {{$input}}!", kernel.prompt_template_engine, prompt_config) message = ChatMessage( role="user", @@ -34,7 +30,7 @@ async def test_chat_message_rendering(create_kernel): ) context = kernel.create_new_context() context.variables["input"] = "world" - await message.render_message_async(context) + await message.render_message(context) assert message.role == "user" assert message.fixed_content == expected_content assert message.content_template == content_template diff --git a/python/tests/unit/openapi/test_sk_openapi.py b/python/tests/unit/openapi/test_sk_openapi.py index df0293692901..b412ea42987f 100644 --- a/python/tests/unit/openapi/test_sk_openapi.py +++ b/python/tests/unit/openapi/test_sk_openapi.py @@ -5,7 +5,10 @@ import yaml from openapi_core import Spec -from semantic_kernel.connectors.openapi.sk_openapi import ( +from semantic_kernel.connectors.ai.open_ai.const import ( + USER_AGENT, +) +from semantic_kernel.connectors.openapi.kernel_openapi import ( OpenApiParser, OpenApiRunner, PreparedRestApiRequest, @@ -92,7 +95,11 @@ def test_prepare_request_with_path_params(): method="PUT", url="http://example.com/todos/1", params={"completed": False}, - headers={"Authorization": "Bearer abc123", "Content-Type": "application/json"}, + headers={ + "Authorization": "Bearer abc123", + "Content-Type": "application/json", + USER_AGENT: "Semantic-Kernel", + }, request_body={"title": "Buy milk", "completed": False}, ) actual_request = put_operation.prepare_request( @@ -127,7 +134,11 @@ def test_prepare_request_with_default_query_param(): method="PUT", url="http://example.com/todos/1", params={}, - headers={"Authorization": "Bearer abc123", "Content-Type": "application/json"}, + headers={ + "Authorization": "Bearer abc123", + "Content-Type": "application/json", + USER_AGENT: "Semantic-Kernel", + }, request_body={"title": "Buy milk", "completed": False}, ) actual_request = put_operation.prepare_request( @@ -148,7 +159,31 @@ def test_prepare_request_with_default_header(): method="PUT", url="http://example.com/todos/1", params={"completed": False}, - headers={"Content-Type": "application/json"}, + headers={"Content-Type": "application/json", USER_AGENT: "Semantic-Kernel"}, + request_body={"title": "Buy milk", "completed": False}, + ) + actual_request = put_operation.prepare_request( + path_params=path_params, + query_params=query_params, + headers=headers, + request_body=request_body, + ) + assert str(actual_request) == str(expected_request) + + +def test_prepare_request_with_existing_user_agent(): + path_params = {"id": 1} + query_params = {"completed": False} + headers = {USER_AGENT: "API/1.0 PythonBindings"} + request_body = {"title": "Buy milk", "completed": False} + expected_request = PreparedRestApiRequest( + method="PUT", + url="http://example.com/todos/1", + params={"completed": False}, + headers={ + USER_AGENT: "Semantic-Kernel API/1.0 PythonBindings", + "Content-Type": "application/json", + }, request_body={"title": "Buy milk", "completed": False}, ) actual_request = put_operation.prepare_request( @@ -267,9 +302,7 @@ async def test_run_operation_with_valid_request(mock_request, openapi_runner): headers = {"Authorization": 
"Bearer abc123"} request_body = {"title": "Buy milk", "completed": False} mock_request.return_value.__aenter__.return_value.text.return_value = 200 - response = await runner.run_operation( - operation, headers=headers, request_body=request_body - ) + response = await runner.run_operation(operation, headers=headers, request_body=request_body) assert response == 200 @@ -282,9 +315,7 @@ async def test_run_operation_with_invalid_request(mock_request, openapi_runner): request_body = {"title": "Buy milk"} mock_request.return_value.__aenter__.return_value.text.return_value = 400 with pytest.raises(Exception): - await runner.run_operation( - operation, headers=headers, request_body=request_body - ) + await runner.run_operation(operation, headers=headers, request_body=request_body) @patch("aiohttp.ClientSession.request") @@ -296,6 +327,4 @@ async def test_run_operation_with_error(mock_request, openapi_runner): request_body = {"title": "Buy milk", "completed": False} mock_request.side_effect = Exception("Error") with pytest.raises(Exception): - await runner.run_operation( - operation, headers=headers, request_body=request_body - ) + await runner.run_operation(operation, headers=headers, request_body=request_body) diff --git a/python/tests/unit/orchestration/test_context_variables.py b/python/tests/unit/orchestration/test_context_variables.py index 24430d0586c5..3776b7d13d58 100644 --- a/python/tests/unit/orchestration/test_context_variables.py +++ b/python/tests/unit/orchestration/test_context_variables.py @@ -46,18 +46,12 @@ def test_merged_context_vars_with_empty_input_results_in_empty_input(): assert context_vars_combined_1with2.variables is not None assert len(context_vars_combined_1with2.variables) == 2 assert context_vars_combined_1with2.variables["input"] == "" - assert ( - context_vars_combined_1with2.variables["test_string"] - == variables["test_string"] - ) + assert context_vars_combined_1with2.variables["test_string"] == variables["test_string"] assert context_vars_combined_2with1.variables is not None assert len(context_vars_combined_2with1.variables) == 2 assert context_vars_combined_2with1.variables["input"] == "" - assert ( - context_vars_combined_2with1.variables["test_string"] - == variables["test_string"] - ) + assert context_vars_combined_2with1.variables["test_string"] == variables["test_string"] def test_merged_context_vars_with_same_input_results_in_unchanged_input(): @@ -71,18 +65,12 @@ def test_merged_context_vars_with_same_input_results_in_unchanged_input(): assert context_vars_combined_1with2.variables is not None assert len(context_vars_combined_1with2.variables) == 2 assert context_vars_combined_1with2.variables["input"] == content - assert ( - context_vars_combined_1with2.variables["test_string"] - == variables["test_string"] - ) + assert context_vars_combined_1with2.variables["test_string"] == variables["test_string"] assert context_vars_combined_2with1.variables is not None assert len(context_vars_combined_2with1.variables) == 2 assert context_vars_combined_2with1.variables["input"] == content - assert ( - context_vars_combined_2with1.variables["test_string"] - == variables["test_string"] - ) + assert context_vars_combined_2with1.variables["test_string"] == variables["test_string"] def test_merged_context_vars_with_different_input_results_in_input_overwrite1(): @@ -91,20 +79,12 @@ def test_merged_context_vars_with_different_input_results_in_input_overwrite1(): variables = {"test_string": "Hello, world!"} context_vars1 = ContextVariables(content=content) context_vars2 = 
ContextVariables(content=content2, variables=variables) - context_vars_combined_1with2 = context_vars1.merge_or_overwrite( - context_vars2, overwrite=False - ) + context_vars_combined_1with2 = context_vars1.merge_or_overwrite(context_vars2, overwrite=False) assert context_vars_combined_1with2.variables is not None assert len(context_vars_combined_1with2.variables) == 2 - assert ( - context_vars_combined_1with2.variables["input"] - == context_vars2.variables["input"] - ) - assert ( - context_vars_combined_1with2.variables["test_string"] - == context_vars2.variables["test_string"] - ) + assert context_vars_combined_1with2.variables["input"] == context_vars2.variables["input"] + assert context_vars_combined_1with2.variables["test_string"] == context_vars2.variables["test_string"] def test_merged_context_vars_with_different_input_results_in_input_overwrite2(): @@ -113,17 +93,12 @@ def test_merged_context_vars_with_different_input_results_in_input_overwrite2(): variables = {"test_string": "Hello, world!"} context_vars1 = ContextVariables(content=content) context_vars2 = ContextVariables(content=content2, variables=variables) - context_vars_combined_2with1 = context_vars2.merge_or_overwrite( - context_vars1, overwrite=False - ) + context_vars_combined_2with1 = context_vars2.merge_or_overwrite(context_vars1, overwrite=False) assert context_vars_combined_2with1.variables is not None assert len(context_vars_combined_2with1.variables) == 2 assert context_vars_combined_2with1.variables["input"] == context_vars1["input"] - assert ( - context_vars_combined_2with1.variables["test_string"] - == context_vars2.variables["test_string"] - ) + assert context_vars_combined_2with1.variables["test_string"] == context_vars2.variables["test_string"] def test_can_overwrite_context_variables1(): @@ -132,20 +107,12 @@ def test_can_overwrite_context_variables1(): variables = {"test_string": "Hello, world!"} context_vars1 = ContextVariables(content=content) context_vars2 = ContextVariables(content=content2, variables=variables) - context_vars_overwrite_1with2 = context_vars1.merge_or_overwrite( - context_vars2, overwrite=True - ) + context_vars_overwrite_1with2 = context_vars1.merge_or_overwrite(context_vars2, overwrite=True) assert context_vars_overwrite_1with2.variables is not None assert len(context_vars_overwrite_1with2.variables) == len(context_vars2.variables) - assert ( - context_vars_overwrite_1with2.variables["input"] - == context_vars2.variables["input"] - ) - assert ( - context_vars_overwrite_1with2.variables["test_string"] - == context_vars2["test_string"] - ) + assert context_vars_overwrite_1with2.variables["input"] == context_vars2.variables["input"] + assert context_vars_overwrite_1with2.variables["test_string"] == context_vars2["test_string"] def test_can_overwrite_context_variables2(): @@ -154,13 +121,8 @@ def test_can_overwrite_context_variables2(): variables = {"test_string": "Hello, world!"} context_vars1 = ContextVariables(content=content) context_vars2 = ContextVariables(content=content2, variables=variables) - context_vars_overwrite_2with1 = context_vars2.merge_or_overwrite( - context_vars1, overwrite=True - ) + context_vars_overwrite_2with1 = context_vars2.merge_or_overwrite(context_vars1, overwrite=True) assert context_vars_overwrite_2with1.variables is not None assert len(context_vars_overwrite_2with1.variables) == len(context_vars1.variables) - assert ( - context_vars_overwrite_2with1.variables["input"] - == context_vars1.variables["input"] - ) + assert 
context_vars_overwrite_2with1.variables["input"] == context_vars1.variables["input"] diff --git a/python/tests/unit/orchestration/test_native_function.py b/python/tests/unit/orchestration/test_native_function.py index edb7a32c8c4d..2d039dfa1de1 100644 --- a/python/tests/unit/orchestration/test_native_function.py +++ b/python/tests/unit/orchestration/test_native_function.py @@ -2,23 +2,23 @@ from typing import TYPE_CHECKING -from semantic_kernel.orchestration.sk_function import SKFunction -from semantic_kernel.skill_definition.sk_function_decorator import sk_function +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.kernel_function_decorator import kernel_function if TYPE_CHECKING: - from semantic_kernel.orchestration.sk_context import SKContext + from semantic_kernel.orchestration.kernel_context import KernelContext def test_init_native_function_with_input_description(): - def mock_function(input: str, context: "SKContext") -> None: + def mock_function(input: str, context: "KernelContext") -> None: pass - mock_function.__sk_function__ = True - mock_function.__sk_function_name__ = "mock_function" - mock_function.__sk_function_description__ = "Mock description" - mock_function.__sk_function_input_description__ = "Mock input description" - mock_function.__sk_function_input_default_value__ = "default_input_value" - mock_function.__sk_function_context_parameters__ = [ + mock_function.__kernel_function__ = True + mock_function.__kernel_function_name__ = "mock_function" + mock_function.__kernel_function_description__ = "Mock description" + mock_function.__kernel_function_input_description__ = "Mock input description" + mock_function.__kernel_function_input_default_value__ = "default_input_value" + mock_function.__kernel_function_context_parameters__ = [ { "name": "param1", "description": "Param 1 description", @@ -28,29 +28,29 @@ def mock_function(input: str, context: "SKContext") -> None: mock_method = mock_function - native_function = SKFunction.from_native_method(mock_method, "MockSkill") + native_function = KernelFunction.from_native_method(mock_method, "MockPlugin") - assert native_function._function == mock_method - assert native_function._parameters[0].name == "input" - assert native_function._parameters[0].description == "Mock input description" - assert native_function._parameters[0].default_value == "default_input_value" - assert native_function._parameters[0].type_ == "string" - assert native_function._parameters[0].required is False - assert native_function._parameters[1].name == "param1" - assert native_function._parameters[1].description == "Param 1 description" - assert native_function._parameters[1].default_value == "default_param1_value" - assert native_function._parameters[1].type_ == "string" - assert native_function._parameters[1].required is False + assert native_function.function == mock_method + assert native_function.parameters[0].name == "input" + assert native_function.parameters[0].description == "Mock input description" + assert native_function.parameters[0].default_value == "default_input_value" + assert native_function.parameters[0].type_ == "string" + assert native_function.parameters[0].required is False + assert native_function.parameters[1].name == "param1" + assert native_function.parameters[1].description == "Param 1 description" + assert native_function.parameters[1].default_value == "default_param1_value" + assert native_function.parameters[1].type_ == "string" + assert 
native_function.parameters[1].required is False def test_init_native_function_without_input_description(): - def mock_function(context: "SKContext") -> None: + def mock_function(context: "KernelContext") -> None: pass - mock_function.__sk_function__ = True - mock_function.__sk_function_name__ = "mock_function_no_input_desc" - mock_function.__sk_function_description__ = "Mock description no input desc" - mock_function.__sk_function_context_parameters__ = [ + mock_function.__kernel_function__ = True + mock_function.__kernel_function_name__ = "mock_function_no_input_desc" + mock_function.__kernel_function_description__ = "Mock description no input desc" + mock_function.__kernel_function_context_parameters__ = [ { "name": "param1", "description": "Param 1 description", @@ -61,18 +61,18 @@ def mock_function(context: "SKContext") -> None: mock_method = mock_function - native_function = SKFunction.from_native_method(mock_method, "MockSkill") + native_function = KernelFunction.from_native_method(mock_method, "MockPlugin") - assert native_function._function == mock_method - assert native_function._parameters[0].name == "param1" - assert native_function._parameters[0].description == "Param 1 description" - assert native_function._parameters[0].default_value == "default_param1_value" - assert native_function._parameters[0].type_ == "string" - assert native_function._parameters[0].required is True + assert native_function.function == mock_method + assert native_function.parameters[0].name == "param1" + assert native_function.parameters[0].description == "Param 1 description" + assert native_function.parameters[0].default_value == "default_param1_value" + assert native_function.parameters[0].type_ == "string" + assert native_function.parameters[0].required is True -def test_init_native_function_from_sk_function_decorator(): - @sk_function( +def test_init_native_function_from_kernel_function_decorator(): + @kernel_function( description="Test description", name="test_function", input_description="Test input description", @@ -81,38 +81,34 @@ def test_init_native_function_from_sk_function_decorator(): def decorated_function() -> None: pass - assert decorated_function.__sk_function__ is True - assert decorated_function.__sk_function_description__ == "Test description" - assert decorated_function.__sk_function_name__ == "test_function" - assert ( - decorated_function.__sk_function_input_description__ == "Test input description" - ) - assert ( - decorated_function.__sk_function_input_default_value__ == "test_default_value" - ) + assert decorated_function.__kernel_function__ is True + assert decorated_function.__kernel_function_description__ == "Test description" + assert decorated_function.__kernel_function_name__ == "test_function" + assert decorated_function.__kernel_function_input_description__ == "Test input description" + assert decorated_function.__kernel_function_input_default_value__ == "test_default_value" - native_function = SKFunction.from_native_method(decorated_function, "MockSkill") + native_function = KernelFunction.from_native_method(decorated_function, "MockPlugin") - assert native_function._function == decorated_function - assert native_function._parameters[0].name == "input" - assert native_function._parameters[0].description == "Test input description" - assert native_function._parameters[0].default_value == "test_default_value" - assert native_function._parameters[0].type_ == "string" - assert native_function._parameters[0].required is False + assert native_function.function == 
decorated_function + assert native_function.parameters[0].name == "input" + assert native_function.parameters[0].description == "Test input description" + assert native_function.parameters[0].default_value == "test_default_value" + assert native_function.parameters[0].type_ == "string" + assert native_function.parameters[0].required is False -def test_init_native_function_from_sk_function_decorator_defaults(): - @sk_function() +def test_init_native_function_from_kernel_function_decorator_defaults(): + @kernel_function() def decorated_function() -> None: pass - assert decorated_function.__sk_function__ is True - assert decorated_function.__sk_function_description__ == "" - assert decorated_function.__sk_function_name__ == "decorated_function" - assert decorated_function.__sk_function_input_description__ == "" - assert decorated_function.__sk_function_input_default_value__ == "" + assert decorated_function.__kernel_function__ is True + assert decorated_function.__kernel_function_description__ == "" + assert decorated_function.__kernel_function_name__ == "decorated_function" + assert decorated_function.__kernel_function_input_description__ == "" + assert decorated_function.__kernel_function_input_default_value__ == "" - native_function = SKFunction.from_native_method(decorated_function, "MockSkill") + native_function = KernelFunction.from_native_method(decorated_function, "MockPlugin") - assert native_function._function == decorated_function - assert len(native_function._parameters) == 0 + assert native_function.function == decorated_function + assert len(native_function.parameters) == 0 diff --git a/python/tests/unit/planning/action_planner/test_action_planner.py b/python/tests/unit/planning/action_planner/test_action_planner.py index 1e742974fcff..fe9096da3d24 100644 --- a/python/tests/unit/planning/action_planner/test_action_planner.py +++ b/python/tests/unit/planning/action_planner/test_action_planner.py @@ -1,29 +1,35 @@ from textwrap import dedent -from unittest.mock import Mock +from unittest.mock import MagicMock, Mock import pytest from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.memory.semantic_text_memory import SemanticTextMemoryBase from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction from semantic_kernel.planning import ActionPlanner from semantic_kernel.planning.action_planner.action_planner_config import ( ActionPlannerConfig, ) from semantic_kernel.planning.planning_exception import PlanningException -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.functions_view import FunctionsView -from semantic_kernel.skill_definition.skill_collection_base import SkillCollectionBase +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.functions_view import FunctionsView +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin +from semantic_kernel.plugin_definition.kernel_plugin_collection import ( + KernelPluginCollection, +) -def create_mock_function(function_view: FunctionView) -> Mock(spec=SKFunctionBase): - mock_function = 
Mock(spec=SKFunctionBase) +def create_mock_function(function_view: FunctionView): + mock_function = Mock(spec=KernelFunction) mock_function.describe.return_value = function_view mock_function.name = function_view.name - mock_function.skill_name = function_view.skill_name + mock_function.plugin_name = function_view.plugin_name + mock_function.is_semantic = function_view.is_semantic mock_function.description = function_view.description + mock_function.prompt_execution_settings = PromptExecutionSettings() return mock_function @@ -40,48 +46,45 @@ def test_throw_without_completion_service(): @pytest.mark.asyncio -async def test_plan_creation_async(): +async def test_plan_creation(): goal = "Translate Happy birthday to German." plan_str = dedent( """Here is a plan that can achieve the given task:\n\n{""plan"":\n{""rationale"": ""the list contains a function that allows to translate one language to another."", - ""function"": ""WriterSkill.Translate"",""parameters"": \n{""translate_from"": + ""function"": ""WriterPlugin.Translate"",""parameters"": \n{""translate_from"": ""english"",""translate_to"": ""german"",""input"": ""Happy birthday""}\n}\n}\n\n - This plan makes use of the Translate function in WriterSkill to translate the message + This plan makes use of the Translate function in WriterPlugin to translate the message `Happy birthday` from english to german.""" ) kernel = Mock(spec=Kernel) - mock_function = Mock(spec=SKFunctionBase) + mock_function = Mock(spec=KernelFunction) memory = Mock(spec=SemanticTextMemoryBase) - skills = Mock(spec=SkillCollectionBase) + plugins = KernelPluginCollection() function_view = FunctionView( name="Translate", description="Translate something", - skill_name="WriterSkill", + plugin_name="WriterPlugin", is_semantic=False, parameters=[], ) mock_function = create_mock_function(function_view) - skills.get_function.return_value = mock_function - context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) - return_context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) + plugins.add(plugin=KernelPlugin(name=function_view.plugin_name, functions=[mock_function])) + + context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) + return_context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) return_context.variables.update(plan_str) - mock_function.invoke_async.return_value = return_context + mock_function.invoke.return_value = return_context kernel.create_semantic_function.return_value = mock_function kernel.create_new_context.return_value = context planner = ActionPlanner(kernel) - plan = await planner.create_plan_async(goal) + plan = await planner.create_plan(goal) assert plan is not None assert plan.description == mock_function.description @@ -91,82 +94,79 @@ async def test_plan_creation_async(): @pytest.fixture -def skills_input(): +def plugins_input(): return [ ("SendEmail", "email", "Send an e-mail", False), ("GetEmailAddress", "email", "Get an e-mail address", False), - ("Translate", "WriterSkill", "Translate something", True), - ("Summarize", "SummarizeSkill", "Summarize something", True), + ("Translate", "WriterPlugin", "Translate something", True), + ("Summarize", "SummarizePlugin", "Summarize something", True), ] @pytest.fixture -def mock_context(skills_input): +def mock_context(plugins_input): memory = Mock(spec=Kernel) - context = Mock(spec=SKContext) + context = 
Mock(spec=KernelContext) functionsView = FunctionsView() - skills = Mock(spec=SkillCollectionBase) - mock_functions = [] - for name, skillName, description, isSemantic in skills_input: - function_view = FunctionView(name, skillName, description, [], isSemantic, True) + + plugins = MagicMock(spec=KernelPluginCollection) + + mock_plugins = {} + + for name, plugin_name, description, is_semantic in plugins_input: + function_view = FunctionView(name, plugin_name, description, [], is_semantic, True) mock_function = create_mock_function(function_view) functionsView.add_function(function_view) - _context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) + if plugin_name not in mock_plugins: + mock_plugins[plugin_name] = {} + mock_plugins[plugin_name][name] = mock_function + + _context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) _context.variables.update("MOCK FUNCTION CALLED") - mock_function.invoke_async.return_value = _context - mock_functions.append(mock_function) - - skills.get_function.side_effect = lambda skill_name, function_name: next( - ( - func - for func in mock_functions - if func.skill_name == skill_name and func.name == function_name - ), - None, - ) - skills.get_functions_view.return_value = functionsView - context.skills.return_value = skills - context.skills.get_functions_view.return_value = functionsView + mock_function.invoke.return_value = _context + + plugins.__getitem__.side_effect = lambda plugin_name: MagicMock(__getitem__=mock_plugins[plugin_name].__getitem__) + + context.plugins = plugins + context.plugins.get_functions_view.return_value = functionsView return context -def test_available_functions(skills_input, mock_context): +def test_available_functions(plugins_input, mock_context): goal = "Translate Happy birthday to German." kernel = Mock(spec=Kernel) planner = ActionPlanner(kernel) result = planner.list_of_functions(goal=goal, context=mock_context) - expected_skills = [f"{val[1]}.{val[0]}" for val in skills_input[1:]] + expected_plugins = [f"{val[1]}.{val[0]}" for val in plugins_input[1:]] - assert all(skill in result for skill in expected_skills) + assert all(plugin in result for plugin in expected_plugins) -def test_exclude_skills(skills_input, mock_context): +def test_exclude_plugins(plugins_input, mock_context): goal = "Translate Happy birthday to German." 
kernel = Mock(spec=Kernel) - # Exclude the first and second in skills_input - excluded_skill_name = "email" + # Exclude the first and second in plugins_input + excluded_plugin_name = "email" - planner_config = ActionPlannerConfig(excluded_skills=[excluded_skill_name]) + planner_config = ActionPlannerConfig(excluded_plugins=[excluded_plugin_name]) planner = ActionPlanner(kernel, config=planner_config) result = planner.list_of_functions(goal=goal, context=mock_context) - all_skills = [f"{val[1]}.{val[0]}" for val in skills_input] - excluded_skills = all_skills[:2] - expected_skills = all_skills[2:] + all_plugins = [f"{val[1]}.{val[0]}" for val in plugins_input] + excluded_plugins = all_plugins[:2] + expected_plugins = all_plugins[2:] - assert all(skill in result for skill in expected_skills) - assert all(skill not in result for skill in excluded_skills) + assert all(plugin in result for plugin in expected_plugins) + assert all(plugin not in result for plugin in excluded_plugins) -def test_exclude_functions(skills_input, mock_context): +def test_exclude_functions(plugins_input, mock_context): goal = "Translate Happy birthday to German." kernel = Mock(spec=Kernel) @@ -176,44 +176,36 @@ def test_exclude_functions(skills_input, mock_context): planner = ActionPlanner(kernel, config=planner_config) result = planner.list_of_functions(goal=goal, context=mock_context) - all_skills = [f"{val[1]}.{val[0]}" for val in skills_input] - excluded_skills = all_skills[:1] - expected_skills = all_skills[1:] + all_plugins = [f"{val[1]}.{val[0]}" for val in plugins_input] + excluded_plugins = all_plugins[:1] + expected_plugins = all_plugins[1:] - assert all(skill in result for skill in expected_skills) - assert all(skill not in result for skill in excluded_skills) + assert all(plugin in result for plugin in expected_plugins) + assert all(plugin not in result for plugin in excluded_plugins) @pytest.mark.asyncio -async def test_invalid_json_throw_async(): - goal = "Translate Happy birthday to German." 
- plan_str = '{"":{""function"": ""WriterSkill.Translate""}}' +async def test_empty_goal_throw(): + goal = "" kernel = Mock(spec=Kernel) - mock_function = Mock(spec=SKFunctionBase) + mock_function = Mock(spec=KernelFunction) memory = Mock(spec=SemanticTextMemoryBase) - skills = Mock(spec=SkillCollectionBase) + plugins = MagicMock(spec=KernelPluginCollection) function_view = FunctionView( name="Translate", description="Translate something", - skill_name="WriterSkill", + plugin_name="WriterPlugin", is_semantic=False, parameters=[], ) mock_function = create_mock_function(function_view) - skills.get_function.return_value = mock_function + plugins.__getitem__.return_value = MagicMock(__getitem__=MagicMock(return_value=mock_function)) - context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) - return_context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) - - return_context.variables.update(plan_str) - - mock_function.invoke_async.return_value = return_context + context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) + return_context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) + mock_function.invoke.return_value = return_context kernel.create_semantic_function.return_value = mock_function kernel.create_new_context.return_value = context @@ -221,35 +213,34 @@ async def test_invalid_json_throw_async(): planner = ActionPlanner(kernel) with pytest.raises(PlanningException): - await planner.create_plan_async(goal) + await planner.create_plan(goal) @pytest.mark.asyncio -async def test_empty_goal_throw_async(): - goal = "" +async def test_invalid_json_throw(): + goal = "Translate Happy birthday to German." 
+ plan_str = '{"":{""function"": ""WriterPlugin.Translate""}}' kernel = Mock(spec=Kernel) - mock_function = Mock(spec=SKFunctionBase) memory = Mock(spec=SemanticTextMemoryBase) - skills = Mock(spec=SkillCollectionBase) + plugins = MagicMock(spec=KernelPluginCollection) function_view = FunctionView( name="Translate", + plugin_name="WriterPlugin", description="Translate something", - skill_name="WriterSkill", is_semantic=False, parameters=[], ) mock_function = create_mock_function(function_view) - skills.get_function.return_value = mock_function - context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) - return_context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) - mock_function.invoke_async.return_value = return_context + plugins.__getitem__.return_value = MagicMock(__getitem__=MagicMock(return_value=mock_function)) + + context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) + return_context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) + + return_context.variables.update(plan_str) + mock_function.invoke.return_value = return_context kernel.create_semantic_function.return_value = mock_function kernel.create_new_context.return_value = context @@ -257,4 +248,4 @@ async def test_empty_goal_throw_async(): planner = ActionPlanner(kernel) with pytest.raises(PlanningException): - await planner.create_plan_async(goal) + await planner.create_plan(goal) diff --git a/python/tests/unit/planning/sequential_planner/test_sequential_planner.py b/python/tests/unit/planning/sequential_planner/test_sequential_planner.py index c4e04a42a4c1..8f15dbf63587 100644 --- a/python/tests/unit/planning/sequential_planner/test_sequential_planner.py +++ b/python/tests/unit/planning/sequential_planner/test_sequential_planner.py @@ -4,147 +4,140 @@ import pytest +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.kernel import Kernel from semantic_kernel.memory.semantic_text_memory import SemanticTextMemoryBase from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction from semantic_kernel.planning.planning_exception import PlanningException from semantic_kernel.planning.sequential_planner.sequential_planner import ( SequentialPlanner, ) -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.functions_view import FunctionsView -from semantic_kernel.skill_definition.skill_collection_base import SkillCollectionBase +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.functions_view import FunctionsView +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin +from semantic_kernel.plugin_definition.kernel_plugin_collection import ( + KernelPluginCollection, +) def create_mock_function(function_view: FunctionView): - mock_function = Mock(spec=SKFunctionBase) + mock_function = Mock(spec=KernelFunction) mock_function.describe.return_value = function_view mock_function.name = function_view.name - mock_function.skill_name = function_view.skill_name 
+ mock_function.plugin_name = function_view.plugin_name + mock_function.is_semantic = function_view.is_semantic + mock_function.description = function_view.description + mock_function.prompt_execution_settings = PromptExecutionSettings() return mock_function @pytest.mark.asyncio -@pytest.mark.parametrize( - "goal", ["Write a poem or joke and send it in an e-mail to Kai."] -) -async def test_it_can_create_plan_async(goal): +@pytest.mark.parametrize("goal", ["Write a poem or joke and send it in an e-mail to Kai."]) +async def test_it_can_create_plan(goal): # Arrange kernel = Mock(spec=Kernel) + kernel.prompt_template_engine = Mock() memory = Mock(spec=SemanticTextMemoryBase) input = [ ("SendEmail", "email", "Send an e-mail", False), ("GetEmailAddress", "email", "Get an e-mail address", False), - ("Translate", "WriterSkill", "Translate something", True), - ("Summarize", "SummarizeSkill", "Summarize something", True), + ("Translate", "WriterPlugin", "Translate something", True), + ("Summarize", "SummarizePlugin", "Summarize something", True), ] functionsView = FunctionsView() - skills = Mock(spec=SkillCollectionBase) + plugins = KernelPluginCollection() mock_functions = [] - for name, skillName, description, isSemantic in input: - function_view = FunctionView(name, skillName, description, [], isSemantic, True) + for name, pluginName, description, isSemantic in input: + function_view = FunctionView(name, pluginName, description, [], isSemantic, True) mock_function = create_mock_function(function_view) functionsView.add_function(function_view) - context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) + context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) context.variables.update("MOCK FUNCTION CALLED") - mock_function.invoke_async.return_value = context + mock_function.invoke.return_value = context mock_functions.append(mock_function) - skills.get_function.side_effect = lambda skill_name, function_name: next( - ( - func - for func in mock_functions - if func.skill_name == skill_name and func.name == function_name - ), - None, - ) - skills.get_functions_view.return_value = functionsView + if pluginName not in plugins.plugins: + plugins.add(KernelPlugin(name=pluginName, description="Mock plugin")) + plugins.add_functions_to_plugin([mock_function], pluginName) expected_functions = [x[0] for x in input] - expected_skills = [x[1] for x in input] + expected_plugins = [x[1] for x in input] - context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) - return_context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) + context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) + return_context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) plan_string = """ - - + + """ return_context.variables.update(plan_string) - mock_function_flow_function = Mock(spec=SKFunctionBase) - mock_function_flow_function.invoke_async.return_value = return_context + mock_function_flow_function = Mock(spec=KernelFunction) + mock_function_flow_function.invoke.return_value = return_context - kernel.skills = skills + kernel.plugins = plugins kernel.create_new_context.return_value = context kernel.register_semantic_function.return_value = mock_function_flow_function planner = SequentialPlanner(kernel) # Act - plan = await planner.create_plan_async(goal) 
+ plan = await planner.create_plan(goal) # Assert assert plan.description == goal - assert any( - step.name in expected_functions and step.skill_name in expected_skills - for step in plan._steps - ) + assert any(step.name in expected_functions and step.plugin_name in expected_plugins for step in plan._steps) for expected_function in expected_functions: assert any(step.name == expected_function for step in plan._steps) - for expectedSkill in expected_skills: - assert any(step.skill_name == expectedSkill for step in plan._steps) + for expectedPlugin in expected_plugins: + assert any(step.plugin_name == expectedPlugin for step in plan._steps) @pytest.mark.asyncio -async def test_empty_goal_throws_async(): +async def test_empty_goal_throws(): # Arrange kernel = Mock(spec=Kernel) + kernel.prompt_template_engine = Mock() planner = SequentialPlanner(kernel) # Act & Assert with pytest.raises(PlanningException): - await planner.create_plan_async("") + await planner.create_plan("") @pytest.mark.asyncio -async def test_invalid_xml_throws_async(): +async def test_invalid_xml_throws(): # Arrange kernel = Mock(spec=Kernel) + kernel.prompt_template_engine = Mock() memory = Mock(spec=SemanticTextMemoryBase) - skills = Mock(spec=SkillCollectionBase) + plugins = Mock(spec=KernelPluginCollection) functionsView = FunctionsView() - skills.get_functions_view.return_value = functionsView + plugins.get_functions_view.return_value = functionsView plan_string = "notvalid<" - return_context = SKContext.construct( - variables=ContextVariables(plan_string), memory=memory, skill_collection=skills + return_context = KernelContext.model_construct( + variables=ContextVariables(plan_string), + memory=memory, + plugins=plugins, ) - context = SKContext.construct( - variables=ContextVariables(), memory=memory, skill_collection=skills - ) + context = KernelContext.model_construct(variables=ContextVariables(), memory=memory, plugins=plugins) - mock_function_flow_function = Mock(spec=SKFunctionBase) - mock_function_flow_function.invoke_async.return_value = return_context + mock_function_flow_function = Mock(spec=KernelFunction) + mock_function_flow_function.invoke.return_value = return_context - kernel.skills = skills + kernel.plugins = plugins kernel.create_new_context.return_value = context kernel.register_semantic_function.return_value = mock_function_flow_function @@ -152,4 +145,4 @@ async def test_invalid_xml_throws_async(): # Act & Assert with pytest.raises(PlanningException): - await planner.create_plan_async("goal") + await planner.create_plan("goal") diff --git a/python/tests/unit/planning/sequential_planner/test_sequential_planner_extensions.py b/python/tests/unit/planning/sequential_planner/test_sequential_planner_extensions.py index 5357ddc14212..87e46c312332 100644 --- a/python/tests/unit/planning/sequential_planner/test_sequential_planner_extensions.py +++ b/python/tests/unit/planning/sequential_planner/test_sequential_planner_extensions.py @@ -7,21 +7,19 @@ from semantic_kernel.memory.memory_query_result import MemoryQueryResult from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.kernel_context import KernelContext from semantic_kernel.planning.sequential_planner.sequential_planner_config import ( SequentialPlannerConfig, ) from 
semantic_kernel.planning.sequential_planner.sequential_planner_extensions import ( SequentialPlannerFunctionViewExtension, - SequentialPlannerSKContextExtension, + SequentialPlannerKernelContextExtension, ) -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.functions_view import FunctionsView -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.functions_view import FunctionsView +from semantic_kernel.plugin_definition.kernel_plugin_collection import ( + KernelPluginCollection, ) -from semantic_kernel.skill_definition.skill_collection import SkillCollection async def _async_generator(query_result): @@ -29,9 +27,9 @@ async def _async_generator(query_result): @pytest.mark.asyncio -async def test_can_call_get_available_functions_with_no_functions_async(): +async def test_can_call_get_available_functions_with_no_functions(): variables = ContextVariables() - skills = SkillCollection() + plugins = KernelPluginCollection() memory = Mock(spec=SemanticTextMemoryBase) memory_query_result = MemoryQueryResult( @@ -46,32 +44,29 @@ async def test_can_call_get_available_functions_with_no_functions_async(): ) async_enumerable = _async_generator(memory_query_result) - memory.search_async.return_value = async_enumerable + memory.search.return_value = async_enumerable # Arrange GetAvailableFunctionsAsync parameters - context = SKContext(variables, memory, skills.read_only_skill_collection, Mock()) + context = KernelContext(variables=variables, memory=memory, plugins=plugins) config = SequentialPlannerConfig() semantic_query = "test" # Act - result = await SequentialPlannerSKContextExtension.get_available_functions_async( - context, config, semantic_query - ) + result = await SequentialPlannerKernelContextExtension.get_available_functions(context, config, semantic_query) # Assert assert result is not None - memory.search_async.assert_not_called() + memory.search.assert_not_called() @pytest.mark.asyncio -async def test_can_call_get_available_functions_with_functions_async(): +async def test_can_call_get_available_functions_with_functions(): variables = ContextVariables() - function_mock = Mock(spec=SKFunctionBase) functions_view = FunctionsView() function_view = FunctionView( "functionName", - "skillName", + "pluginName", "description", [], is_semantic=True, @@ -79,7 +74,7 @@ async def test_can_call_get_available_functions_with_functions_async(): ) native_function_view = FunctionView( "nativeFunctionName", - "skillName", + "pluginName", "description", [], is_semantic=False, @@ -88,15 +83,12 @@ async def test_can_call_get_available_functions_with_functions_async(): functions_view.add_function(function_view) functions_view.add_function(native_function_view) - skills = Mock(spec=ReadOnlySkillCollectionBase) - skills.get_function.return_value = function_mock - skills.get_functions_view.return_value = functions_view + mock_plugins = Mock(spec=KernelPluginCollection) + mock_plugins.get_functions_view.return_value = functions_view memory_query_result = MemoryQueryResult( is_reference=False, - id=SequentialPlannerFunctionViewExtension.to_fully_qualified_name( - function_view - ), + id=SequentialPlannerFunctionViewExtension.to_fully_qualified_name(function_view), text="text", description="description", external_source_name="sourceName", @@ -107,19 +99,15 @@ async def 
test_can_call_get_available_functions_with_functions_async(): async_enumerable = _async_generator(memory_query_result) memory = Mock(spec=SemanticTextMemoryBase) - memory.search_async.return_value = async_enumerable + memory.search.return_value = async_enumerable # Arrange GetAvailableFunctionsAsync parameters - context = SKContext.construct( - variables=variables, memory=memory, skill_collection=skills - ) + context = KernelContext.model_construct(variables=variables, memory=memory, plugins=mock_plugins) config = SequentialPlannerConfig() semantic_query = "test" # Act - result = await SequentialPlannerSKContextExtension.get_available_functions_async( - context, config, semantic_query - ) + result = await SequentialPlannerKernelContextExtension.get_available_functions(context, config, semantic_query) # Assert assert result is not None @@ -130,9 +118,7 @@ async def test_can_call_get_available_functions_with_functions_async(): config.included_functions.append(["nativeFunctionName"]) # Act - result = await SequentialPlannerSKContextExtension.get_available_functions_async( - context, config, semantic_query - ) + result = await SequentialPlannerKernelContextExtension.get_available_functions(context, config, semantic_query) # Assert assert result is not None @@ -142,16 +128,15 @@ async def test_can_call_get_available_functions_with_functions_async(): @pytest.mark.asyncio -async def test_can_call_get_available_functions_with_functions_and_relevancy_async(): +async def test_can_call_get_available_functions_with_functions_and_relevancy(): # Arrange variables = ContextVariables() # Arrange FunctionView - function_mock = Mock(spec=SKFunctionBase) functions_view = FunctionsView() function_view = FunctionView( "functionName", - "skillName", + "pluginName", "description", [], is_semantic=True, @@ -159,7 +144,7 @@ async def test_can_call_get_available_functions_with_functions_and_relevancy_asy ) native_function_view = FunctionView( "nativeFunctionName", - "skillName", + "pluginName", "description", [], is_semantic=False, @@ -171,9 +156,7 @@ async def test_can_call_get_available_functions_with_functions_and_relevancy_asy # Arrange Mock Memory and Result memory_query_result = MemoryQueryResult( is_reference=False, - id=SequentialPlannerFunctionViewExtension.to_fully_qualified_name( - function_view - ), + id=SequentialPlannerFunctionViewExtension.to_fully_qualified_name(function_view), text="text", description="description", external_source_name="sourceName", @@ -182,27 +165,22 @@ async def test_can_call_get_available_functions_with_functions_and_relevancy_asy embedding=None, ) memory = Mock(spec=SemanticTextMemoryBase) - memory.search_async.return_value = _async_generator(memory_query_result) + memory.search.return_value = _async_generator(memory_query_result) - skills = Mock(spec=ReadOnlySkillCollectionBase) - skills.get_function.return_value = function_mock - skills.get_functions_view.return_value = functions_view - skills.read_only_skill_collection = skills + mock_plugins = Mock(spec=KernelPluginCollection) + mock_plugins.get_functions_view.return_value = functions_view # Arrange GetAvailableFunctionsAsync parameters - context = SKContext.construct( + context = KernelContext.model_construct( variables=variables, memory=memory, - skill_collection=skills, + plugins=mock_plugins, ) - context._logger = Mock() config = SequentialPlannerConfig(relevancy_threshold=0.78) semantic_query = "test" # Act - result = await SequentialPlannerSKContextExtension.get_available_functions_async( - context, config, 
semantic_query - ) + result = await SequentialPlannerKernelContextExtension.get_available_functions(context, config, semantic_query) # Assert assert result is not None @@ -211,12 +189,10 @@ async def test_can_call_get_available_functions_with_functions_and_relevancy_asy # Arrange update IncludedFunctions config.included_functions.append("nativeFunctionName") - memory.search_async.return_value = _async_generator(memory_query_result) + memory.search.return_value = _async_generator(memory_query_result) # Act - result = await SequentialPlannerSKContextExtension.get_available_functions_async( - context, config, semantic_query - ) + result = await SequentialPlannerKernelContextExtension.get_available_functions(context, config, semantic_query) # Assert assert result is not None @@ -226,10 +202,10 @@ async def test_can_call_get_available_functions_with_functions_and_relevancy_asy @pytest.mark.asyncio -async def test_can_call_get_available_functions_async_with_default_relevancy_async(): +async def test_can_call_get_available_functions_with_default_relevancy(): # Arrange variables = ContextVariables() - skills = SkillCollection() + plugins = KernelPluginCollection() # Arrange Mock Memory and Result memory_query_result = MemoryQueryResult( @@ -244,20 +220,16 @@ async def test_can_call_get_available_functions_async_with_default_relevancy_asy ) async_enumerable = _async_generator(memory_query_result) memory = Mock(spec=SemanticTextMemoryBase) - memory.search_async.return_value = async_enumerable + memory.search.return_value = async_enumerable # Arrange GetAvailableFunctionsAsync parameters - context = SKContext.construct( - variables=variables, memory=memory, skill_collection=skills - ) + context = KernelContext.model_construct(variables=variables, memory=memory, plugins=plugins) config = SequentialPlannerConfig(relevancy_threshold=0.78) semantic_query = "test" # Act - result = await SequentialPlannerSKContextExtension.get_available_functions_async( - context, config, semantic_query - ) + result = await SequentialPlannerKernelContextExtension.get_available_functions(context, config, semantic_query) # Assert assert result is not None - memory.search_async.assert_called_once() + memory.search.assert_called_once() diff --git a/python/tests/unit/planning/sequential_planner/test_sequential_planner_parser.py b/python/tests/unit/planning/sequential_planner/test_sequential_planner_parser.py index d39e226201a3..c88e6436e8c0 100644 --- a/python/tests/unit/planning/sequential_planner/test_sequential_planner_parser.py +++ b/python/tests/unit/planning/sequential_planner/test_sequential_planner_parser.py @@ -4,39 +4,41 @@ import pytest +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.kernel import Kernel -from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.orchestration.kernel_function import KernelFunction from semantic_kernel.planning.planning_exception import PlanningException from semantic_kernel.planning.sequential_planner.sequential_planner_parser import ( SequentialPlanParser, ) -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.functions_view import FunctionsView +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.functions_view import FunctionsView +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin -def create_mock_function(function_view: FunctionView) -> 
SKFunctionBase: - mock_function = Mock(spec=SKFunctionBase) +def create_mock_function(function_view: FunctionView) -> KernelFunction: + mock_function = Mock(spec=KernelFunction) mock_function.describe.return_value = function_view mock_function.name = function_view.name - mock_function.skill_name = function_view.skill_name + mock_function.plugin_name = function_view.plugin_name mock_function.description = function_view.description + mock_function.is_semantic = function_view.is_semantic + mock_function.prompt_execution_settings = PromptExecutionSettings() return mock_function def create_kernel_and_functions_mock(functions) -> Kernel: kernel = Kernel() functions_view = FunctionsView() - for name, skill_name, description, is_semantic, result_string in functions: - function_view = FunctionView( - name, skill_name, description, [], is_semantic, True - ) + for name, plugin_name, description, is_semantic, result_string in functions: + function_view = FunctionView(name, plugin_name, description, [], is_semantic, True) functions_view.add_function(function_view) mock_function = create_mock_function(function_view) result = kernel.create_new_context() result.variables.update(result_string) - mock_function.invoke_async.return_value = result - kernel._skill_collection.add_semantic_function(mock_function) + mock_function.invoke.return_value = result + kernel.plugins.add(KernelPlugin(name=plugin_name, functions=[mock_function])) return kernel @@ -45,58 +47,55 @@ def test_can_call_to_plan_from_xml(): functions = [ ( "Summarize", - "SummarizeSkill", + "SummarizePlugin", "Summarize an input", True, "This is the summary.", ), - ("Translate", "WriterSkill", "Translate to french", True, "Bonjour!"), + ("Translate", "WriterPlugin", "Translate to french", True, "Bonjour!"), ( "GetEmailAddressAsync", - "email", + "get_email", "Get email address", False, "johndoe@email.com", ), - ("SendEmailAsync", "email", "Send email", False, "Email sent."), + ("SendEmailAsync", "send_email", "Send email", False, "Email sent."), ] kernel = create_kernel_and_functions_mock(functions) plan_string = """ - - - + + - + """ goal = "Summarize an input, translate to french, and e-mail to John Doe" plan = SequentialPlanParser.to_plan_from_xml( plan_string, goal, - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) assert plan is not None - assert ( - plan.description - == "Summarize an input, translate to french, and e-mail to John Doe" - ) + assert plan.description == "Summarize an input, translate to french, and e-mail to John Doe" assert len(plan._steps) == 4 - assert plan._steps[0].skill_name == "SummarizeSkill" + assert plan._steps[0].plugin_name == "SummarizePlugin" assert plan._steps[0].name == "Summarize" - assert plan._steps[1].skill_name == "WriterSkill" + assert plan._steps[1].plugin_name == "WriterPlugin" assert plan._steps[1].name == "Translate" assert plan._steps[1].parameters["language"] == "French" assert "TRANSLATED_SUMMARY" in plan._steps[1]._outputs - assert plan._steps[2].skill_name == "email" + assert plan._steps[2].plugin_name == "get_email" assert plan._steps[2].name == "GetEmailAddressAsync" assert plan._steps[2].parameters["input"] == "John Doe" assert "EMAIL_ADDRESS" in plan._steps[2]._outputs - assert plan._steps[3].skill_name == "email" + assert plan._steps[3].plugin_name == "send_email" assert plan._steps[3].name == "SendEmailAsync" assert "$TRANSLATED_SUMMARY" in plan._steps[3].parameters["input"] assert 
"$EMAIL_ADDRESS" in plan._steps[3].parameters["email_address"] @@ -111,7 +110,7 @@ def test_invalid_plan_execute_plan_returns_invalid_result(): SequentialPlanParser.to_plan_from_xml( "", "Solve the equation x^2 = 2.", - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) @@ -121,11 +120,11 @@ def test_can_create_plan_with_text_nodes(): plan_text = """ Test the functionFlowRunner - + This is some text """ functions = [ - ("Echo", "MockSkill", "Echo an input", True, "Mock Echo Result"), + ("Echo", "MockPlugin", "Echo an input", True, "Mock Echo Result"), ] kernel = create_kernel_and_functions_mock(functions) @@ -133,14 +132,14 @@ def test_can_create_plan_with_text_nodes(): plan = SequentialPlanParser.to_plan_from_xml( plan_text, goal_text, - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) # Assert assert plan is not None assert plan.description == goal_text assert len(plan._steps) == 1 - assert plan._steps[0].skill_name == "MockSkill" + assert plan._steps[0].plugin_name == "MockPlugin" assert plan._steps[0].name == "Echo" @@ -150,27 +149,25 @@ def test_can_create_plan_with_text_nodes(): ( """ - - + + """, True, ), ( """ - - + + """, False, ), ], ) -def test_can_create_plan_with_invalid_function_nodes( - plan_text, allow_missing_functions -): +def test_can_create_plan_with_invalid_function_nodes(plan_text, allow_missing_functions): # Arrange functions = [ - ("Echo", "MockSkill", "Echo an input", True, "Mock Echo Result"), + ("Echo", "MockPlugin", "Echo an input", True, "Mock Echo Result"), ] kernel = create_kernel_and_functions_mock(functions) # Act and Assert @@ -178,7 +175,7 @@ def test_can_create_plan_with_invalid_function_nodes( plan = SequentialPlanParser.to_plan_from_xml( plan_text, "", - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), allow_missing_functions, ) @@ -186,19 +183,19 @@ def test_can_create_plan_with_invalid_function_nodes( assert plan is not None assert len(plan._steps) == 2 - assert plan._steps[0].skill_name == "MockSkill" + assert plan._steps[0].plugin_name == "MockPlugin" assert plan._steps[0].name == "Echo" assert plan._steps[0].description == "Echo an input" - assert plan._steps[1].skill_name == plan.__class__.__name__ + assert plan._steps[1].plugin_name == plan.__class__.__name__ assert plan._steps[1].name == "" - assert plan._steps[1].description == "MockSkill.DoesNotExist" + assert plan._steps[1].description == "MockPlugin.DoesNotExist" else: with pytest.raises(PlanningException): SequentialPlanParser.to_plan_from_xml( plan_text, "", - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), allow_missing_functions, ) @@ -208,25 +205,25 @@ def test_can_create_plan_with_other_text(): goal_text = "Test the functionFlowRunner" plan_text1 = """Possible result: Test the functionFlowRunner - + This is some text """ plan_text2 = """ - + This is some text plan end""" plan_text3 = """ - + This is some text plan end""" functions = [ - ("Echo", "MockSkill", "Echo an input", True, "Mock Echo Result"), + ("Echo", "MockPlugin", "Echo an input", True, "Mock Echo Result"), ] kernel = create_kernel_and_functions_mock(functions) @@ -234,36 +231,36 @@ def test_can_create_plan_with_other_text(): plan1 = 
SequentialPlanParser.to_plan_from_xml( plan_text1, goal_text, - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) plan2 = SequentialPlanParser.to_plan_from_xml( plan_text2, goal_text, - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) plan3 = SequentialPlanParser.to_plan_from_xml( plan_text3, goal_text, - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) # Assert assert plan1 is not None assert plan1.description == goal_text assert len(plan1._steps) == 1 - assert plan1._steps[0].skill_name == "MockSkill" + assert plan1._steps[0].plugin_name == "MockPlugin" assert plan1._steps[0].name == "Echo" assert plan2 is not None assert plan2.description == goal_text assert len(plan2._steps) == 1 - assert plan2._steps[0].skill_name == "MockSkill" + assert plan2._steps[0].plugin_name == "MockPlugin" assert plan2._steps[0].name == "Echo" assert plan3 is not None assert plan3.description == goal_text assert len(plan3._steps) == 1 - assert plan3._steps[0].skill_name == "MockSkill" + assert plan3._steps[0].plugin_name == "MockPlugin" assert plan3._steps[0].name == "Echo" @@ -308,13 +305,13 @@ def test_can_create_plan_with_open_api_plugin(plan_text): plan = SequentialPlanParser.to_plan_from_xml( plan_text, "", - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) # Assert assert plan is not None assert len(plan._steps) == 1 - assert plan._steps[0].skill_name == "CodeSearch" + assert plan._steps[0].plugin_name == "CodeSearch" assert plan._steps[0].name == "codesearchresults_post" @@ -322,12 +319,12 @@ def test_can_create_plan_with_ignored_nodes(): # Arrange goal_text = "Test the functionFlowRunner" plan_text = """ - + Some other tag - + """ functions = [ - ("Echo", "MockSkill", "Echo an input", True, "Mock Echo Result"), + ("Echo", "MockPlugin", "Echo an input", True, "Mock Echo Result"), ] kernel = create_kernel_and_functions_mock(functions) @@ -335,15 +332,15 @@ def test_can_create_plan_with_ignored_nodes(): plan = SequentialPlanParser.to_plan_from_xml( plan_text, goal_text, - SequentialPlanParser.get_skill_function(kernel.create_new_context()), + SequentialPlanParser.get_plugin_function(kernel.create_new_context()), ) # Assert assert plan is not None assert plan.description == goal_text assert len(plan._steps) == 2 - assert plan._steps[0].skill_name == "MockSkill" + assert plan._steps[0].plugin_name == "MockPlugin" assert plan._steps[0].name == "Echo" assert len(plan._steps[1]._steps) == 0 - assert plan._steps[1].skill_name == "MockSkill" + assert plan._steps[1].plugin_name == "MockPlugin" assert plan._steps[1].name == "Echo" diff --git a/python/tests/unit/planning/stepwise_planner/test_stepwise_planner_parse_result.py b/python/tests/unit/planning/stepwise_planner/test_stepwise_planner_parse_result.py index 6be9228effe5..93bb31070cc2 100644 --- a/python/tests/unit/planning/stepwise_planner/test_stepwise_planner_parse_result.py +++ b/python/tests/unit/planning/stepwise_planner/test_stepwise_planner_parse_result.py @@ -22,7 +22,7 @@ ) def test_when_input_is_final_answer_returns_final_answer(input: str, expected: str): kernel = Mock(spec=Kernel) - + kernel.prompt_template_engine = Mock() planner = StepwisePlanner(kernel) result = 
planner.parse_result(input) @@ -41,6 +41,7 @@ def test_when_input_is_final_answer_returns_final_answer(input: str, expected: s ) def test_when_input_is_only_thought_does_not_throw_error(input: str, expected: str): kernel = Mock(spec=Kernel) + kernel.prompt_template_engine = Mock() planner = StepwisePlanner(kernel) result = planner.parse_result(input) assert result.thought == expected diff --git a/python/tests/unit/planning/test_plan_creation.py b/python/tests/unit/planning/test_plan_creation.py index 1fdc740dcbbd..f2379a6fd10f 100644 --- a/python/tests/unit/planning/test_plan_creation.py +++ b/python/tests/unit/planning/test_plan_creation.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. import semantic_kernel as sk -from semantic_kernel.core_skills.math_skill import MathSkill +from semantic_kernel.core_plugins.math_plugin import MathPlugin from semantic_kernel.orchestration.context_variables import ContextVariables from semantic_kernel.planning import Plan @@ -11,13 +11,13 @@ def test_create_empty_plan(): assert plan is not None assert plan.name == "" assert type(plan.state) is ContextVariables - assert plan.skill_name == "" + assert plan.plugin_name == "" assert plan.description == "" assert plan.function is None assert type(plan.parameters) is ContextVariables assert plan.is_semantic is None assert plan.is_native is None - assert plan.request_settings is None + assert plan.prompt_execution_settings is None assert plan.has_next_step is False assert plan.next_step_index == 0 assert plan._steps == [] @@ -28,13 +28,13 @@ def test_create_plan_with_name(): assert plan is not None assert plan.name == "test" assert type(plan.state) is ContextVariables - assert plan.skill_name == "" + assert plan.plugin_name == "" assert plan.description == "" assert plan.function is None assert type(plan.parameters) is ContextVariables assert plan.is_semantic is None assert plan.is_native is None - assert plan.request_settings is None + assert plan.prompt_execution_settings is None assert plan.has_next_step is False assert plan.next_step_index == 0 assert plan._steps == [] @@ -45,13 +45,13 @@ def test_create_plan_with_name_and_description(): assert plan is not None assert plan.name == "test" assert type(plan.state) is ContextVariables - assert plan.skill_name == "" + assert plan.plugin_name == "" assert plan.description == "test description" assert plan.function is None assert type(plan.parameters) is ContextVariables assert plan.is_semantic is None assert plan.is_native is None - assert plan.request_settings is None + assert plan.prompt_execution_settings is None assert plan.has_next_step is False assert plan.next_step_index == 0 assert plan._steps == [] @@ -66,13 +66,13 @@ def test_create_plan_with_state_and_parameters(): assert plan is not None assert plan.name == "test" assert plan.state["input"] == "" - assert plan.skill_name == "" + assert plan.plugin_name == "" assert plan.description == "" assert plan.function is None assert plan.parameters["test_param"] == "test_param_val" assert plan.is_semantic is None assert plan.is_native is None - assert plan.request_settings is None + assert plan.prompt_execution_settings is None assert plan.has_next_step is False assert plan.next_step_index == 0 assert plan._steps == [] @@ -82,23 +82,23 @@ def test_create_plan_with_name_and_function(): # create a kernel kernel = sk.Kernel() - # import test (math) skill - skill = MathSkill() - skill_config_dict = kernel.import_skill(skill, "math") + # import test (math) plugin + plugin = MathPlugin() + plugin = 
kernel.import_plugin(plugin, "math") - test_function = skill_config_dict["Add"] + test_function = plugin["Add"] plan = Plan(name="test", function=test_function) assert plan is not None assert plan.name == "Add" assert type(plan.state) is ContextVariables - assert plan.skill_name == "math" + assert plan.plugin_name == "math" assert plan.description == test_function.description assert plan.function is test_function assert type(plan.parameters) is ContextVariables assert plan.is_semantic is test_function.is_semantic - assert plan.is_native is test_function.is_native - assert plan.request_settings == test_function.request_settings + assert plan.is_native is not test_function.is_semantic + assert plan.prompt_execution_settings == test_function.prompt_execution_settings assert plan.has_next_step is False assert plan.next_step_index == 0 assert plan._steps == [] @@ -108,12 +108,12 @@ def test_create_multistep_plan_with_functions(): # create a kernel kernel = sk.Kernel() - # import test (math) skill - skill = MathSkill() - skill_config_dict = kernel.import_skill(skill, "math") + # import test (math) plugin + plugin = MathPlugin() + plugin = kernel.import_plugin(plugin, "math") - test_function1 = skill_config_dict["Add"] - test_function2 = skill_config_dict["Subtract"] + test_function1 = plugin["Add"] + test_function2 = plugin["Subtract"] plan = Plan(name="multistep_test") plan.add_steps([test_function1, test_function2]) @@ -121,13 +121,13 @@ def test_create_multistep_plan_with_functions(): assert plan is not None assert plan.name == "multistep_test" assert type(plan.state) is ContextVariables - assert plan.skill_name == "" + assert plan.plugin_name == "" assert plan.description == "" assert plan.function is None assert type(plan.parameters) is ContextVariables assert plan.is_semantic is None assert plan.is_native is None - assert plan.request_settings is None + assert plan.prompt_execution_settings is None assert plan.has_next_step is True assert plan.next_step_index == 0 assert len(plan._steps) == 2 @@ -137,12 +137,12 @@ def test_create_multistep_plan_with_plans(): # create a kernel kernel = sk.Kernel() - # import test (math) skill - skill = MathSkill() - skill_config_dict = kernel.import_skill(skill, "math") + # import test (math) plugin + plugin = MathPlugin() + plugin = kernel.import_plugin(plugin, "math") - test_function1 = skill_config_dict["Add"] - test_function2 = skill_config_dict["Subtract"] + test_function1 = plugin["Add"] + test_function2 = plugin["Subtract"] plan = Plan(name="multistep_test") plan_step1 = Plan(name="step1", function=test_function1) @@ -152,13 +152,13 @@ def test_create_multistep_plan_with_plans(): assert plan is not None assert plan.name == "multistep_test" assert type(plan.state) is ContextVariables - assert plan.skill_name == "" + assert plan.plugin_name == "" assert plan.description == "" assert plan.function is None assert type(plan.parameters) is ContextVariables assert plan.is_semantic is None assert plan.is_native is None - assert plan.request_settings is None + assert plan.prompt_execution_settings is None assert plan.has_next_step is True assert plan.next_step_index == 0 assert len(plan._steps) == 2 @@ -168,25 +168,25 @@ def test_add_step_to_plan(): # create a kernel kernel = sk.Kernel() - # import test (math) skill - skill = MathSkill() - skill_config_dict = kernel.import_skill(skill, "math") + # import test (math) plugin + plugin = MathPlugin() + plugin = kernel.import_plugin(plugin, "math") - test_function1 = skill_config_dict["Add"] - test_function2 = 
skill_config_dict["Subtract"] + test_function1 = plugin["Add"] + test_function2 = plugin["Subtract"] plan = Plan(name="multistep_test", function=test_function1) plan.add_steps([test_function2]) assert plan is not None assert plan.name == "Add" assert type(plan.state) is ContextVariables - assert plan.skill_name == "math" + assert plan.plugin_name == "math" assert plan.description == test_function1.description assert plan.function is test_function1 assert type(plan.parameters) is ContextVariables assert plan.is_semantic is test_function1.is_semantic - assert plan.is_native is test_function1.is_native - assert plan.request_settings == test_function1.request_settings + assert plan.is_native is not test_function1.is_semantic + assert plan.prompt_execution_settings == test_function1.prompt_execution_settings assert plan.has_next_step is True assert plan.next_step_index == 0 assert len(plan._steps) == 1 diff --git a/python/tests/unit/planning/test_plan_execution.py b/python/tests/unit/planning/test_plan_execution.py index fa26475aa848..28c43c5f5cbf 100644 --- a/python/tests/unit/planning/test_plan_execution.py +++ b/python/tests/unit/planning/test_plan_execution.py @@ -3,39 +3,41 @@ import pytest import semantic_kernel as sk -from semantic_kernel.core_skills.math_skill import MathSkill -from semantic_kernel.core_skills.text_skill import TextSkill +from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.core_plugins.text_plugin import TextPlugin from semantic_kernel.planning import Plan -def test_invoke_empty_plan(): +@pytest.mark.asyncio +async def test_invoke_empty_plan(): plan = Plan() - result = plan.invoke() + result = await plan.invoke() assert result.result == "" @pytest.mark.asyncio async def test_invoke_empty_plan_async(): plan = Plan() - result = await plan.invoke_async() + result = await plan.invoke() assert result.result == "" -def test_invoke_plan_constructed_with_function(): +@pytest.mark.asyncio +async def test_invoke_plan_constructed_with_function(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = TextSkill() - skill_config_dict = kernel.import_skill(skill, "text") - test_function = skill_config_dict["uppercase"] + # import test (text) plugin + plugin = TextPlugin() + plugin = kernel.import_plugin(plugin, "text") + test_function = plugin["uppercase"] # setup context context = kernel.create_new_context() context["input"] = "hello world " plan = Plan(name="test", function=test_function) - result = plan.invoke(context=context) + result = await plan.invoke(context=context) assert result.result == "HELLO WORLD " @@ -44,28 +46,29 @@ async def test_invoke_plan_constructed_with_function_async(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = TextSkill() - skill_config_dict = kernel.import_skill(skill, "text") - test_function = skill_config_dict["uppercase"] + # import test (text) plugin + plugin = TextPlugin() + plugin = kernel.import_plugin(plugin, "text") + test_function = plugin["uppercase"] # setup context context = kernel.create_new_context() context["input"] = "hello world " plan = Plan(name="test", function=test_function) - result = await plan.invoke_async(context=context) + result = await plan.invoke(context=context) assert result.result == "HELLO WORLD " -def test_invoke_empty_plan_with_added_function_step(): +@pytest.mark.asyncio +async def test_invoke_empty_plan_with_added_function_step(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = TextSkill() - 
skill_config_dict = kernel.import_skill(skill, "text") - test_function = skill_config_dict["uppercase"] + # import test (text) plugin + plugin = TextPlugin() + plugin = kernel.import_plugin(plugin, "text") + test_function = plugin["uppercase"] # setup context context = kernel.create_new_context() @@ -73,7 +76,7 @@ def test_invoke_empty_plan_with_added_function_step(): plan = Plan(name="test") plan.add_steps([test_function]) - result = plan.invoke(context=context) + result = await plan.invoke(context=context) assert result.result == "HELLO WORLD " @@ -82,10 +85,10 @@ async def test_invoke_empty_plan_with_added_function_step_async(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = TextSkill() - skill_config_dict = kernel.import_skill(skill, "text") - test_function = skill_config_dict["uppercase"] + # import test (text) plugin + plugin = TextPlugin() + plugin = kernel.import_plugin(plugin, "text") + test_function = plugin["uppercase"] # setup context context = kernel.create_new_context() @@ -93,18 +96,19 @@ async def test_invoke_empty_plan_with_added_function_step_async(): plan = Plan(name="test") plan.add_steps([test_function]) - result = await plan.invoke_async(context=context) + result = await plan.invoke(context=context) assert result.result == "HELLO WORLD " -def test_invoke_empty_plan_with_added_plan_step(): +@pytest.mark.asyncio +async def test_invoke_empty_plan_with_added_plan_step(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = TextSkill() - skill_config_dict = kernel.import_skill(skill, "text") - test_function = skill_config_dict["uppercase"] + # import test (text) plugin + plugin = TextPlugin() + plugin = kernel.import_plugin(plugin, "text") + test_function = plugin["uppercase"] # setup context context = kernel.create_new_context() @@ -113,7 +117,7 @@ def test_invoke_empty_plan_with_added_plan_step(): plan = Plan(name="test") new_step = Plan(name="test", function=test_function) plan.add_steps([new_step]) - result = plan.invoke(context=context) + result = await plan.invoke(context=context) assert result.result == "HELLO WORLD " @@ -122,10 +126,10 @@ async def test_invoke_empty_plan_with_added_plan_step_async(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = TextSkill() - skill_config_dict = kernel.import_skill(skill, "text") - test_function = skill_config_dict["uppercase"] + # import test (text) plugin + plugin = TextPlugin() + plugin = kernel.import_plugin(plugin, "text") + test_function = plugin["uppercase"] # setup context context = kernel.create_new_context() @@ -134,19 +138,20 @@ async def test_invoke_empty_plan_with_added_plan_step_async(): plan = Plan(name="test") new_step = Plan(name="test", function=test_function) plan.add_steps([new_step]) - result = await plan.invoke_async(context=context) + result = await plan.invoke(context=context) assert result.result == "HELLO WORLD " -def test_invoke_multi_step_plan(): +@pytest.mark.asyncio +async def test_invoke_multi_step_plan(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = TextSkill() - skill_config_dict = kernel.import_skill(skill, "text") - test_function = skill_config_dict["uppercase"] - test_function2 = skill_config_dict["trim_end"] + # import test (text) plugin + plugin = TextPlugin() + plugin = kernel.import_plugin(plugin, "text") + test_function = plugin["uppercase"] + test_function2 = plugin["trim_end"] # setup context context = kernel.create_new_context() @@ -156,7 +161,7 @@ def 
test_invoke_multi_step_plan(): new_step = Plan(name="test", function=test_function) new_step2 = Plan(name="test", function=test_function2) plan.add_steps([new_step, new_step2]) - result = plan.invoke(context=context) + result = await plan.invoke(context=context) assert result.result == "HELLO WORLD" @@ -165,11 +170,11 @@ async def test_invoke_multi_step_plan_async(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = TextSkill() - skill_config_dict = kernel.import_skill(skill, "text") - test_function = skill_config_dict["uppercase"] - test_function2 = skill_config_dict["trim_end"] + # import test (text) plugin + plugin = TextPlugin() + plugin = kernel.import_plugin(plugin, "text") + test_function = plugin["uppercase"] + test_function2 = plugin["trim_end"] # setup context context = kernel.create_new_context() @@ -179,7 +184,7 @@ async def test_invoke_multi_step_plan_async(): new_step = Plan(name="test", function=test_function) new_step2 = Plan(name="test", function=test_function2) plan.add_steps([new_step, new_step2]) - result = await plan.invoke_async(context=context) + result = await plan.invoke(context=context) assert result.result == "HELLO WORLD" @@ -188,11 +193,11 @@ async def test_invoke_multi_step_plan_async_with_variables(): # create a kernel kernel = sk.Kernel() - # import test (text) skill - skill = MathSkill() - skill_config_dict = kernel.import_skill(skill, "math") - test_function = skill_config_dict["Add"] - test_function2 = skill_config_dict["Subtract"] + # import test (text) plugin + plugin = MathPlugin() + plugin = kernel.import_plugin(plugin, "math") + test_function = plugin["Add"] + test_function2 = plugin["Subtract"] plan = Plan(name="test") @@ -204,10 +209,8 @@ async def test_invoke_multi_step_plan_async_with_variables(): # setup context for step 2 context2 = kernel.create_new_context() context2["amount"] = "5" - new_step2 = Plan( - name="test", function=test_function2, parameters=context2.variables - ) + new_step2 = Plan(name="test", function=test_function2, parameters=context2.variables) plan.add_steps([new_step, new_step2]) - result = await plan.invoke_async(input="2") + result = await plan.invoke(input="2") assert result.result == "7" diff --git a/python/tests/unit/plugin_definition/test_functions_view.py b/python/tests/unit/plugin_definition/test_functions_view.py new file mode 100644 index 000000000000..001e6e6b0b3e --- /dev/null +++ b/python/tests/unit/plugin_definition/test_functions_view.py @@ -0,0 +1,149 @@ +# Copyright (c) Microsoft. All rights reserved. 
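The plan-execution tests above now await plan.invoke() directly; the following is a minimal illustrative sketch of that calling pattern, not text from the changed files (run_uppercase_plan and the asyncio.run wrapper are invented here to make the snippet standalone; the other names are the ones the tests themselves use):

import asyncio

import semantic_kernel as sk
from semantic_kernel.core_plugins.text_plugin import TextPlugin
from semantic_kernel.planning import Plan


async def run_uppercase_plan() -> None:
    kernel = sk.Kernel()
    # import_plugin replaces the old import_skill and returns the plugin's functions
    plugin = kernel.import_plugin(TextPlugin(), "text")
    plan = Plan(name="uppercase_plan", function=plugin["uppercase"])

    context = kernel.create_new_context()
    context["input"] = "hello world "

    # invoke() is awaited directly; the tests no longer use a separate invoke_async()
    result = await plan.invoke(context=context)
    print(result.result)  # "HELLO WORLD "


# asyncio.run(run_uppercase_plan())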
+ +import pytest + +from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.functions_view import FunctionsView + + +def test_add_semantic_function(): + view = FunctionView( + name="function1", + plugin_name="plugin1", + description="Semantic function", + parameters=[], + is_semantic=True, + is_asynchronous=True, + ) + functions_view = FunctionsView() + functions_view.add_function(view) + semantic_functions = functions_view.semantic_functions.get("plugin1") + assert len(semantic_functions) == 1 + assert semantic_functions[0] == view + + +def test_add_native_function(): + view = FunctionView( + name="function2", + plugin_name="plugin2", + description="Native function", + parameters=[], + is_semantic=False, + is_asynchronous=True, + ) + functions_view = FunctionsView() + functions_view.add_function(view) + native_functions = functions_view.native_functions.get("plugin2") + assert len(native_functions) == 1 + assert native_functions[0] == view + + +def test_add_multiple_functions(): + semantic_function = FunctionView( + name="function1", + plugin_name="plugin1", + description="Semantic function", + parameters=[], + is_semantic=True, + is_asynchronous=True, + ) + native_function = FunctionView( + name="function2", + plugin_name="plugin2", + description="Native function", + parameters=[], + is_semantic=False, + is_asynchronous=True, + ) + functions_view = FunctionsView() + functions_view.add_function(semantic_function) + functions_view.add_function(native_function) + semantic_functions = functions_view.semantic_functions.get("plugin1") + native_functions = functions_view.native_functions.get("plugin2") + assert len(semantic_functions) == 1 + assert semantic_functions[0] == semantic_function + assert len(native_functions) == 1 + assert native_functions[0] == native_function + + +def test_is_semantic(): + semantic_function = FunctionView( + name="function1", + plugin_name="plugin1", + description="Semantic function", + parameters=[], + is_semantic=True, + is_asynchronous=True, + ) + native_function = FunctionView( + name="function2", + plugin_name="plugin2", + description="Native function", + parameters=[], + is_semantic=False, + is_asynchronous=True, + ) + functions_view = FunctionsView() + functions_view.add_function(semantic_function) + functions_view.add_function(native_function) + assert functions_view.is_semantic("plugin1", "function1") is True + assert functions_view.is_semantic("plugin2", "function2") is False + assert functions_view.is_semantic("plugin1", "unregistered_function") is False + + +def test_is_native(): + semantic_function = FunctionView( + name="function1", + plugin_name="plugin1", + description="Semantic function", + parameters=[], + is_semantic=True, + is_asynchronous=True, + ) + native_function = FunctionView( + name="function2", + plugin_name="plugin2", + description="Native function", + parameters=[], + is_semantic=False, + is_asynchronous=True, + ) + functions_view = FunctionsView() + functions_view.add_function(semantic_function) + functions_view.add_function(native_function) + assert functions_view.is_native("plugin1", "function1") is False + assert functions_view.is_native("plugin2", "function2") is True + assert functions_view.is_native("plugin2", "unregistered_function") is False + + +def test_ambiguous_implementation(): + semantic_function = FunctionView( + name="function1", + plugin_name="plugin1", + description="Semantic function", + 
parameters=[], + is_semantic=True, + is_asynchronous=True, + ) + native_function = FunctionView( + name="function1", + plugin_name="plugin1", + description="Native function", + parameters=[], + is_semantic=False, + is_asynchronous=True, + ) + functions_view = FunctionsView() + functions_view.add_function(semantic_function) + functions_view.add_function(native_function) + + with pytest.raises(KernelException) as exc_info: + functions_view.is_semantic("plugin1", "function1") + + assert exc_info.value.error_code == KernelException.ErrorCodes.AmbiguousImplementation + + with pytest.raises(KernelException) as exc_info: + functions_view.is_native("plugin1", "function1") + + assert exc_info.value.error_code == KernelException.ErrorCodes.AmbiguousImplementation diff --git a/python/tests/unit/plugin_definition/test_kernel_function_decorators.py b/python/tests/unit/plugin_definition/test_kernel_function_decorators.py new file mode 100644 index 000000000000..f2fda443b5f4 --- /dev/null +++ b/python/tests/unit/plugin_definition/test_kernel_function_decorators.py @@ -0,0 +1,35 @@ +from semantic_kernel.plugin_definition import kernel_function + + +class MiscClass: + __test__ = False + + @kernel_function(description="description") + def func_with_description(self, input): + return input + + @kernel_function(description="description") + def func_no_name(self, input): + return input + + @kernel_function(description="description", name="my-name") + def func_with_name(self, input): + return input + + +def test_description(): + decorator_test = MiscClass() + my_func = getattr(decorator_test, "func_with_description") + assert my_func.__kernel_function_description__ == "description" + + +def test_kernel_function_name_not_specified(): + decorator_test = MiscClass() + my_func = getattr(decorator_test, "func_no_name") + assert my_func.__kernel_function_name__ == "func_no_name" + + +def test_kernel_function_with_name_specified(): + decorator_test = MiscClass() + my_func = getattr(decorator_test, "func_with_name") + assert my_func.__kernel_function_name__ == "my-name" diff --git a/python/tests/unit/plugin_definition/test_kernel_plugin_collection.py b/python/tests/unit/plugin_definition/test_kernel_plugin_collection.py new file mode 100644 index 000000000000..8eadee35cea1 --- /dev/null +++ b/python/tests/unit/plugin_definition/test_kernel_plugin_collection.py @@ -0,0 +1,98 @@ +# Copyright (c) Microsoft. All rights reserved. 
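The decorator tests above read back the metadata that @kernel_function attaches to a method; a small illustrative sketch of that flow (GreetingPlugin and its greet function are invented for this example):

from semantic_kernel.plugin_definition import kernel_function


class GreetingPlugin:  # hypothetical plugin class, for illustration only
    @kernel_function(description="Greets the user", name="greet")
    def greet(self, input):
        return f"Hello, {input}!"


method = GreetingPlugin().greet
# The decorator records its metadata as dunder attributes on the wrapped method,
# which KernelFunction.from_native_method (used later in this diff) reads back.
assert method.__kernel_function_name__ == "greet"
assert method.__kernel_function_description__ == "Greets the user"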
+ +from string import ascii_uppercase + +import pytest + +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin +from semantic_kernel.plugin_definition.kernel_plugin_collection import KernelPluginCollection + + +def test_add_plugin(): + collection = KernelPluginCollection() + plugin = KernelPlugin(name="TestPlugin") + collection.add(plugin) + assert len(collection) == 1 + assert plugin.name in collection + + +def test_add_plugin_with_description(): + expected_description = "Test Description" + collection = KernelPluginCollection() + plugin = KernelPlugin(name="TestPlugin", description=expected_description) + collection.add(plugin) + assert len(collection) == 1 + assert plugin.name in collection + assert collection[plugin.name].description == expected_description + + +def test_remove_plugin(): + collection = KernelPluginCollection() + plugin = KernelPlugin(name="TestPlugin") + collection.add(plugin) + collection.remove(plugin) + assert len(collection) == 0 + + +def test_remove_plugin_by_name(): + collection = KernelPluginCollection() + expected_plugin_name = "TestPlugin" + plugin = KernelPlugin(name=expected_plugin_name) + collection.add(plugin) + collection.remove_by_name(expected_plugin_name) + assert len(collection) == 0 + + +def test_add_list_of_plugins(): + num_plugins = 3 + collection = KernelPluginCollection() + plugins = [KernelPlugin(name=f"Plugin_{ascii_uppercase[i]}") for i in range(num_plugins)] + collection.add_list_of_plugins(plugins) + assert len(collection) == num_plugins + + +def test_clear_collection(): + collection = KernelPluginCollection() + plugins = [KernelPlugin(name=f"Plugin_{ascii_uppercase[i]}") for i in range(3)] + collection.add_list_of_plugins(plugins) + collection.clear() + assert len(collection) == 0 + + +def test_iterate_collection(): + collection = KernelPluginCollection() + plugins = [KernelPlugin(name=f"Plugin_{ascii_uppercase[i]}") for i in range(3)] + collection.add_list_of_plugins(plugins) + + for i, plugin in enumerate(collection.plugins.values()): + assert plugin.name == f"Plugin_{ascii_uppercase[i]}" + + +def test_get_plugin(): + collection = KernelPluginCollection() + plugin = KernelPlugin(name="TestPlugin") + collection.add(plugin) + retrieved_plugin = collection["TestPlugin"] + assert retrieved_plugin == plugin + + +def test_get_plugin_not_found_raises_keyerror(): + collection = KernelPluginCollection() + with pytest.raises(KeyError): + _ = collection["NonExistentPlugin"] + + +def test_get_plugin_succeeds(): + collection = KernelPluginCollection() + plugin = KernelPlugin(name="TestPlugin") + collection.add(plugin) + found_plugin = collection["TestPlugin"] + assert found_plugin == plugin + with pytest.raises(KeyError): + collection["NonExistentPlugin"] is None + + +def test_configure_plugins_on_object_creation(): + plugin = KernelPlugin(name="TestPlugin") + collection = KernelPluginCollection(plugins=[plugin]) + assert len(collection) == 1 diff --git a/python/tests/unit/plugin_definition/test_kernel_plugins.py b/python/tests/unit/plugin_definition/test_kernel_plugins.py new file mode 100644 index 000000000000..39f98a0f9a26 --- /dev/null +++ b/python/tests/unit/plugin_definition/test_kernel_plugins.py @@ -0,0 +1,208 @@ +# Copyright (c) Microsoft. All rights reserved. 
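A condensed illustrative sketch of the KernelPluginCollection behaviour covered by the tests above (plugin names here are arbitrary):

from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin
from semantic_kernel.plugin_definition.kernel_plugin_collection import KernelPluginCollection

collection = KernelPluginCollection()
collection.add(KernelPlugin(name="WriterPlugin", description="Writing helpers"))

assert "WriterPlugin" in collection                      # membership is checked by plugin name
assert collection["WriterPlugin"].description == "Writing helpers"
assert len(collection) == 1

# Bulk add and removal, as exercised by the tests above.
collection.add_list_of_plugins([KernelPlugin(name="MathPlugin"), KernelPlugin(name="TextPlugin")])
collection.remove_by_name("WriterPlugin")
assert len(collection) == 2

# Unknown names raise KeyError rather than returning None.
try:
    collection["MissingPlugin"]
except KeyError:
    pass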
+ +from typing import TYPE_CHECKING + +import pytest + +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin +from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate +from semantic_kernel.semantic_functions.prompt_template_config import PromptTemplateConfig +from semantic_kernel.semantic_functions.semantic_function_config import SemanticFunctionConfig +from semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine + +if TYPE_CHECKING: + from semantic_kernel.orchestration.kernel_context import KernelContext + + +def test_throws_for_missing_name(): + with pytest.raises(TypeError): + KernelPlugin(description="A unit test plugin") + + +def test_default_kernel_plugin_construction_with_no_functions(): + expected_plugin_name = "test_plugin" + expected_plugin_description = "A unit test plugin" + plugin = KernelPlugin(name=expected_plugin_name, description=expected_plugin_description) + assert plugin.name == expected_plugin_name + assert plugin.description == expected_plugin_description + + +def test_default_kernel_plugin_construction_with_native_functions(): + expected_plugin_name = "test_plugin" + expected_plugin_description = "A unit test plugin" + + def mock_function(input: str, context: "KernelContext") -> None: + pass + + mock_function.__kernel_function__ = True + mock_function.__kernel_function_name__ = "mock_function" + mock_function.__kernel_function_description__ = "Mock description" + mock_function.__kernel_function_input_description__ = "Mock input description" + mock_function.__kernel_function_input_default_value__ = "default_input_value" + mock_function.__kernel_function_context_parameters__ = [ + { + "name": "param1", + "description": "Param 1 description", + "default_value": "default_param1_value", + } + ] + + mock_method = mock_function + + native_function = KernelFunction.from_native_method(mock_method, "MockPlugin") + + plugin = KernelPlugin( + name=expected_plugin_name, description=expected_plugin_description, functions=[native_function] + ) + assert plugin.name == expected_plugin_name + assert plugin.description == expected_plugin_description + assert len(plugin.functions) == 1 + assert plugin["mock_function"] == native_function + + +def test_default_kernel_plugin_exposes_the_native_function_it_contains(): + expected_plugin_name = "test_plugin" + expected_plugin_description = "A unit test plugin" + + def mock_function(input: str, context: "KernelContext") -> None: + pass + + mock_function.__kernel_function__ = True + mock_function.__kernel_function_name__ = "mock_function" + mock_function.__kernel_function_description__ = "Mock description" + mock_function.__kernel_function_input_description__ = "Mock input description" + mock_function.__kernel_function_input_default_value__ = "default_input_value" + mock_function.__kernel_function_context_parameters__ = [ + { + "name": "param1", + "description": "Param 1 description", + "default_value": "default_param1_value", + } + ] + + mock_method = mock_function + + native_function = KernelFunction.from_native_method(mock_method, "MockPlugin") + + plugin = KernelPlugin( + name=expected_plugin_name, description=expected_plugin_description, functions=[native_function] + ) + assert plugin.name == expected_plugin_name + assert plugin.description == expected_plugin_description + assert len(plugin.functions) == 1 + assert plugin["mock_function"] == native_function + + for func in [native_function]: 
+ assert func.name in plugin + assert plugin[func.name] == func + + +def test_default_kernel_plugin_construction_with_semantic_function(): + prompt_config = PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.8) + prompt_template = ChatPromptTemplate("{{$user_input}}", PromptTemplateEngine(), prompt_config) + function_config = SemanticFunctionConfig(prompt_config, prompt_template) + + expected_plugin_name = "test_plugin" + expected_function_name = "mock_function" + semantic_function = KernelFunction.from_semantic_config( + plugin_name=expected_plugin_name, function_name=expected_function_name, function_config=function_config + ) + + expected_plugin_description = "A unit test plugin" + + plugin = KernelPlugin( + name=expected_plugin_name, description=expected_plugin_description, functions=[semantic_function] + ) + + assert plugin.name == expected_plugin_name + assert plugin.description == expected_plugin_description + assert len(plugin.functions) == 1 + assert plugin["mock_function"] == semantic_function + + +def test_default_kernel_plugin_construction_with_both_function_types(): + # Construct a semantic function + prompt_config = PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.8) + prompt_template = ChatPromptTemplate("{{$user_input}}", PromptTemplateEngine(), prompt_config) + function_config = SemanticFunctionConfig(prompt_config, prompt_template) + + expected_plugin_name = "test_plugin" + expected_function_name = "mock_semantic_function" + semantic_function = KernelFunction.from_semantic_config( + plugin_name=expected_plugin_name, function_name=expected_function_name, function_config=function_config + ) + + # Construct a nativate function + def mock_function(input: str, context: "KernelContext") -> None: + pass + + mock_function.__kernel_function__ = True + mock_function.__kernel_function_name__ = "mock_native_function" + mock_function.__kernel_function_description__ = "Mock description" + mock_function.__kernel_function_input_description__ = "Mock input description" + mock_function.__kernel_function_input_default_value__ = "default_input_value" + mock_function.__kernel_function_context_parameters__ = [ + { + "name": "param1", + "description": "Param 1 description", + "default_value": "default_param1_value", + } + ] + + mock_method = mock_function + + native_function = KernelFunction.from_native_method(mock_method, "MockPlugin") + + # Add both types to the default kernel plugin + expected_plugin_description = "A unit test plugin" + + plugin = KernelPlugin( + name=expected_plugin_name, + description=expected_plugin_description, + functions=[semantic_function, native_function], + ) + + assert plugin.name == expected_plugin_name + assert plugin.description == expected_plugin_description + assert len(plugin.functions) == 2 + + for func in [semantic_function, native_function]: + assert func.name in plugin + assert plugin[func.name] == func + + +def test_default_kernel_plugin_construction_with_same_function_names_throws(): + # Construct a semantic function + prompt_config = PromptTemplateConfig.from_execution_settings(max_tokens=2000, temperature=0.7, top_p=0.8) + prompt_template = ChatPromptTemplate("{{$user_input}}", PromptTemplateEngine(), prompt_config) + function_config = SemanticFunctionConfig(prompt_config, prompt_template) + + expected_plugin_name = "test_plugin" + expected_function_name = "mock_function" + semantic_function = KernelFunction.from_semantic_config( + plugin_name=expected_plugin_name, 
function_name=expected_function_name, function_config=function_config + ) + + # Construct a nativate function + def mock_function(input: str, context: "KernelContext") -> None: + pass + + mock_function.__kernel_function__ = True + mock_function.__kernel_function_name__ = expected_function_name + mock_function.__kernel_function_description__ = "Mock description" + mock_function.__kernel_function_input_description__ = "Mock input description" + mock_function.__kernel_function_input_default_value__ = "default_input_value" + mock_function.__kernel_function_context_parameters__ = [ + { + "name": "param1", + "description": "Param 1 description", + "default_value": "default_param1_value", + } + ] + + mock_method = mock_function + native_function = KernelFunction.from_native_method(mock_method, "MockPlugin") + + with pytest.raises(ValueError): + KernelPlugin(name=expected_plugin_name, functions=[semantic_function, native_function]) diff --git a/python/tests/unit/plugin_definition/test_prompt_templates.py b/python/tests/unit/plugin_definition/test_prompt_templates.py new file mode 100644 index 000000000000..6fed481a6552 --- /dev/null +++ b/python/tests/unit/plugin_definition/test_prompt_templates.py @@ -0,0 +1,183 @@ +# Copyright (c) Microsoft. All rights reserved. + +import json + +import pytest + +from semantic_kernel.connectors.ai.open_ai.models.chat.open_ai_chat_message import ( + OpenAIChatMessage, +) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, +) +from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate +from semantic_kernel.semantic_functions.prompt_template_config import ( + PromptTemplateConfig, +) +from semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine + + +def test_default_prompt_template_config(): + prompt_template_config = PromptTemplateConfig() + assert prompt_template_config.schema_ == 1 + assert prompt_template_config.type == "completion" + assert prompt_template_config.description == "" + assert prompt_template_config.execution_settings.extension_data == {} + + +def test_default_chat_prompt_template_from_empty_dict(): + prompt_template_config = PromptTemplateConfig.from_dict({}) + assert prompt_template_config.schema_ == 1 + assert prompt_template_config.type == "completion" + assert prompt_template_config.description == "" + assert prompt_template_config.execution_settings.extension_data == {} + + +def test_default_chat_prompt_template_from_empty_string(): + with pytest.raises(json.decoder.JSONDecodeError): + _ = PromptTemplateConfig.from_json("") + + +def test_default_chat_prompt_template_from_empty_json(): + prompt_template_config = PromptTemplateConfig.from_dict({}) + assert prompt_template_config.schema_ == 1 + assert prompt_template_config.type == "completion" + assert prompt_template_config.description == "" + assert prompt_template_config.execution_settings.extension_data == {} + + +def test_custom_prompt_template_config(): + prompt_template_config = PromptTemplateConfig( + schema_=2, + type="completion2", + description="Custom description.", + execution_settings=OpenAIChatPromptExecutionSettings( + temperature=0.5, + top_p=0.5, + presence_penalty=0.5, + frequency_penalty=0.5, + max_tokens=128, + number_of_responses=2, + stop=["\n"], + logit_bias={"1": 1.0}, + ), + ) + assert prompt_template_config.schema_ == 2 + assert prompt_template_config.type == "completion2" + assert prompt_template_config.description 
== "Custom description." + assert prompt_template_config.execution_settings.temperature == 0.5 + assert prompt_template_config.execution_settings.top_p == 0.5 + assert prompt_template_config.execution_settings.presence_penalty == 0.5 + assert prompt_template_config.execution_settings.frequency_penalty == 0.5 + assert prompt_template_config.execution_settings.max_tokens == 128 + assert prompt_template_config.execution_settings.number_of_responses == 2 + assert prompt_template_config.execution_settings.stop == ["\n"] + assert prompt_template_config.execution_settings.logit_bias == {"1": 1.0} + + +def test_custom_prompt_template_config_from_dict(): + prompt_template_dict = { + "schema": 2, + "type": "completion2", + "description": "Custom description.", + "execution_settings": { + "default": { + "temperature": 0.5, + "top_p": 0.5, + "presence_penalty": 0.5, + "frequency_penalty": 0.5, + "max_tokens": 128, + "number_of_responses": 2, + "stop": ["\n"], + "logit_bias": {"1": 1}, + }, + }, + } + prompt_template_config = PromptTemplateConfig.from_dict(prompt_template_dict) + assert prompt_template_config.schema_ == 2 + assert prompt_template_config.type == "completion2" + assert prompt_template_config.description == "Custom description." + assert prompt_template_config.execution_settings.extension_data["temperature"] == 0.5 + assert prompt_template_config.execution_settings.extension_data["top_p"] == 0.5 + assert prompt_template_config.execution_settings.extension_data["presence_penalty"] == 0.5 + assert prompt_template_config.execution_settings.extension_data["frequency_penalty"] == 0.5 + assert prompt_template_config.execution_settings.extension_data["max_tokens"] == 128 + assert prompt_template_config.execution_settings.extension_data["number_of_responses"] == 2 + assert prompt_template_config.execution_settings.extension_data["stop"] == ["\n"] + assert prompt_template_config.execution_settings.extension_data["logit_bias"] == {"1": 1} + + +def test_custom_prompt_template_config_from_json(): + prompt_template_json = """ + { + "schema": 2, + "type": "completion2", + "description": "Custom description.", + "execution_settings": { + "default": { + "temperature": 0.5, + "top_p": 0.5, + "presence_penalty": 0.5, + "frequency_penalty": 0.5, + "max_tokens": 128, + "number_of_responses": 2, + "stop": ["s"], + "logit_bias": {"1": 1} + } + } + } + """ + prompt_template_config = PromptTemplateConfig[OpenAIChatPromptExecutionSettings].from_json(prompt_template_json) + assert prompt_template_config.schema_ == 2 + assert prompt_template_config.type == "completion2" + assert prompt_template_config.description == "Custom description." 
+ assert prompt_template_config.execution_settings.temperature == 0.5 + assert prompt_template_config.execution_settings.top_p == 0.5 + assert prompt_template_config.execution_settings.presence_penalty == 0.5 + assert prompt_template_config.execution_settings.frequency_penalty == 0.5 + assert prompt_template_config.execution_settings.max_tokens == 128 + assert prompt_template_config.execution_settings.number_of_responses == 2 + assert prompt_template_config.execution_settings.stop == ["s"] + assert prompt_template_config.execution_settings.logit_bias == {"1": 1} + + +def test_chat_prompt_template(): + chat_prompt_template = ChatPromptTemplate( + "{{$user_input}}", + PromptTemplateEngine(), + prompt_config=PromptTemplateConfig(), + ) + + assert chat_prompt_template.messages == [] + + +def test_chat_prompt_template_with_messages(): + prompt_template_config = PromptTemplateConfig[OpenAIChatPromptExecutionSettings].from_execution_settings( + messages=[{"role": "system", "content": "Custom system prompt."}], + ) + chat_prompt_template = ChatPromptTemplate[OpenAIChatMessage]( + "{{$user_input}}", + PromptTemplateEngine(), + prompt_config=prompt_template_config, + parse_messages=True, + ) + print(chat_prompt_template.messages) + assert len(chat_prompt_template.messages) == 1 + assert chat_prompt_template.messages[0].role == "system" + assert chat_prompt_template.messages[0].content_template.template == "Custom system prompt." + + +def test_chat_prompt_template_with_system_prompt(): + prompt_template_config = PromptTemplateConfig[OpenAIChatPromptExecutionSettings].from_execution_settings( + chat_system_prompt="Custom system prompt.", + ) + chat_prompt_template = ChatPromptTemplate[OpenAIChatMessage]( + "{{$user_input}}", + PromptTemplateEngine(), + prompt_config=prompt_template_config, + parse_chat_system_prompt=True, + ) + print(chat_prompt_template.messages) + assert len(chat_prompt_template.messages) == 1 + assert chat_prompt_template.messages[0].role == "system" + assert chat_prompt_template.messages[0].content_template.template == "Custom system prompt." diff --git a/python/tests/unit/skill_definition/test_functions_view.py b/python/tests/unit/skill_definition/test_functions_view.py deleted file mode 100644 index f377ec667230..000000000000 --- a/python/tests/unit/skill_definition/test_functions_view.py +++ /dev/null @@ -1,153 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
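The deleted skill-era tests below still configure prompts through a nested completion block; for contrast, here is a minimal illustrative sketch of the execution_settings-based configuration that the new plugin_definition prompt template tests above exercise (the system prompt text and parameter values are arbitrary):

from semantic_kernel.connectors.ai.open_ai.models.chat.open_ai_chat_message import OpenAIChatMessage
from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import (
    OpenAIChatPromptExecutionSettings,
)
from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate
from semantic_kernel.semantic_functions.prompt_template_config import PromptTemplateConfig
from semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine

# Sampling parameters now live in execution_settings instead of a "completion" section.
prompt_config = PromptTemplateConfig[OpenAIChatPromptExecutionSettings].from_execution_settings(
    max_tokens=2000,
    temperature=0.7,
    top_p=0.8,
    chat_system_prompt="You are a helpful assistant.",
)

chat_template = ChatPromptTemplate[OpenAIChatMessage](
    "{{$user_input}}",
    PromptTemplateEngine(),
    prompt_config=prompt_config,
    parse_chat_system_prompt=True,   # lifts chat_system_prompt into a system message
)
assert chat_template.messages[0].role == "system"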
- -import pytest - -from semantic_kernel.kernel_exception import KernelException -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.functions_view import FunctionsView - - -def test_add_semantic_function(): - view = FunctionView( - name="function1", - skill_name="skill1", - description="Semantic function", - parameters=[], - is_semantic=True, - is_asynchronous=True, - ) - functions_view = FunctionsView() - functions_view.add_function(view) - semantic_functions = functions_view.semantic_functions.get("skill1") - assert len(semantic_functions) == 1 - assert semantic_functions[0] == view - - -def test_add_native_function(): - view = FunctionView( - name="function2", - skill_name="skill2", - description="Native function", - parameters=[], - is_semantic=False, - is_asynchronous=True, - ) - functions_view = FunctionsView() - functions_view.add_function(view) - native_functions = functions_view.native_functions.get("skill2") - assert len(native_functions) == 1 - assert native_functions[0] == view - - -def test_add_multiple_functions(): - semantic_function = FunctionView( - name="function1", - skill_name="skill1", - description="Semantic function", - parameters=[], - is_semantic=True, - is_asynchronous=True, - ) - native_function = FunctionView( - name="function2", - skill_name="skill2", - description="Native function", - parameters=[], - is_semantic=False, - is_asynchronous=True, - ) - functions_view = FunctionsView() - functions_view.add_function(semantic_function) - functions_view.add_function(native_function) - semantic_functions = functions_view.semantic_functions.get("skill1") - native_functions = functions_view.native_functions.get("skill2") - assert len(semantic_functions) == 1 - assert semantic_functions[0] == semantic_function - assert len(native_functions) == 1 - assert native_functions[0] == native_function - - -def test_is_semantic(): - semantic_function = FunctionView( - name="function1", - skill_name="skill1", - description="Semantic function", - parameters=[], - is_semantic=True, - is_asynchronous=True, - ) - native_function = FunctionView( - name="function2", - skill_name="skill2", - description="Native function", - parameters=[], - is_semantic=False, - is_asynchronous=True, - ) - functions_view = FunctionsView() - functions_view.add_function(semantic_function) - functions_view.add_function(native_function) - assert functions_view.is_semantic("skill1", "function1") is True - assert functions_view.is_semantic("skill2", "function2") is False - assert functions_view.is_semantic("skill1", "unregistered_function") is False - - -def test_is_native(): - semantic_function = FunctionView( - name="function1", - skill_name="skill1", - description="Semantic function", - parameters=[], - is_semantic=True, - is_asynchronous=True, - ) - native_function = FunctionView( - name="function2", - skill_name="skill2", - description="Native function", - parameters=[], - is_semantic=False, - is_asynchronous=True, - ) - functions_view = FunctionsView() - functions_view.add_function(semantic_function) - functions_view.add_function(native_function) - assert functions_view.is_native("skill1", "function1") is False - assert functions_view.is_native("skill2", "function2") is True - assert functions_view.is_native("skill2", "unregistered_function") is False - - -def test_ambiguous_implementation(): - semantic_function = FunctionView( - name="function1", - skill_name="skill1", - description="Semantic function", - parameters=[], - is_semantic=True, - 
is_asynchronous=True, - ) - native_function = FunctionView( - name="function1", - skill_name="skill1", - description="Native function", - parameters=[], - is_semantic=False, - is_asynchronous=True, - ) - functions_view = FunctionsView() - functions_view.add_function(semantic_function) - functions_view.add_function(native_function) - - with pytest.raises(KernelException) as exc_info: - functions_view.is_semantic("skill1", "function1") - - assert ( - exc_info.value.error_code == KernelException.ErrorCodes.AmbiguousImplementation - ) - - with pytest.raises(KernelException) as exc_info: - functions_view.is_native("skill1", "function1") - - assert ( - exc_info.value.error_code == KernelException.ErrorCodes.AmbiguousImplementation - ) diff --git a/python/tests/unit/skill_definition/test_prompt_templates.py b/python/tests/unit/skill_definition/test_prompt_templates.py deleted file mode 100644 index 82c03875ba00..000000000000 --- a/python/tests/unit/skill_definition/test_prompt_templates.py +++ /dev/null @@ -1,175 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -import json - -import pytest - -from semantic_kernel.semantic_functions.chat_prompt_template import ChatPromptTemplate -from semantic_kernel.semantic_functions.prompt_template_config import ( - PromptTemplateConfig, -) - - -def test_default_prompt_template_config(): - prompt_template_config = PromptTemplateConfig() - assert prompt_template_config.schema == 1 - assert prompt_template_config.type == "completion" - assert prompt_template_config.description == "" - assert prompt_template_config.completion.temperature == 0.0 - assert prompt_template_config.completion.top_p == 1.0 - assert prompt_template_config.completion.presence_penalty == 0.0 - assert prompt_template_config.completion.frequency_penalty == 0.0 - assert prompt_template_config.completion.max_tokens == 256 - assert prompt_template_config.completion.number_of_responses == 1 - assert prompt_template_config.completion.stop_sequences == [] - assert prompt_template_config.completion.token_selection_biases == {} - assert prompt_template_config.completion.chat_system_prompt is None - - -def test_default_chat_prompt_template_from_empty_dict(): - with pytest.raises(KeyError): - _ = PromptTemplateConfig().from_dict({}) - - -def test_default_chat_prompt_template_from_empty_string(): - with pytest.raises(json.decoder.JSONDecodeError): - _ = PromptTemplateConfig().from_json("") - - -def test_default_chat_prompt_template_from_empty_json(): - with pytest.raises(KeyError): - _ = PromptTemplateConfig().from_json("{}") - - -def test_custom_prompt_template_config(): - prompt_template_config = PromptTemplateConfig( - schema=2, - type="completion2", - description="Custom description.", - completion=PromptTemplateConfig.CompletionConfig( - temperature=0.5, - top_p=0.5, - presence_penalty=0.5, - frequency_penalty=0.5, - max_tokens=128, - number_of_responses=2, - stop_sequences=["\n"], - token_selection_biases={1: 1}, - chat_system_prompt="Custom system prompt.", - ), - ) - assert prompt_template_config.schema == 2 - assert prompt_template_config.type == "completion2" - assert prompt_template_config.description == "Custom description." 
- assert prompt_template_config.completion.temperature == 0.5 - assert prompt_template_config.completion.top_p == 0.5 - assert prompt_template_config.completion.presence_penalty == 0.5 - assert prompt_template_config.completion.frequency_penalty == 0.5 - assert prompt_template_config.completion.max_tokens == 128 - assert prompt_template_config.completion.number_of_responses == 2 - assert prompt_template_config.completion.stop_sequences == ["\n"] - assert prompt_template_config.completion.token_selection_biases == {1: 1} - assert ( - prompt_template_config.completion.chat_system_prompt == "Custom system prompt." - ) - - -def test_custom_prompt_template_config_from_dict(): - prompt_template_dict = { - "schema": 2, - "type": "completion2", - "description": "Custom description.", - "completion": { - "temperature": 0.5, - "top_p": 0.5, - "presence_penalty": 0.5, - "frequency_penalty": 0.5, - "max_tokens": 128, - "number_of_responses": 2, - "stop_sequences": ["\n"], - "token_selection_biases": {1: 1}, - "chat_system_prompt": "Custom system prompt.", - }, - } - prompt_template_config = PromptTemplateConfig().from_dict(prompt_template_dict) - assert prompt_template_config.schema == 2 - assert prompt_template_config.type == "completion2" - assert prompt_template_config.description == "Custom description." - assert prompt_template_config.completion.temperature == 0.5 - assert prompt_template_config.completion.top_p == 0.5 - assert prompt_template_config.completion.presence_penalty == 0.5 - assert prompt_template_config.completion.frequency_penalty == 0.5 - assert prompt_template_config.completion.max_tokens == 128 - assert prompt_template_config.completion.number_of_responses == 2 - assert prompt_template_config.completion.stop_sequences == ["\n"] - assert prompt_template_config.completion.token_selection_biases == {1: 1} - assert ( - prompt_template_config.completion.chat_system_prompt == "Custom system prompt." - ) - - -def test_custom_prompt_template_config_from_json(): - prompt_template_json = """ - { - "schema": 2, - "type": "completion2", - "description": "Custom description.", - "completion": { - "temperature": 0.5, - "top_p": 0.5, - "presence_penalty": 0.5, - "frequency_penalty": 0.5, - "max_tokens": 128, - "number_of_responses": 2, - "stop_sequences": ["s"], - "token_selection_biases": {"1": 1}, - "chat_system_prompt": "Custom system prompt." - } - } - """ - prompt_template_config = PromptTemplateConfig().from_json(prompt_template_json) - assert prompt_template_config.schema == 2 - assert prompt_template_config.type == "completion2" - assert prompt_template_config.description == "Custom description." - assert prompt_template_config.completion.temperature == 0.5 - assert prompt_template_config.completion.top_p == 0.5 - assert prompt_template_config.completion.presence_penalty == 0.5 - assert prompt_template_config.completion.frequency_penalty == 0.5 - assert prompt_template_config.completion.max_tokens == 128 - assert prompt_template_config.completion.number_of_responses == 2 - assert prompt_template_config.completion.stop_sequences == ["s"] - assert prompt_template_config.completion.token_selection_biases == {1: 1} - assert ( - prompt_template_config.completion.chat_system_prompt == "Custom system prompt." 
- ) - - -def test_chat_prompt_template(): - chat_prompt_template = ChatPromptTemplate( - "{{$user_input}}", - None, - prompt_config=PromptTemplateConfig(), - ) - - assert chat_prompt_template._messages == [] - - -def test_chat_prompt_template_with_system_prompt(): - prompt_template_config = PromptTemplateConfig( - completion=PromptTemplateConfig.CompletionConfig( - chat_system_prompt="Custom system prompt.", - ) - ) - - chat_prompt_template = ChatPromptTemplate( - "{{$user_input}}", - None, - prompt_config=prompt_template_config, - ) - print(chat_prompt_template._messages) - assert len(chat_prompt_template.messages) == 1 - assert chat_prompt_template._messages[0].role == "system" - assert ( - chat_prompt_template._messages[0].content_template._template - == "Custom system prompt." - ) diff --git a/python/tests/unit/skill_definition/test_sk_function_decorators.py b/python/tests/unit/skill_definition/test_sk_function_decorators.py deleted file mode 100644 index 25e1b1b38132..000000000000 --- a/python/tests/unit/skill_definition/test_sk_function_decorators.py +++ /dev/null @@ -1,35 +0,0 @@ -from semantic_kernel.skill_definition import sk_function - - -class MiscClass: - __test__ = False - - @sk_function(description="description") - def func_with_description(self, input): - return input - - @sk_function(description="description") - def func_no_name(self, input): - return input - - @sk_function(description="description", name="my-name") - def func_with_name(self, input): - return input - - -def test_description(): - decorator_test = MiscClass() - my_func = getattr(decorator_test, "func_with_description") - assert my_func.__sk_function_description__ == "description" - - -def test_sk_function_name_not_specified(): - decorator_test = MiscClass() - my_func = getattr(decorator_test, "func_no_name") - assert my_func.__sk_function_name__ == "func_no_name" - - -def test_sk_function_with_name_specified(): - decorator_test = MiscClass() - my_func = getattr(decorator_test, "func_with_name") - assert my_func.__sk_function_name__ == "my-name" diff --git a/python/tests/unit/template_engine/blocks/test_block.py b/python/tests/unit/template_engine/blocks/test_block.py index fb4fb02a73d1..ca8168bf1960 100644 --- a/python/tests/unit/template_engine/blocks/test_block.py +++ b/python/tests/unit/template_engine/blocks/test_block.py @@ -1,18 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger - from pytest import raises from semantic_kernel.template_engine.blocks.block import Block from semantic_kernel.template_engine.blocks.block_types import BlockTypes -from semantic_kernel.utils.null_logger import NullLogger def test_init(): - block = Block(content="test content", log=NullLogger()) + block = Block(content="test content") assert block.content == "test content" - assert isinstance(block.log, Logger) def test_type_property(): diff --git a/python/tests/unit/template_engine/blocks/test_code_block.py b/python/tests/unit/template_engine/blocks/test_code_block.py index e83dcf10501c..f93f205d4afc 100644 --- a/python/tests/unit/template_engine/blocks/test_code_block.py +++ b/python/tests/unit/template_engine/blocks/test_code_block.py @@ -1,4 +1,3 @@ -from logging import Logger from unittest.mock import Mock from pytest import mark, raises @@ -6,10 +5,11 @@ from semantic_kernel.memory.null_memory import NullMemory from semantic_kernel.orchestration.context_variables import ContextVariables from semantic_kernel.orchestration.delegate_types import DelegateTypes -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function import SKFunction -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin +from semantic_kernel.plugin_definition.kernel_plugin_collection import ( + KernelPluginCollection, ) from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.blocks.code_block import CodeBlock @@ -20,59 +20,71 @@ class TestCodeBlock: def setup_method(self): - self.skills = Mock(spec=ReadOnlySkillCollectionBase) - self.log = Mock(spec=Logger) + self.plugins = Mock(spec=KernelPluginCollection) @mark.asyncio async def test_it_throws_if_a_function_doesnt_exist(self): - context = SKContext.construct( + context = KernelContext.model_construct( variables=ContextVariables(), memory=NullMemory(), - skill_collection=self.skills, - logger=self.log, + plugins=KernelPluginCollection(), + ) + + target = CodeBlock( + content="functionName", ) - # Make it so our self.skills mock's `has_function` method returns False - self.skills.has_function.return_value = False - target = CodeBlock(content="functionName", log=self.log) with raises(ValueError): - await target.render_code_async(context) + await target.render_code(context) @mark.asyncio async def test_it_throws_if_a_function_call_throws(self): - context = SKContext.construct( - variables=ContextVariables(), - memory=NullMemory(), - skill_collection=self.skills, - logger=self.log, - ) - def invoke(_): raise Exception("error") - function = SKFunction( - delegate_type=DelegateTypes.InSKContext, + function = KernelFunction( + delegate_type=DelegateTypes.InKernelContext, delegate_function=invoke, - skill_name="", + plugin_name="test", function_name="funcName", description="", parameters=[], is_semantic=False, ) - self.skills.has_function.return_value = True - self.skills.get_function.return_value = function + dkp = KernelPlugin(name="test", functions=[function]) + plugins = KernelPluginCollection() + plugins.add(dkp) - target = CodeBlock(content="functionName", log=self.log) + # Create a context with the variables, memory, and plugin collection + context = 
KernelContext.model_construct( + variables=ContextVariables(), + memory=NullMemory(), + plugins=plugins, + ) + + target = CodeBlock( + content="functionName", + ) with raises(ValueError): - await target.render_code_async(context) + await target.render_code(context) def test_it_has_the_correct_type(self): - assert CodeBlock(content="", log=self.log).type == BlockTypes.CODE + assert ( + CodeBlock( + content="", + ).type + == BlockTypes.CODE + ) def test_it_trims_spaces(self): - assert CodeBlock(content=" aa ", log=self.log).content == "aa" + assert ( + CodeBlock( + content=" aa ", + ).content + == "aa" + ) def test_it_checks_validity_of_internal_blocks(self): valid_block1 = FunctionIdBlock(content="x") @@ -81,10 +93,12 @@ def test_it_checks_validity_of_internal_blocks(self): invalid_block = VarBlock(content="!notvalid") code_block1 = CodeBlock( - tokens=[valid_block1, valid_block2], content="", log=self.log + tokens=[valid_block1, valid_block2], + content="", ) code_block2 = CodeBlock( - tokens=[valid_block1, invalid_block], content="", log=self.log + tokens=[valid_block1, invalid_block], + content="", ) is_valid1, _ = code_block1.is_valid() @@ -99,11 +113,21 @@ def test_it_requires_a_valid_function_call(self): val_block = ValBlock(content="'value'") var_block = VarBlock(content="$var") - code_block1 = CodeBlock(tokens=[func_id, val_block], content="", log=self.log) - code_block2 = CodeBlock(tokens=[func_id, var_block], content="", log=self.log) - code_block3 = CodeBlock(tokens=[func_id, func_id], content="", log=self.log) + code_block1 = CodeBlock( + tokens=[func_id, val_block], + content="", + ) + code_block2 = CodeBlock( + tokens=[func_id, var_block], + content="", + ) + code_block3 = CodeBlock( + tokens=[func_id, func_id], + content="", + ) code_block4 = CodeBlock( - tokens=[func_id, var_block, var_block], content="", log=self.log + tokens=[func_id, var_block, var_block], + content="", ) is_valid1, _ = code_block1.is_valid() @@ -123,15 +147,16 @@ async def test_it_renders_code_block_consisting_of_just_a_var_block1(self): variables = ContextVariables() variables["varName"] = "foo" - context = SKContext.construct( + context = KernelContext.model_construct( variables=variables, memory=NullMemory(), - skill_collection=None, - logger=self.log, + plugins=None, ) - code_block = CodeBlock(content="$varName", log=self.log) - result = await code_block.render_code_async(context) + code_block = CodeBlock( + content="$varName", + ) + result = await code_block.render_code(context) assert result == "foo" @@ -140,47 +165,48 @@ async def test_it_renders_code_block_consisting_of_just_a_var_block2(self): variables = ContextVariables() variables["varName"] = "bar" - context = SKContext.construct( + context = KernelContext.model_construct( variables=variables, memory=NullMemory(), - skill_collection=None, - logger=self.log, + plugins=None, ) code_block = CodeBlock( - tokens=[VarBlock(content="$varName")], content="", log=self.log + tokens=[VarBlock(content="$varName")], + content="", ) - result = await code_block.render_code_async(context) + result = await code_block.render_code(context) assert result == "bar" @mark.asyncio async def test_it_renders_code_block_consisting_of_just_a_val_block1(self): - context = SKContext.construct( + context = KernelContext.model_construct( variables=ContextVariables(), memory=NullMemory(), - skill_collection=None, - logger=self.log, + plugins=None, ) - code_block = CodeBlock(content="'ciao'", log=self.log) - result = await code_block.render_code_async(context) + code_block = 
CodeBlock( + content="'ciao'", + ) + result = await code_block.render_code(context) assert result == "ciao" @mark.asyncio async def test_it_renders_code_block_consisting_of_just_a_val_block2(self): - context = SKContext.construct( + context = KernelContext.model_construct( variables=ContextVariables(), memory=NullMemory(), - skill_collection=None, - logger=self.log, + plugins=None, ) code_block = CodeBlock( - tokens=[ValBlock(content="'arrivederci'")], content="", log=self.log + tokens=[ValBlock(content="'arrivederci'")], + content="", ) - result = await code_block.render_code_async(context) + result = await code_block.render_code(context) assert result == "arrivederci" @@ -192,14 +218,6 @@ async def test_it_invokes_function_cloning_all_variables(self): variables["var1"] = "uno" variables["var2"] = "due" - # Create a context with the variables, memory, skill collection, and logger - context = SKContext.construct( - variables=variables, - memory=NullMemory(), - skill_collection=self.skills, - logger=self.log, - ) - # Create a FunctionIdBlock with the function name func_id = FunctionIdBlock(content="funcName") @@ -218,24 +236,34 @@ def invoke(ctx): ctx["var1"] = "overridden" ctx["var2"] = "overridden" - # Create an SKFunction with the invoke function as its delegate - function = SKFunction( - delegate_type=DelegateTypes.InSKContext, + # Create a KernelFunction with the invoke function as its delegate + function = KernelFunction( + delegate_type=DelegateTypes.InKernelContext, delegate_function=invoke, - skill_name="", + plugin_name="test", function_name="funcName", description="", parameters=[], is_semantic=False, ) - # Mock the skill collection's function retrieval - self.skills.has_function.return_value = True - self.skills.get_function.return_value = function + dkp = KernelPlugin(name="test", functions=[function]) + plugins = KernelPluginCollection() + plugins.add(dkp) + + # Create a context with the variables, memory, and plugin collection + context = KernelContext.model_construct( + variables=variables, + memory=NullMemory(), + plugins=plugins, + ) # Create a CodeBlock with the FunctionIdBlock and render it with the context - code_block = CodeBlock(tokens=[func_id], content="", log=self.log) - await code_block.render_code_async(context) + code_block = CodeBlock( + tokens=[func_id], + content="", + ) + await code_block.render_code(context) # Check that the canary values match the original context variables assert canary["input"] == "zero" @@ -257,14 +285,6 @@ async def test_it_invokes_function_with_custom_variable(self): variables = ContextVariables() variables[VAR_NAME] = VAR_VALUE - # Create a context with the variables, memory, skill collection, and logger - context = SKContext.construct( - variables=variables, - memory=NullMemory(), - skill_collection=self.skills, - logger=self.log, - ) - # Create a FunctionIdBlock with the function name and a # VarBlock with the custom variable func_id = FunctionIdBlock(content="funcName") @@ -278,25 +298,35 @@ def invoke(ctx): nonlocal canary canary = ctx["input"] - # Create an SKFunction with the invoke function as its delegate - function = SKFunction( - delegate_type=DelegateTypes.InSKContext, + # Create a KernelFunction with the invoke function as its delegate + function = KernelFunction( + delegate_type=DelegateTypes.InKernelContext, delegate_function=invoke, - skill_name="", + plugin_name="test", function_name="funcName", description="", parameters=[], is_semantic=False, ) - # Mock the skill collection's function retrieval -
self.skills.has_function.return_value = True - self.skills.get_function.return_value = function + dkp = KernelPlugin(name="test", functions=[function]) + plugins = KernelPluginCollection() + plugins.add(dkp) + + # Create a context with the variables, memory, and plugin collection + context = KernelContext.model_construct( + variables=variables, + memory=NullMemory(), + plugins=plugins, + ) # Create a CodeBlock with the FunctionIdBlock and VarBlock, # and render it with the context - code_block = CodeBlock(tokens=[func_id, var_block], content="", log=self.log) - result = await code_block.render_code_async(context) + code_block = CodeBlock( + tokens=[func_id, var_block], + content="", + ) + result = await code_block.render_code(context) # Check that the result matches the custom variable value assert result == VAR_VALUE @@ -308,14 +338,6 @@ async def test_it_invokes_function_with_custom_value(self): # Define a value to be used in the test VALUE = "value" - # Create a context with empty variables, memory, skill collection, and logger - context = SKContext.construct( - variables=ContextVariables(), - memory=NullMemory(), - skill_collection=self.skills, - logger=self.log, - ) - # Create a FunctionIdBlock with the function name and a ValBlock with the value func_id = FunctionIdBlock(content="funcName") val_block = ValBlock(content=f"'{VALUE}'") @@ -328,25 +350,35 @@ def invoke(ctx): nonlocal canary canary = ctx["input"] - # Create an SKFunction with the invoke function as its delegate - function = SKFunction( - delegate_type=DelegateTypes.InSKContext, + # Create a KernelFunction with the invoke function as its delegate + function = KernelFunction( + delegate_type=DelegateTypes.InKernelContext, delegate_function=invoke, - skill_name="", + plugin_name="test", function_name="funcName", description="", parameters=[], is_semantic=False, ) - # Mock the skill collection's function retrieval - self.skills.has_function.return_value = True - self.skills.get_function.return_value = function + dkp = KernelPlugin(name="test", functions=[function]) + plugins = KernelPluginCollection() + plugins.add(dkp) + + # Create a context with empty variables, memory, and plugin collection + context = KernelContext.model_construct( + variables=ContextVariables(), + memory=NullMemory(), + plugins=plugins, + ) # Create a CodeBlock with the FunctionIdBlock and ValBlock, # and render it with the context - code_block = CodeBlock(tokens=[func_id, val_block], content="", log=self.log) - result = await code_block.render_code_async(context) + code_block = CodeBlock( + tokens=[func_id, val_block], + content="", + ) + result = await code_block.render_code(context) # Check that the result matches the value assert result == VALUE diff --git a/python/tests/unit/template_engine/blocks/test_function_id_block.py b/python/tests/unit/template_engine/blocks/test_function_id_block.py index 004ad5343a15..d519a2765788 100644 --- a/python/tests/unit/template_engine/blocks/test_function_id_block.py +++ b/python/tests/unit/template_engine/blocks/test_function_id_block.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved.
-from logging import Logger from pytest import mark, raises @@ -10,20 +9,17 @@ def test_init(): - function_id_block = FunctionIdBlock( - content="skill.function", log=Logger("test_logger") - ) - assert function_id_block.content == "skill.function" - assert isinstance(function_id_block.log, Logger) + function_id_block = FunctionIdBlock(content="plugin.function") + assert function_id_block.content == "plugin.function" def test_type_property(): - function_id_block = FunctionIdBlock(content="skill.function") + function_id_block = FunctionIdBlock(content="plugin.function") assert function_id_block.type == BlockTypes.FUNCTION_ID def test_is_valid(): - function_id_block = FunctionIdBlock(content="skill.function") + function_id_block = FunctionIdBlock(content="plugin.function") is_valid, error_msg = function_id_block.is_valid() assert is_valid assert error_msg == "" @@ -37,14 +33,14 @@ def test_is_valid_empty_identifier(): def test_render(): - function_id_block = FunctionIdBlock(content="skill.function") + function_id_block = FunctionIdBlock(content="plugin.function") rendered_value = function_id_block.render(ContextVariables()) - assert rendered_value == "skill.function" + assert rendered_value == "plugin.function" def test_init_value_error(): with raises(ValueError): - FunctionIdBlock(content="skill.nope.function") + FunctionIdBlock(content="plugin.nope.function") def test_it_trims_spaces(): @@ -109,10 +105,10 @@ def test_it_allows_underscore_dots_letters_and_digits(name, is_valid): def test_it_allows_only_one_dot(): target1 = FunctionIdBlock(content="functionName") - target2 = FunctionIdBlock(content="skillName.functionName") + target2 = FunctionIdBlock(content="pluginName.functionName") with raises(ValueError): - FunctionIdBlock(content="foo.skillName.functionName") + FunctionIdBlock(content="foo.pluginName.functionName") assert target1.is_valid() == (True, "") assert target2.is_valid() == (True, "") diff --git a/python/tests/unit/template_engine/blocks/test_text_block.py b/python/tests/unit/template_engine/blocks/test_text_block.py index aff0a97244c5..03cb3f37284f 100644 --- a/python/tests/unit/template_engine/blocks/test_text_block.py +++ b/python/tests/unit/template_engine/blocks/test_text_block.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger from pytest import raises @@ -10,53 +9,37 @@ def test_init(): - text_block = TextBlock.from_text(text="test text", log=Logger("test_logger")) + text_block = TextBlock.from_text(text="test text") assert text_block.content == "test text" - assert isinstance(text_block.log, Logger) def test_init_with_just_start_index(): - text_block = TextBlock.from_text( - text="test text", start_index=2, log=Logger("test_logger") - ) + text_block = TextBlock.from_text(text="test text", start_index=2) assert text_block.content == "st text" - assert isinstance(text_block.log, Logger) def test_init_with_just_stop_index(): - text_block = TextBlock.from_text( - text="test text", stop_index=2, log=Logger("test_logger") - ) + text_block = TextBlock.from_text(text="test text", stop_index=2) assert text_block.content == "te" - assert isinstance(text_block.log, Logger) def test_init_with_start_index_greater_than_stop_index(): with raises(ValueError): - TextBlock.from_text( - text="test text", start_index=2, stop_index=1, log=Logger("test_logger") - ) + TextBlock.from_text(text="test text", start_index=2, stop_index=1) def test_init_with_start_stop_indices(): - text_block = TextBlock.from_text( - text="test text", start_index=0, stop_index=4, log=Logger("test_logger") - ) + text_block = TextBlock.from_text(text="test text", start_index=0, stop_index=4) assert text_block.content == "test" - assert isinstance(text_block.log, Logger) def test_init_with_start_index_less_than_zero(): with raises(ValueError): - TextBlock.from_text( - text="test text", start_index=-1, stop_index=1, log=Logger("test_logger") - ) + TextBlock.from_text(text="test text", start_index=-1, stop_index=1) def test_init_with_negative_stop_index(): - text_block = TextBlock.from_text( - text="test text", stop_index=-1, log=Logger("test_logger") - ) + text_block = TextBlock.from_text(text="test text", stop_index=-1) assert text_block.content == "test tex" diff --git a/python/tests/unit/template_engine/blocks/test_val_block.py b/python/tests/unit/template_engine/blocks/test_val_block.py index 7ce2fae490ad..8013bbd66f9f 100644 --- a/python/tests/unit/template_engine/blocks/test_val_block.py +++ b/python/tests/unit/template_engine/blocks/test_val_block.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger from semantic_kernel.orchestration.context_variables import ContextVariables from semantic_kernel.template_engine.blocks.block_types import BlockTypes @@ -8,9 +7,8 @@ def test_init(): - val_block = ValBlock(content="'test value'", log=Logger("test_logger")) + val_block = ValBlock(content="'test value'") assert val_block.content == "'test value'" - assert isinstance(val_block.log, Logger) def test_type_property(): @@ -29,28 +27,21 @@ def test_is_valid_invalid_quotes(): val_block = ValBlock(content="'test value\"") is_valid, error_msg = val_block.is_valid() assert not is_valid - assert error_msg == ( - "A value must be defined using either single quotes " - "or double quotes, not both" - ) + assert error_msg == ("A value must be defined using either single quotes " "or double quotes, not both") def test_is_valid_no_quotes(): val_block = ValBlock(content="test value") is_valid, error_msg = val_block.is_valid() assert not is_valid - assert ( - error_msg == "A value must be wrapped in either single quotes or double quotes" - ) + assert error_msg == "A value must be wrapped in either single quotes or double quotes" def test_is_valid_wrong_quotes(): val_block = ValBlock(content="!test value!") is_valid, error_msg = val_block.is_valid() assert not is_valid - assert ( - error_msg == "A value must be wrapped in either single quotes or double quotes" - ) + assert error_msg == "A value must be wrapped in either single quotes or double quotes" def test_render(): diff --git a/python/tests/unit/template_engine/blocks/test_var_block.py b/python/tests/unit/template_engine/blocks/test_var_block.py index a08b63d2a719..840a18c5275c 100644 --- a/python/tests/unit/template_engine/blocks/test_var_block.py +++ b/python/tests/unit/template_engine/blocks/test_var_block.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. 
-from logging import Logger from pytest import mark, raises @@ -11,9 +10,8 @@ def test_init(): - var_block = VarBlock(content="$test_var", log=Logger("test_logger")) + var_block = VarBlock(content="$test_var") assert var_block.content == "$test_var" - assert isinstance(var_block.log, Logger) def test_type_property(): diff --git a/python/tests/unit/template_engine/test_prompt_template_engine.py b/python/tests/unit/template_engine/test_prompt_template_engine.py index 5f5487f60bfe..f7ccd9ae5da8 100644 --- a/python/tests/unit/template_engine/test_prompt_template_engine.py +++ b/python/tests/unit/template_engine/test_prompt_template_engine.py @@ -6,15 +6,14 @@ from semantic_kernel.memory.null_memory import NullMemory from semantic_kernel.orchestration.context_variables import ContextVariables -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function import SKFunction -from semantic_kernel.skill_definition import sk_function -from semantic_kernel.skill_definition.read_only_skill_collection import ( - ReadOnlySkillCollection, +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition import kernel_function +from semantic_kernel.plugin_definition.kernel_plugin_collection import ( + KernelPluginCollection, ) from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine -from semantic_kernel.utils.null_logger import NullLogger @fixture @@ -28,22 +27,17 @@ def variables(): @fixture -def skills(): - return Mock(spec=ReadOnlySkillCollection) +def plugins(): + return Mock(spec=KernelPluginCollection) @fixture -def context(variables, skills): - return SKContext(variables, NullMemory(), skills, NullLogger()) +def context(variables, plugins): + return KernelContext(variables=variables, memory=NullMemory(), plugins=plugins) -def test_it_renders_variables( - target: PromptTemplateEngine, variables: ContextVariables -): - template = ( - "{$x11} This {$a} is {$_a} a {{$x11}} test {{$x11}} " - "template {{foo}}{{bar $a}}{{baz $_a}}{{yay $x11}}" - ) +def test_it_renders_variables(target: PromptTemplateEngine, variables: ContextVariables): + template = "{$x11} This {$a} is {$_a} a {{$x11}} test {{$x11}} " "template {{foo}}{{bar $a}}{{baz $_a}}{{yay $x11}}" blocks = target.extract_blocks(template) updated_blocks = target.render_variables(blocks, variables) @@ -123,62 +117,62 @@ def test_it_renders_variables( @mark.asyncio -async def test_it_renders_code_using_input_async( +async def test_it_renders_code_using_input( target: PromptTemplateEngine, variables: ContextVariables, context_factory, ): - @sk_function(name="function") - def my_function_async(cx: SKContext) -> str: + @kernel_function(name="function") + def my_function(cx: KernelContext) -> str: return f"F({cx.variables.input})" - func = SKFunction.from_native_method(my_function_async) + func = KernelFunction.from_native_method(my_function, "test") assert func is not None variables.update("INPUT-BAR") template = "foo-{{function}}-baz" - result = await target.render_async(template, context_factory(variables, func)) + result = await target.render(template, context_factory(variables, func)) assert result == "foo-F(INPUT-BAR)-baz" @mark.asyncio -async def test_it_renders_code_using_variables_async( +async def test_it_renders_code_using_variables( target: PromptTemplateEngine, variables: 
ContextVariables, context_factory, ): - @sk_function(name="function") - def my_function_async(cx: SKContext) -> str: + @kernel_function(name="function") + def my_function(cx: KernelContext) -> str: return f"F({cx.variables.input})" - func = SKFunction.from_native_method(my_function_async) + func = KernelFunction.from_native_method(my_function, "test") assert func is not None variables.set("myVar", "BAR") template = "foo-{{function $myVar}}-baz" - result = await target.render_async(template, context_factory(variables, func)) + result = await target.render(template, context_factory(variables, func)) assert result == "foo-F(BAR)-baz" @mark.asyncio -async def test_it_renders_async_code_using_variables_async( +async def test_it_renders_code_using_variables_async( target: PromptTemplateEngine, variables: ContextVariables, context_factory, ): - @sk_function(name="function") - async def my_function_async(cx: SKContext) -> str: + @kernel_function(name="function") + async def my_function(cx: KernelContext) -> str: return cx.variables.input - func = SKFunction.from_native_method(my_function_async) + func = KernelFunction.from_native_method(my_function, "test") assert func is not None variables.set("myVar", "BAR") template = "foo-{{function $myVar}}-baz" - result = await target.render_async(template, context_factory(variables, func)) + result = await target.render(template, context_factory(variables, func)) assert result == "foo-BAR-baz" diff --git a/python/tests/unit/template_engine/test_template_tokenizer.py b/python/tests/unit/template_engine/test_template_tokenizer.py index 52d6c5205f68..de0d739de61e 100644 --- a/python/tests/unit/template_engine/test_template_tokenizer.py +++ b/python/tests/unit/template_engine/test_template_tokenizer.py @@ -180,10 +180,7 @@ def test_it_tokenizes_edge_cases_correctly_4(template): def test_it_tokenizes_a_typical_prompt(): target = TemplateTokenizer() - template = ( - "this is a {{ $prompt }} with {{$some}} variables " - "and {{function $calls}} {{ and 'values' }}" - ) + template = "this is a {{ $prompt }} with {{$some}} variables " "and {{function $calls}} {{ and 'values' }}" blocks = target.tokenize(template) diff --git a/python/tests/unit/test_kernel.py b/python/tests/unit/test_kernel.py new file mode 100644 index 000000000000..d641f5b0fedb --- /dev/null +++ b/python/tests/unit/test_kernel.py @@ -0,0 +1,195 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from unittest.mock import AsyncMock, Mock + +import pytest + +from semantic_kernel import Kernel +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.kernel_plugin import KernelPlugin + + +def create_mock_function(name) -> KernelFunction: + function_view = FunctionView(name, "SummarizePlugin", "Summarize an input", [], True, True) + mock_function = Mock(spec=KernelFunction) + mock_function.describe.return_value = function_view + mock_function.name = function_view.name + mock_function.plugin_name = function_view.plugin_name + mock_function.description = function_view.description + + return mock_function + + +@pytest.mark.asyncio +@pytest.mark.parametrize("pipeline_count", [1, 2]) +async def test_run_async_handles_pre_invocation(pipeline_count): + # Arrange + kernel = Kernel() + + mock_function = create_mock_function("test_function") + mock_function.invoke = AsyncMock(side_effect=lambda input, context: context) + kernel.plugins.add(KernelPlugin(name="test", functions=[mock_function])) + + invoked = 0 + + def invoking_handler(sender, e): + nonlocal invoked + invoked += 1 + + kernel.add_function_invoking_handler(invoking_handler) + functions = [mock_function] * pipeline_count + + # Act + _ = await kernel.run(*functions) + + # Assert + assert invoked == pipeline_count + assert mock_function.invoke.call_count == pipeline_count + + +@pytest.mark.asyncio +async def test_run_async_pre_invocation_skip_dont_trigger_invoked_handler(): + # Arrange + kernel = Kernel() + + mock_function1 = create_mock_function(name="SkipMe") + mock_function1.invoke = AsyncMock(side_effect=lambda input, context: context) + mock_function2 = create_mock_function(name="DontSkipMe") + mock_function2.invoke = AsyncMock(side_effect=lambda input, context: context) + invoked = 0 + invoking = 0 + invoked_function_name = "" + + def invoking_handler(sender, e): + nonlocal invoking + invoking += 1 + if e.function_view.name == "SkipMe": + e.skip() + + def invoked_handler(sender, e): + nonlocal invoked_function_name, invoked + invoked_function_name = e.function_view.name + invoked += 1 + + kernel.add_function_invoking_handler(invoking_handler) + kernel.add_function_invoked_handler(invoked_handler) + + # Act + _ = await kernel.run(mock_function1, mock_function2) + + # Assert + assert invoking == 2 + assert invoked == 1 + assert invoked_function_name == "DontSkipMe" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("pipeline_count", [1, 2]) +async def test_run_async_handles_post_invocation(pipeline_count): + # Arrange + kernel = Kernel() + + mock_function = create_mock_function("test_function") + mock_function.invoke = AsyncMock(side_effect=lambda input, context: context) + invoked = 0 + + def invoked_handler(sender, e): + nonlocal invoked + invoked += 1 + + kernel.add_function_invoked_handler(invoked_handler) + functions = [mock_function] * pipeline_count + + # Act + _ = await kernel.run(*functions) + + # Assert + assert invoked == pipeline_count + mock_function.invoke.assert_called() + assert mock_function.invoke.call_count == pipeline_count + + +@pytest.mark.asyncio +async def test_run_async_post_invocation_repeat_is_working(): + # Arrange + kernel = Kernel() + + mock_function = create_mock_function(name="RepeatMe") + mock_function.invoke = AsyncMock(side_effect=lambda input, context: context) + + invoked = 0 + repeat_times = 0 + + def invoked_handler(sender, e): + nonlocal invoked, repeat_times + 
invoked += 1 + + if repeat_times < 3: + e.repeat() + repeat_times += 1 + + kernel.add_function_invoked_handler(invoked_handler) + + # Act + _ = await kernel.run(mock_function) + + # Assert + assert invoked == 4 + assert repeat_times == 3 + + +@pytest.mark.asyncio +async def test_run_async_change_variable_invoking_handler(): + # Arrange + kernel = Kernel() + + mock_function = create_mock_function("test_function") + mock_function.invoke = AsyncMock(side_effect=lambda input, context: context) + + original_input = "Importance" + new_input = "Problems" + + def invoking_handler(sender, e): + e.context.variables.update(new_input) + e.context.variables["new"] = new_input + + kernel.add_function_invoking_handler(invoking_handler) + + # Act + context = await kernel.run(mock_function, input_str=original_input) + + # Assert + assert context.result == new_input + assert context.variables.input == new_input + assert context.variables["new"] == new_input + + +@pytest.mark.asyncio +async def test_run_async_change_variable_invoked_handler(): + # Arrange + kernel = Kernel() + + mock_function = create_mock_function("test_function") + mock_function.invoke = AsyncMock(side_effect=lambda input, context: context) + + original_input = "Importance" + new_input = "Problems" + + def invoked_handler(sender, e): + e.context.variables.update(new_input) + e.context.variables["new"] = new_input + + kernel.add_function_invoked_handler(invoked_handler) + + # Act + context = await kernel.run(mock_function, input_str=original_input) + + # Assert + assert context.result == new_input + assert context.variables.input == new_input + assert context.variables["new"] == new_input + + +if __name__ == "__main__": + pytest.main(["-s", "-v", __file__]) diff --git a/python/tests/unit/test_serialization.py b/python/tests/unit/test_serialization.py index 53d2d760daaa..916455cf03cd 100644 --- a/python/tests/unit/test_serialization.py +++ b/python/tests/unit/test_serialization.py @@ -1,42 +1,35 @@ -import logging import typing as t -import pydantic as pdt import pytest import typing_extensions as te +from pydantic import Field, Json -from semantic_kernel import SKFunctionBase -from semantic_kernel.core_skills.conversation_summary_skill import ( - ConversationSummarySkill, +from semantic_kernel.core_plugins.conversation_summary_plugin import ( + ConversationSummaryPlugin, ) -from semantic_kernel.core_skills.file_io_skill import FileIOSkill -from semantic_kernel.core_skills.http_skill import HttpSkill -from semantic_kernel.core_skills.math_skill import MathSkill -from semantic_kernel.core_skills.text_memory_skill import TextMemorySkill -from semantic_kernel.core_skills.text_skill import TextSkill -from semantic_kernel.core_skills.time_skill import TimeSkill -from semantic_kernel.core_skills.wait_skill import WaitSkill -from semantic_kernel.core_skills.web_search_engine_skill import WebSearchEngineSkill +from semantic_kernel.core_plugins.file_io_plugin import FileIOPlugin +from semantic_kernel.core_plugins.http_plugin import HttpPlugin +from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.core_plugins.text_memory_plugin import TextMemoryPlugin +from semantic_kernel.core_plugins.text_plugin import TextPlugin +from semantic_kernel.core_plugins.time_plugin import TimePlugin +from semantic_kernel.core_plugins.wait_plugin import WaitPlugin +from semantic_kernel.core_plugins.web_search_engine_plugin import WebSearchEnginePlugin +from semantic_kernel.kernel_pydantic import KernelBaseModel from 
semantic_kernel.memory.null_memory import NullMemory from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase from semantic_kernel.orchestration.context_variables import ContextVariables from semantic_kernel.orchestration.delegate_handlers import DelegateHandlers from semantic_kernel.orchestration.delegate_inference import DelegateInference -from semantic_kernel.orchestration.sk_context import SKContext -from semantic_kernel.orchestration.sk_function import SKFunction -from semantic_kernel.sk_pydantic import PydanticField, SKBaseModel -from semantic_kernel.skill_definition.function_view import FunctionView -from semantic_kernel.skill_definition.functions_view import FunctionsView -from semantic_kernel.skill_definition.parameter_view import ParameterView -from semantic_kernel.skill_definition.read_only_skill_collection import ( - ReadOnlySkillCollection, +from semantic_kernel.orchestration.kernel_context import KernelContext +from semantic_kernel.orchestration.kernel_function import KernelFunction +from semantic_kernel.plugin_definition.function_view import FunctionView +from semantic_kernel.plugin_definition.functions_view import FunctionsView +from semantic_kernel.plugin_definition.kernel_function_decorator import kernel_function +from semantic_kernel.plugin_definition.kernel_plugin_collection import ( + KernelPluginCollection, ) -from semantic_kernel.skill_definition.read_only_skill_collection_base import ( - ReadOnlySkillCollectionBase, -) -from semantic_kernel.skill_definition.sk_function_decorator import sk_function -from semantic_kernel.skill_definition.skill_collection import SkillCollection -from semantic_kernel.skill_definition.skill_collection_base import SkillCollectionBase +from semantic_kernel.plugin_definition.parameter_view import ParameterView from semantic_kernel.template_engine.blocks.block import Block from semantic_kernel.template_engine.blocks.block_types import BlockTypes from semantic_kernel.template_engine.blocks.code_block import CodeBlock @@ -53,24 +46,24 @@ from semantic_kernel.template_engine.protocols.text_renderer import TextRenderer from semantic_kernel.template_engine.template_tokenizer import TemplateTokenizer -PydanticFieldT = t.TypeVar("PydanticFieldT", bound=PydanticField) +KernelBaseModelFieldT = t.TypeVar("KernelBaseModelFieldT", bound=KernelBaseModel) class _Serializable(t.Protocol): """A serializable object.""" - def json(self) -> pdt.Json: + def json(self) -> Json: """Return a JSON representation of the object.""" raise NotImplementedError @classmethod - def parse_raw(cls: t.Type[te.Self], json: pdt.Json) -> te.Self: + def parse_raw(cls: t.Type[te.Self], json: Json) -> te.Self: """Return the constructed object from a JSON representation.""" raise NotImplementedError @pytest.fixture() -def sk_factory() -> t.Callable[[t.Type[_Serializable]], _Serializable]: +def kernel_factory() -> t.Callable[[t.Type[_Serializable]], _Serializable]: """Return a factory for various objects in semantic-kernel.""" def create_functions_view() -> FunctionsView: @@ -79,7 +72,7 @@ def create_functions_view() -> FunctionsView: result.add_function( FunctionView( name="function1", - skill_name="skill1", + plugin_name="plugin1", description="Native function", parameters=[], is_semantic=False, @@ -89,7 +82,7 @@ def create_functions_view() -> FunctionsView: result.add_function( FunctionView( name="function1", - skill_name="skill1", + plugin_name="plugin1", description="Semantic function", parameters=[], is_semantic=True, @@ -98,14 +91,14 @@ def 
create_functions_view() -> FunctionsView: ) return result - def create_sk_function() -> SKFunction: - """Return an SKFunction.""" + def create_kernel_function() -> KernelFunction: + """Return a KernelFunction.""" - @sk_function(name="function") - def my_function_async(cx: SKContext) -> str: + @kernel_function(name="function") + def my_function(cx: KernelContext) -> str: return f"F({cx.variables.input})" - return SKFunction.from_native_method(my_function_async) + return KernelFunction.from_native_method(my_function, "plugin") def create_context_variables() -> ContextVariables: """Return a context variables object.""" @@ -114,21 +107,21 @@ def create_context_variables() -> ContextVariables: variables={"foo": "bar"}, ) - def create_skill_collection() -> SkillCollection: - """Return a skill collection.""" - # TODO: Add a few skills to this collection. - return SkillCollection() + def create_plugin_collection() -> KernelPluginCollection: + """Return a plugin collection.""" + # TODO: Add a few plugins to this collection. + return KernelPluginCollection() cls_obj_map = { - Block: Block("foo"), - CodeBlock: CodeBlock("foo"), - FunctionIdBlock: FunctionIdBlock("bar"), - TextBlock: TextBlock("baz"), - ValBlock: ValBlock("qux"), - VarBlock: VarBlock("quux"), - CodeTokenizer: CodeTokenizer(log=logging.getLogger("test")), - PromptTemplateEngine: PromptTemplateEngine(logger=logging.getLogger("test")), - TemplateTokenizer: TemplateTokenizer(log=logging.getLogger("test")), + Block: Block(content="foo"), + CodeBlock: CodeBlock(content="foo"), + FunctionIdBlock: FunctionIdBlock(content="bar"), + TextBlock: TextBlock(content="baz"), + ValBlock: ValBlock(content="qux"), + VarBlock: VarBlock(content="quux"), + CodeTokenizer: CodeTokenizer(), + PromptTemplateEngine: PromptTemplateEngine(), + TemplateTokenizer: TemplateTokenizer(), ParameterView: ParameterView( name="foo", description="bar", @@ -145,18 +138,18 @@ def create_skill_collection() -> SkillCollection: False, ), FunctionsView: create_functions_view(), - ReadOnlySkillCollection: create_skill_collection().read_only_skill_collection, + KernelPluginCollection: create_plugin_collection(), DelegateHandlers: DelegateHandlers(), DelegateInference: DelegateInference(), ContextVariables: create_context_variables(), - SkillCollection: create_skill_collection(), - SKContext[NullMemory]: SKContext( + KernelContext[NullMemory]: KernelContext[NullMemory]( # TODO: Test serialization with different types of memories.
- memory=NullMemory(), variables=create_context_variables(), - skill_collection=create_skill_collection().read_only_skill_collection, + memory=NullMemory(), + plugins=create_plugin_collection(), ), NullMemory: NullMemory(), + KernelFunction: create_kernel_function(), } def constructor(cls: t.Type[_Serializable]) -> _Serializable: @@ -167,32 +160,22 @@ def constructor(cls: t.Type[_Serializable]) -> _Serializable: PROTOCOLS = [ - pytest.param( - ConversationSummarySkill, marks=pytest.mark.xfail(reason="Contains data") - ), - FileIOSkill, - HttpSkill, - MathSkill, - TextMemorySkill, - TextSkill, - TimeSkill, - WaitSkill, - pytest.param(WebSearchEngineSkill, marks=pytest.mark.xfail(reason="Contains data")), + pytest.param(ConversationSummaryPlugin, marks=pytest.mark.xfail(reason="Contains data")), + FileIOPlugin, + HttpPlugin, + MathPlugin, + TextMemoryPlugin, + TextPlugin, + TimePlugin, + WaitPlugin, + pytest.param(WebSearchEnginePlugin, marks=pytest.mark.xfail(reason="Contains data")), CodeRenderer, PromptTemplatingEngine, TextRenderer, ] BASE_CLASSES = [ - ReadOnlySkillCollectionBase, - SkillCollectionBase, SemanticTextMemoryBase, - SKFunctionBase, -] - -# Classes that don't need serialization -UNSERIALIZED_CLASSES = [ - ReadOnlySkillCollection, ] STATELESS_CLASSES = [ @@ -219,12 +202,11 @@ def constructor(cls: t.Type[_Serializable]) -> _Serializable: ParameterView, FunctionView, FunctionsView, - ReadOnlySkillCollection, - SkillCollection, + KernelPluginCollection, ContextVariables, - SKContext[NullMemory], + KernelContext[NullMemory], pytest.param( - SKFunction, + KernelFunction, marks=pytest.mark.xfail(reason="Need to implement Pickle serialization."), ), ] @@ -232,54 +214,48 @@ def constructor(cls: t.Type[_Serializable]) -> _Serializable: class TestUsageInPydanticFields: @pytest.mark.parametrize( - "sk_type", - BASE_CLASSES - + PROTOCOLS - + ENUMS - + PYDANTIC_MODELS - + STATELESS_CLASSES - + UNSERIALIZED_CLASSES, + "kernel_type", + BASE_CLASSES + PROTOCOLS + ENUMS + PYDANTIC_MODELS + STATELESS_CLASSES, ) def test_usage_as_optional_field( self, - sk_type: t.Type[PydanticFieldT], + kernel_type: t.Type[KernelBaseModelFieldT], ) -> None: """Semantic Kernel objects should be valid Pydantic fields. Otherwise, they cannot be used in Pydantic models. """ - class TestModel(SKBaseModel): + class TestModel(KernelBaseModel): """A test model.""" - field: t.Optional[sk_type] = None + field: t.Optional[kernel_type] = None assert_serializable(TestModel(), TestModel) - @pytest.mark.parametrize("sk_type", PYDANTIC_MODELS + STATELESS_CLASSES) + @pytest.mark.parametrize("kernel_type", PYDANTIC_MODELS + STATELESS_CLASSES) def test_usage_as_required_field( self, - sk_factory: t.Callable[[t.Type[PydanticFieldT]], PydanticFieldT], - sk_type: t.Type[PydanticFieldT], + kernel_factory: t.Callable[[t.Type[KernelBaseModelFieldT]], KernelBaseModelFieldT], + kernel_type: t.Type[KernelBaseModelFieldT], ) -> None: """Semantic Kernel objects should be valid Pydantic fields. Otherwise, they cannot be used in Pydantic models. 
""" - class TestModel(SKBaseModel): + class TestModel(KernelBaseModel): """A test model.""" - field: sk_type = pdt.Field(default_factory=lambda: sk_factory(sk_type)) + field: kernel_type = Field(default_factory=lambda: kernel_factory(kernel_type)) assert_serializable(TestModel(), TestModel) - assert_serializable(TestModel(field=sk_factory(sk_type)), TestModel) + assert_serializable(TestModel(field=kernel_factory(kernel_type)), TestModel) def assert_serializable(obj: _Serializable, obj_type) -> None: - """Assert that an object is serializable.""" + """Assert that an object is serializable, uses both dump and dump_json methods.""" assert obj is not None - serialized = obj.json() + serialized = obj.model_dump_json() assert isinstance(serialized, str) - deserialized = obj_type.parse_raw(serialized) - assert deserialized == obj + assert obj_type.model_validate_json(serialized).model_dump() == obj.model_dump() diff --git a/python/tests/unit/text/test_function_extension.py b/python/tests/unit/text/test_function_extension.py index 0660c8ed4164..62148c3eaf3e 100644 --- a/python/tests/unit/text/test_function_extension.py +++ b/python/tests/unit/text/test_function_extension.py @@ -2,15 +2,13 @@ import semantic_kernel.connectors.ai.open_ai as sk_oai from semantic_kernel import Kernel -from semantic_kernel.text import aggregate_chunked_results_async +from semantic_kernel.text import aggregate_chunked_results @pytest.mark.asyncio async def test_aggregate_results(): kernel = Kernel() - kernel.add_text_completion_service( - "davinci-002", sk_oai.OpenAITextCompletion("text-davinci-002", "none", "none") - ) + kernel.add_text_completion_service("davinci-002", sk_oai.OpenAITextCompletion("text-davinci-002", "none", "none")) sk_prompt = """ {{$input}} How is that ? @@ -33,6 +31,6 @@ async def test_aggregate_results(): "Seriously, this is the end.", "We're finished. All set. Bye. Done", ] - context = await aggregate_chunked_results_async(func, chunked, context) + context = await aggregate_chunked_results(func, chunked, context) assert context.variables.input == "\n".join(chunked) diff --git a/python/tests/unit/text/test_text_chunker.py b/python/tests/unit/text/test_text_chunker.py index cea178a1bd5c..b910cb174125 100644 --- a/python/tests/unit/text/test_text_chunker.py +++ b/python/tests/unit/text/test_text_chunker.py @@ -146,8 +146,7 @@ def test_split_text_paragraph_evenly(): text = [ "This is a test of the emergency broadcast system. This is only a test.", "We repeat, this is only a test. A unit test.", - "A small note. And another. And once again. Seriously, this is the end. " - + "We're finished. All set. Bye.", + "A small note. And another. And once again. Seriously, this is the end. " + "We're finished. All set. Bye.", "Done.", ] @@ -168,8 +167,7 @@ def test_split_text_paragraph_evenly_2(): """Test split_paragraph() with evenly split input""" text = [ - "The gentle breeze rustled the autumn leaves on the tree branches. " - + "She smiled and walked away.", + "The gentle breeze rustled the autumn leaves on the tree branches. " + "She smiled and walked away.", "The sun set over the horizon peacefully, the beautiful star. Cats love boxes.", "That is something. Incredible news that is. " + "What a beautiful day to be alive. Seriously, this is the end. " @@ -187,8 +185,7 @@ def test_split_text_paragraph_evenly_2(): "The sun set over the horizon peacefully, the beautiful star.", f"Cats love boxes.{NEWLINE}That is something. 
Incredible news that is.", f"What a beautiful day to be alive.{NEWLINE}Seriously, this is the end.", - f"We're finished once of for all. All set. Ok.{NEWLINE}Done.{NEWLINE}" - + f"Or is it?{NEWLINE}Surprise!", + f"We're finished once of for all. All set. Ok.{NEWLINE}Done.{NEWLINE}" + f"Or is it?{NEWLINE}Surprise!", ] split = split_plaintext_paragraph(text, max_token_per_line) assert expected == split @@ -201,8 +198,7 @@ def test_split_paragraph_newline(): text = [ "This is a test of the emergency broadcast system\r\nThis is only a test", "We repeat this is only a test\nA unit test", - "A small note\nAnd another\r\nAnd once again\rSeriously this is the end\n" - + "We're finished\nAll set\nBye\n", + "A small note\nAnd another\r\nAnd once again\rSeriously this is the end\n" + "We're finished\nAll set\nBye\n", "Done", ] expected = [ @@ -224,8 +220,7 @@ def test_split_paragraph_punctuation(): text = [ "This is a test of the emergency broadcast system. This is only a test", "We repeat, this is only a test? A unit test", - "A small note! And another? And once again! Seriously, this is the end. " - + "We're finished. All set. Bye.", + "A small note! And another? And once again! Seriously, this is the end. " + "We're finished. All set. Bye.", "Done.", ] expected = [ @@ -248,8 +243,7 @@ def test_split_paragraph_semicolon(): text = [ "This is a test of the emergency broadcast system; This is only a test", "We repeat; this is only a test; A unit test", - "A small note; And another; And once again; Seriously, this is the end;" - + " We're finished; All set; Bye.", + "A small note; And another; And once again; Seriously, this is the end;" + " We're finished; All set; Bye.", "Done.", ] expected = [ @@ -271,8 +265,7 @@ def test_split_paragraph_colon(): text = [ "This is a test of the emergency broadcast system: This is only a test", "We repeat: this is only a test: A unit test", - "A small note: And another: And once again: Seriously, this is the end: " - + "We're finished: All set: Bye.", + "A small note: And another: And once again: Seriously, this is the end: " + "We're finished: All set: Bye.", "Done.", ] expected = [ @@ -294,8 +287,7 @@ def test_split_paragraph_commas(): text = [ "This is a test of the emergency broadcast system, This is only a test", "We repeat, this is only a test, A unit test", - "A small note, And another, And once again, Seriously this is the end, " - + "We're finished, All set, Bye.", + "A small note, And another, And once again, Seriously this is the end, " + "We're finished, All set, Bye.", "Done.", ] expected = [ @@ -317,8 +309,7 @@ def test_split_paragraph_closing_brackets(): text = [ "This is a test of the emergency broadcast system) This is only a test", "We repeat) this is only a test) A unit test", - "A small note] And another) And once again] Seriously this is the end} " - + "We're finished} All set} Bye.", + "A small note] And another) And once again] Seriously this is the end} " + "We're finished} All set} Bye.", "Done.", ] expected = [ @@ -326,8 +317,7 @@ def test_split_paragraph_closing_brackets(): "This is only a test", "We repeat) this is only a test) A unit test", "A small note] And another) And once again]", - "Seriously this is the end\u007d We're finished\u007d All set\u007d " - + f"Bye.{NEWLINE}Done.", + "Seriously this is the end\u007d We're finished\u007d All set\u007d " + f"Bye.{NEWLINE}Done.", ] max_token_per_line = 15 split = split_plaintext_paragraph(text, max_token_per_line) @@ -341,8 +331,7 @@ def test_split_paragraph_spaces(): text = [ "This is a 
test of the emergency broadcast system This is only a test", "We repeat this is only a test A unit test", - "A small note And another And once again Seriously this is the end We're " - + "finished All set Bye.", + "A small note And another And once again Seriously this is the end We're " + "finished All set Bye.", "Done.", ] expected = [ @@ -364,8 +353,7 @@ def test_split_paragraph_hyphens(): text = [ "This is a test of the emergency broadcast system-This is only a test", "We repeat-this is only a test-A unit test", - "A small note-And another-And once again-Seriously, this is the end-We're" - + " finished-All set-Bye.", + "A small note-And another-And once again-Seriously, this is the end-We're" + " finished-All set-Bye.", "Done.", ] expected = [ @@ -389,8 +377,7 @@ def test_split_paragraph_nodelimiters(): "Thisisonlyatest", "WerepeatthisisonlyatestAunittest", "AsmallnoteAndanotherAndonceagain", - "SeriouslythisistheendWe'refinishedAllsetByeDoneThisOneWillBeSplitToMeet" - + "TheLimit", + "SeriouslythisistheendWe'refinishedAllsetByeDoneThisOneWillBeSplitToMeet" + "TheLimit", ] expected = [ f"Thisisatestoftheemergencybroadcastsystem{NEWLINE}Thisisonlyatest", @@ -411,8 +398,7 @@ def test_split_md_on_dot(): text = [ "This is a test of the emergency broadcast\n system.This\n is only a test", "We repeat. this is only a test. A unit test", - "A small note. And another. And once again. Seriously, this is the end. " - + "We're finished. All set. Bye.", + "A small note. And another. And once again. Seriously, this is the end. " + "We're finished. All set. Bye.", "Done.", ] expected = [ @@ -434,8 +420,7 @@ def test_split_md_on_colon(): text = [ "This is a test of the emergency broadcast system: This is only a test", "We repeat: this is only a test: A unit test", - "A small note: And another: And once again: Seriously, this is the end: " - + "We're finished: All set: Bye.", + "A small note: And another: And once again: Seriously, this is the end: " + "We're finished: All set: Bye.", "Done.", ] expected = [ @@ -457,8 +442,7 @@ def test_split_md_on_punctuation(): text = [ "This is a test of the emergency broadcast\n system?This\n is only a test", "We repeat? this is only a test! A unit test", - "A small note? And another! And once again? Seriously, this is the end! " - + "We're finished! All set! Bye.", + "A small note? And another! And once again? Seriously, this is the end! " + "We're finished! All set! 
Bye.", "Done.", ] expected = [ @@ -480,8 +464,7 @@ def test_split_md_on_semicolon(): text = [ "This is a test of the emergency broadcast system; This is only a test", "We repeat; this is only a test; A unit test", - "A small note; And another; And once again; Seriously, this is the end; " - + "We're finished; All set; Bye.", + "A small note; And another; And once again; Seriously, this is the end; " + "We're finished; All set; Bye.", "Done.", ] expected = [ @@ -503,8 +486,7 @@ def test_split_md_on_commas(): test = [ "This is a test of the emergency broadcast system, This is only a test", "We repeat, this is only a test, A unit test", - "A small note, And another, And once again, Seriously, this is the end, " - + "We're finished, All set, Bye.", + "A small note, And another, And once again, Seriously, this is the end, " + "We're finished, All set, Bye.", "Done.", ] expected = [ @@ -526,8 +508,7 @@ def test_split_md_on_brackets(): test = [ "This is a test of the emergency broadcast system) This is only a test.", "We repeat [this is only a test] A unit test", - "A small note (And another) And once (again) Seriously, this is the end " - + "We're finished (All set) Bye.", + "A small note (And another) And once (again) Seriously, this is the end " + "We're finished (All set) Bye.", "Done.", ] expected = [ @@ -549,8 +530,7 @@ def test_split_md_on_spaces(): test = [ "This is a test of the emergency broadcast system This is only a test", "We repeat this is only a test A unit test", - "A small note And another And once again Seriously this is the end We're " - + "finished All set Bye.", + "A small note And another And once again Seriously this is the end We're " + "finished All set Bye.", "Done.", ] expected = [ @@ -569,8 +549,7 @@ def test_split_md_on_newlines(): test = [ "This_is_a_test_of_the_emergency_broadcast_system\r\nThis_is_only_a_test", "We_repeat_this_is_only_a_test\nA_unit_test", - "A_small_note\nAnd_another\r\nAnd_once_again\rSeriously_this_is_the_end\n" - + "We're_finished\nAll_set\nBye\n", + "A_small_note\nAnd_another\r\nAnd_once_again\rSeriously_this_is_the_end\n" + "We're_finished\nAll_set\nBye\n", "Done", ] expected = [ diff --git a/samples/apps/.eslintrc.js b/samples/apps/.eslintrc.js deleted file mode 100644 index 3755ebea6fee..000000000000 --- a/samples/apps/.eslintrc.js +++ /dev/null @@ -1,37 +0,0 @@ -module.exports = { - env: { - es2021: true, - }, - extends: [ - 'eslint:recommended', - 'plugin:react/recommended', - 'plugin:react-hooks/recommended', - 'plugin:@typescript-eslint/recommended', - 'plugin:@typescript-eslint/recommended-requiring-type-checking', - 'plugin:@typescript-eslint/strict', - ], - ignorePatterns: ['build', '.*.js', 'node_modules'], - parserOptions: { - project: './tsconfig.json', - ecmaVersion: 'latest', - sourceType: 'module', - }, - rules: { - '@typescript-eslint/array-type': ['error', { default: 'array-simple' }], - '@typescript-eslint/triple-slash-reference': ['error', { types: 'prefer-import' }], - '@typescript-eslint/non-nullable-type-assertion-style': 'off', - '@typescript-eslint/strict-boolean-expressions': 'off', - '@typescript-eslint/explicit-function-return-type': 'off', - '@typescript-eslint/consistent-type-imports': 'off', - '@typescript-eslint/no-empty-function': 'off', - '@typescript-eslint/no-explicit-any': 'off', - 'react/react-in-jsx-scope': 'off', - 'react/prop-types': 'off', - 'react/jsx-props-no-spreading': 'off', - }, - settings: { - react: { - version: 'detect', - }, - }, -}; diff --git a/samples/apps/README.md 
b/samples/apps/README.md deleted file mode 100644 index fd8675a27ca4..000000000000 --- a/samples/apps/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Sample Applications -This folder contains a set of end-to-end applications leveraging various features of the Semantic Kernel orchestrator. \ No newline at end of file diff --git a/samples/apps/auth-api-webapp-react/.env.example b/samples/apps/auth-api-webapp-react/.env.example deleted file mode 100644 index dc1fffca82e0..000000000000 --- a/samples/apps/auth-api-webapp-react/.env.example +++ /dev/null @@ -1,8 +0,0 @@ -REACT_APP_GRAPH_CLIENT_ID= -REACT_APP_GRAPH_SCOPES=User.Read,Files.ReadWrite,Tasks.ReadWrite,Mail.Send -REACT_APP_FUNCTION_URI=http://localhost:7071 -REACT_APP_OPEN_AI_KEY= -REACT_APP_OPEN_AI_MODEL= -REACT_APP_AZURE_OPEN_AI_KEY= -REACT_APP_AZURE_OPEN_AI_DEPLOYMENT= -REACT_APP_AZURE_OPEN_AI_ENDPOINT= diff --git a/samples/apps/auth-api-webapp-react/.env.local b/samples/apps/auth-api-webapp-react/.env.local deleted file mode 100644 index 146010753578..000000000000 --- a/samples/apps/auth-api-webapp-react/.env.local +++ /dev/null @@ -1,2 +0,0 @@ -# Disable ESLint in all environments -DISABLE_ESLINT_PLUGIN=true \ No newline at end of file diff --git a/samples/apps/auth-api-webapp-react/README.md b/samples/apps/auth-api-webapp-react/README.md deleted file mode 100644 index 8e3f546388dd..000000000000 --- a/samples/apps/auth-api-webapp-react/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Authenticated API’s Sample Learning App - -> [!IMPORTANT] -> This sample will be removed in a future release. If you are looking for samples that demonstrate -> how to use Semantic Kernel, please refer to the sample folders in the root [python](../../../python/samples/) -> and [dotnet](../../../dotnet/samples/) folders. - -> [!IMPORTANT] -> This learning sample is for educational purposes only and should not be used in any production -> use case. It is intended to highlight concepts of Semantic Kernel and not any -> architectural / security design practices to be used. - -### Watch the Authenticated API’s Sample Quick Start [Video](https://aka.ms/SK-Samples-AuthAPI-Video) - -## Running the sample - -1. You will need an [OpenAI Key](https://openai.com/product/) or - [Azure OpenAI Service key](https://learn.microsoft.com/azure/cognitive-services/openai/quickstart) - for this sample -2. Ensure the KernelHttpServer sample is already running at `http://localhost:7071`. If not, follow the steps - to start it [here](../../dotnet/KernelHttpServer/README.md). -3. You will also need to - [register your application](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app) - in the Azure Portal. Follow the steps to register your app - [here](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app). - - Select **`Single-page application (SPA)`** as platform type, and the Redirect URI will be **`http://localhost:3000`** - - Select **`Personal Microsoft accounts only`** as supported account types for this sample -4. Copy **[.env.example](.env.example)** into a new file with name "**.env**". - > **Note**: Samples are configured to use chat completion AI models (e.g., gpt-3.5-turbo, gpt-4, etc.). See https://platform.openai.com/docs/models/model-endpoint-compatibility for chat completion model options. -5. Once registered, copy the **Application (client) ID** from the Azure Portal and paste - the GUID into the **.env** file next to `REACT_APP_GRAPH_CLIENT_ID=` (first line of the .env file). -6. 
**Run** the following command `yarn install` (if you have never run the sample before) - and/or `yarn start` from the command line. -7. A browser will automatically open, otherwise you can navigate to `http://localhost:3000` to use the sample. - -> Working with Secrets: [KernelHttpServer's Readme](../../dotnet/KernelHttpServer/README.md#Working-with-Secrets) has a note on safely working with keys and other secrets. - -## About the Authenticated API’s Sample - -The Authenticated API’s sample allows you to use authentication to connect to the -Microsoft Graph using your personal account. - -If you don’t have a Microsoft account or do not want to connect to it, -you can review the code to see the patterns needed to call out to APIs. - -The sample highlights connecting to Microsoft Graph and calling APIs for Outlook, OneDrive, and ToDo. -Each function will call Microsoft Graph and/or Open AI to perform the tasks. - -> [!CAUTION] -> Each function will call Open AI which will use tokens that you will be billed for. - -## Troubleshooting - -### unauthorized_client: The client does not exist or is not enabled for consumers. - -1. Ensure in your Application Manifest that **`Personal Microsoft accounts`** are allowed to sign in. - - - `"signInAudience": "PersonalMicrosoftAccount"` or - - `"signInAudience": "AzureADandPersonalMicrosoftAccount"` - -2. If you are not able to change the manifest, create a new Application following the instructions in [Running the sample](#running-the-sample), step 3. diff --git a/samples/apps/auth-api-webapp-react/package.json b/samples/apps/auth-api-webapp-react/package.json deleted file mode 100644 index cdfe0bbe357c..000000000000 --- a/samples/apps/auth-api-webapp-react/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "starter-identity-webapp-react", - "version": "0.1.0", - "private": true, - "dependencies": { - "@azure/msal-browser": "^2.33.0", - "@azure/msal-react": "^1.5.3", - "@fluentui/react-components": "^9.15.6", - "msal": "^1.4.17", - "react": "^18.2.0", - "react-dom": "^18.2.0", - "web-vitals": "^3.1.1" - }, - "devDependencies": { - "@testing-library/jest-dom": "^5.14.1", - "@testing-library/react": "^14.0.0", - "@testing-library/user-event": "^14.4.3", - "@types/jest": "^29.4.0", - "@types/node": "^18.14.0", - "@types/react": "^18.0.28", - "@types/react-dom": "^18.0.11", - "eslint": "^8.42.0", - "react-scripts": "5.0.1", - "typescript": "^4.9.5" - }, - "scripts": { - "lint": "eslint src", - "start": "react-scripts start", - "build": "react-scripts build", - "test": "react-scripts test", - "eject": "react-scripts eject" - }, - "browserslist": { - "production": [ - ">0.2%", - "not dead", - "not op_mini all" - ], - "development": [ - "last 1 chrome version", - "last 1 firefox version", - "last 1 safari version" - ] - } -} diff --git a/samples/apps/auth-api-webapp-react/public/favicon.ico b/samples/apps/auth-api-webapp-react/public/favicon.ico deleted file mode 100644 index bfe873eb228f..000000000000 Binary files a/samples/apps/auth-api-webapp-react/public/favicon.ico and /dev/null differ diff --git a/samples/apps/auth-api-webapp-react/public/index.html b/samples/apps/auth-api-webapp-react/public/index.html deleted file mode 100644 index ba8b8c439363..000000000000 --- a/samples/apps/auth-api-webapp-react/public/index.html +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - Authentication and API's App - - - - -
- - - - \ No newline at end of file diff --git a/samples/apps/auth-api-webapp-react/src/App.css b/samples/apps/auth-api-webapp-react/src/App.css deleted file mode 100644 index 2bd4b20d460d..000000000000 --- a/samples/apps/auth-api-webapp-react/src/App.css +++ /dev/null @@ -1,45 +0,0 @@ -body { - padding: 0px; - margin: 0px; -} - -#container { - display: flex; - flex-direction: column; - align-items: stretch; - justify-content: space-between; -} - -#header { - background-color: #9c2153; - width: 100%; - height: 40px; - color: #FFF; - display: flex; -} - -#header h1 { - padding-left: 20px; - align-items: center; - display: flex; -} - -#content { - display: flex; - align-items: stretch; - flex-direction: row; - padding-top: 12px; - gap: 80px; -} - -#main { - display: flex; - align-items: stretch; - flex-direction: column; - gap: 10px; -} - -#tipbar { - background-color: #FAF9F8; - width: 360px; -} \ No newline at end of file diff --git a/samples/apps/auth-api-webapp-react/src/App.tsx b/samples/apps/auth-api-webapp-react/src/App.tsx deleted file mode 100644 index ca4d0d67d7aa..000000000000 --- a/samples/apps/auth-api-webapp-react/src/App.tsx +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -import { AuthenticatedTemplate, useAccount, useIsAuthenticated, useMsal } from '@azure/msal-react'; -import { Subtitle1, Tab, TabList } from '@fluentui/react-components'; -import { FC, useEffect, useState } from 'react'; -import FunctionProbe from './components/FunctionProbe'; -import InteractWithGraph from './components/InteractWithGraph'; -import QuickTips, { ITipGroup } from './components/QuickTips'; -import ServiceConfig from './components/ServiceConfig'; -import YourInfo from './components/YourInfo'; -import { IKeyConfig } from './model/KeyConfig'; - -const App: FC = () => { - enum AppState { - ProbeForFunction = 0, - YourInfo = 1, - Setup = 2, - InteractWithGraph = 3, - } - - const isAuthenticated = useIsAuthenticated(); - const { instance, accounts } = useMsal(); - const account = useAccount(accounts[0] || {}); - const [appState, setAppState] = useState(AppState.ProbeForFunction); - const [selectedTabValue, setSelectedTabValue] = useState(isAuthenticated ? 'setup' : 'yourinfo'); - const [config, setConfig] = useState(); - - const appStateToTabValueMap = new Map([ - [AppState.Setup, 'setup'], - [AppState.InteractWithGraph, 'interact'], - [AppState.YourInfo, 'yourinfo'], - ]); - const tabValueToAppStateMap = new Map([ - ['setup', AppState.Setup], - ['yourinfo', AppState.YourInfo], - ['interact', AppState.InteractWithGraph], - ]); - - useEffect(() => { - changeAppState(appState); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [appState]); - - useEffect(() => { - if (isAuthenticated) { - setAppState(AppState.Setup); - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [isAuthenticated]); - - const changeAppState = function (newAppState: AppState) { - setAppState(newAppState); - setSelectedTabValue(appStateToTabValueMap.get(newAppState) ?? 'setup'); - }; - const changeTabValue = function (newTabValue: string) { - setSelectedTabValue(newTabValue); - setAppState(tabValueToAppStateMap.get(newTabValue) ?? AppState.Setup); - }; - - useEffect(() => { - const fetchAsync = async () => { - if (config === undefined || config === null) { - return; - } - - var result = await instance.acquireTokenSilent({ - account: account !== null ? 
account : undefined, - scopes: (process.env.REACT_APP_GRAPH_SCOPES as string).split(','), - forceRefresh: false, - }); - - config.graphToken = result.accessToken; - setConfig(config); - }; - - fetchAsync(); - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [config]); - - const tips: ITipGroup[] = [ - { - header: 'Useful Resources', - items: [ - { - title: 'Read Documentation', - uri: 'https://aka.ms/SKDoc-Auth-API', - }, - ], - }, - { - header: 'Functions used in this sample', - items: [ - { - title: 'Summarize', - uri: 'https://github.com/microsoft/semantic-kernel/tree/main/samples/skills/SummarizeSkill/Summarize', - }, - { - title: 'AppendTextAsync', - uri: 'https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Skills/Skills.Document/DocumentSkill.cs#L86', - }, - { - title: 'UploadFileAsync', - uri: 'https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Skills/Skills.MsGraph/CloudDriveSkill.cs#L61', - }, - { - title: 'CreateLinkAsync', - uri: 'https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Skills/Skills.MsGraph/CloudDriveSkill.cs#L88', - }, - { - title: 'GetMyEmailAddressAsync', - uri: 'https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Skills/Skills.MsGraph/EmailSkill.cs#L55', - }, - { - title: 'SendEmailAsync', - uri: 'https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Skills/Skills.MsGraph/EmailSkill.cs#L65', - }, - { - title: 'AddTaskAsync', - uri: 'https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Skills/Skills.MsGraph/TaskListSkill.cs#L71', - }, - ], - }, - { - header: 'Local SK URL', - items: [ - { - title: process.env.REACT_APP_FUNCTION_URI as string, - uri: process.env.REACT_APP_FUNCTION_URI as string, - }, - ], - }, - ]; - - return ( -
- - - {appState === AppState.ProbeForFunction ? ( - setAppState(isAuthenticated ? AppState.Setup : AppState.YourInfo)} - /> - ) : null} -
-
- {appState === AppState.ProbeForFunction ? null : ( - changeTabValue(data.value as string)} - > - Your Info - - Setup - - - Interact - - - )} -
- {appState === AppState.YourInfo ? : null} - - {appState === AppState.Setup ? ( - { - setConfig(config); - setAppState(AppState.InteractWithGraph); - }} - /> - ) : null} - - {appState === AppState.InteractWithGraph ? ( - - { - changeAppState(appState - 1); - }} - /> - - ) : null} -
-
- {appState === AppState.ProbeForFunction ? null : ( -
- -
- )} -
-
- ); -}; - -export default App; diff --git a/samples/apps/auth-api-webapp-react/src/components/FunctionProbe.tsx b/samples/apps/auth-api-webapp-react/src/components/FunctionProbe.tsx deleted file mode 100644 index b2d7b635c51f..000000000000 --- a/samples/apps/auth-api-webapp-react/src/components/FunctionProbe.tsx +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -import { Body1, Spinner, Title3 } from '@fluentui/react-components'; -import { FC, useEffect } from 'react'; - -interface IData { - uri: string; - onFunctionFound: () => void; -} - -const FunctionProbe: FC = ({ uri, onFunctionFound }) => { - useEffect(() => { - const fetchAsync = async () => { - try { - var result = await fetch(`${uri}/api/ping`); - - if (result.ok) { - onFunctionFound(); - } - } catch {} - }; - - fetchAsync(); - }); - - return ( -
- Looking for your function - - - This sample expects to find the Azure Function from samples/dotnet/KernelHttpServer{' '} - running at {uri} - - - Run your Azure Function locally using{' '} - - Visual Studio - - ,{' '} - - Visual Studio Code - {' '} - or from the command line using the{' '} - - Azure Functions Core Tools - - -
- ); -}; - -export default FunctionProbe; diff --git a/samples/apps/auth-api-webapp-react/src/components/InteractWithGraph.tsx b/samples/apps/auth-api-webapp-react/src/components/InteractWithGraph.tsx deleted file mode 100644 index 9f0d7255d890..000000000000 --- a/samples/apps/auth-api-webapp-react/src/components/InteractWithGraph.tsx +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -import { Body1, Button, Image, Textarea, Title3 } from '@fluentui/react-components'; -import React, { FC } from 'react'; -import wordLogo from '../../src/word.png'; -import { useSemanticKernel } from '../hooks/useSemanticKernel'; -import { IKeyConfig } from '../model/KeyConfig'; -import InteractionButton from './InteractionButton'; - -interface IData { - uri: string; - config: IKeyConfig; - onBack: () => void; -} - -const InteractWithGraph: FC = ({ uri, config, onBack }) => { - const sk = useSemanticKernel(uri); - const defaultText = `A glacier is a persistent body of dense ice that is constantly moving under its own weight. A glacier forms where the accumulation of snow exceeds its ablation over many years, often centuries. It acquires distinguishing features, such as crevasses and seracs, as it slowly flows and deforms under stresses induced by its weight. As it moves, it abrades rock and debris from its substrate to create landforms such as cirques, moraines, or fjords. Although a glacier may flow into a body of water, it forms only on land and is distinct from the much thinner sea ice and lake ice that form on the surface of bodies of water.`; - const filename = 'AuthenticationSampleSummary.docx'; - const path = '%temp%\\' + filename; - const destinationPath = '/' + filename; - - const [text, setText] = React.useState(defaultText); - - const runTask1 = async () => { - try { - //get summary - var summary = await sk.invokeAsync(config, { value: text }, 'summarizeskill', 'summarize'); - - //write document - await sk.invokeAsync( - config, - { - value: summary.value, - inputs: [{ key: 'filePath', value: path }], - }, - 'documentskill', - 'appendtext', - ); - - //upload to onedrive - await sk.invokeAsync( - config, - { - value: path, - inputs: [{ key: 'destinationPath', value: destinationPath }], - }, - 'clouddriveskill', - 'uploadfile', - ); - } catch (e) { - alert('Something went wrong.\n\nDetails:\n' + e); - } - }; - - const runTask2 = async () => { - try { - var shareLink = await sk.invokeAsync( - config, - { value: destinationPath }, - 'clouddriveskill', - 'createlink', - ); - var myEmail = await sk.invokeAsync(config, { value: '' }, 'emailskill', 'getmyemailaddress'); - - await sk.invokeAsync( - config, - { - value: `Here's the link: ${shareLink.value}\n\nReminder: Please delete the document on your OneDrive after you finish with this sample app.`, - inputs: [ - { - key: 'recipients', - value: myEmail.value, - }, - { - key: 'subject', - value: 'Semantic Kernel Authentication Sample Project Document Link', - }, - ], - }, - 'emailskill', - 'sendemail', - ); - } catch (e) { - alert('Something went wrong.\n\nDetails:\n' + e); - } - }; - - const runTask3 = async () => { - try { - var reminderDate = new Date(); - reminderDate.setDate(reminderDate.getDate() + 3); - - await sk.invokeAsync( - config, - { - value: 'Remind me to follow up re the authentication sample email', - inputs: [ - { - key: 'reminder', - value: reminderDate.toISOString(), - }, - ], - }, - 'tasklistskill', - 'addtask', - ); - } catch (e) { - alert('Something went wrong.\n\nDetails:\n' + e); - } - }; - - 
return ( -
- Interact with data and services - - You can interact with data and Microsoft services for your account. Ask questions about your data or ask - for help to complete a task. - - -
-
- - Sample Doc: {filename} -
- -